feat(docs): Migrate to dual Nextra/Astro deployment & recreate products section
This commit introduces a significant restructuring of the documentation deployment and content strategy to support a gradual migration from Nextra to Astro.

- **New Astro Workflow (`jan-astro-docs.yml`)**: Implemented a new, separate GitHub Actions workflow to build and deploy the Astro site from the `/website` directory to a new subdomain (`v2.jan.ai`). This isolates the new site from the existing one, allowing for independent development and testing.
- **Removed Combined Workflow**: Deleted the previous, more complex combined workflow (`jan-combined-docs.yml`) and its associated test scripts to simplify the deployment process and eliminate routing conflicts.
- **Astro Config Update**: Simplified the Astro configuration (`astro.config.mjs`) by removing the conditional `base` path. The Astro site is now configured to deploy to the root of its own subdomain.
- **Mirrored Content**: Recreated the entire `/products` section from the Astro site within the Nextra site at `/docs/src/pages/products`. This provides content parity and a consistent user experience on both platforms during the transition period.
- **File Structure**: Established a clear, organized structure for platforms, models, and tools within the Nextra `products` directory.
- **Nextra Sidebar Fix**: Implemented the correct `_meta.json` structure for the new products section. Created nested meta files to build a collapsible sidebar, fixing the UI bug that caused duplicated navigation items.
- **"Coming Soon" Pages**: Added clear, concise "Coming Soon" and "In Development" banners and content for upcoming products like Jan V1, Mobile, Server, and native Tools, ensuring consistent messaging across both sites.
- **.gitignore**: Updated the root `.gitignore` to properly exclude build artifacts, caches, and environment files for both the Nextra (`/docs`) and Astro (`/website`) projects.
- **Repository Cleanup**: Removed temporary and unused files related to the previous combined deployment attempt.

This new architecture provides a stable, predictable, and low-risk path for migrating our documentation to Astro while ensuring the current production site remains unaffected.
parent 6c189ea961, commit 8511e35df8
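For context on the `astro.config.mjs` simplification described above, here is a minimal before/after sketch. The "before" branch is reconstructed from the commit description only, and the `PUBLIC_BASE_PATH` variable name is borrowed from a comment in the workflow below; treat it as illustrative rather than the exact removed code. The "after" shape matches the new config further down in this diff.

```ts
// astro.config sketch: how a conditional base path is typically wired, and what removing it looks like.
import { defineConfig } from 'astro/config'

// Before (illustrative only): base depended on an env var when the site
// was served under a sub-path of the main domain.
// export default defineConfig({
//   site: 'https://jan.ai',
//   base: process.env.PUBLIC_BASE_PATH ?? '/',
// })

// After: the site deploys to the root of its own subdomain, so no `base` is needed.
export default defineConfig({
  site: 'https://v2.jan.ai',
})
```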
75 .github/workflows/jan-astro-docs.yml (vendored, Normal file)
@@ -0,0 +1,75 @@
name: Deploy Astro Docs (v2)

on:
  push:
    branches:
      - dev
    paths:
      - 'website/**'
      - '.github/workflows/jan-astro-docs.yml'
  pull_request:
    paths:
      - 'website/**'
      - '.github/workflows/jan-astro-docs.yml'
  workflow_dispatch:

jobs:
  deploy:
    name: Deploy to v2.jan.ai
    env:
      # IMPORTANT: You will need to create a new Cloudflare Pages project
      # and name it "jan-v2" or update this value to your new project name.
      CLOUDFLARE_PROJECT_NAME: jan-v2
    runs-on: ubuntu-latest
    permissions:
      contents: read
      deployments: write
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18

      - name: Install dependencies
        working-directory: website
        run: npm install

      - name: Build Astro Docs
        working-directory: website
        # No PUBLIC_BASE_PATH is set, so it builds for the root, which is correct for a subdomain
        run: npm run build

      - name: Publish to Cloudflare Pages (PR Preview)
        if: github.event_name == 'pull_request'
        uses: cloudflare/pages-action@v1
        id: deployPreview
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
          directory: ./website/dist
          gitHubToken: ${{ secrets.GITHUB_TOKEN }}

      - name: Add PR Comment with Preview URL
        if: github.event_name == 'pull_request'
        uses: mshick/add-pr-comment@v2
        with:
          message: |
            🚀 Astro docs preview is ready!
            URL: ${{ steps.deployPreview.outputs.url }}

      - name: Publish to Cloudflare Pages (Production)
        if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch')
        uses: cloudflare/pages-action@v1
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
          directory: ./website/dist
          # This deploys to the production branch of your new Cloudflare project
          branch: main
          gitHubToken: ${{ secrets.GITHUB_TOKEN }}
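Because the workflow also exposes `workflow_dispatch`, a production deploy can be triggered manually. Below is a small sketch of doing that through the GitHub REST API; the token variable and the assumption that the workflow lives in the `menloresearch/jan` repository on the `dev` branch come from context in this diff, not from the workflow itself.

```ts
// Sketch: manually dispatch the jan-astro-docs workflow via the GitHub REST API.
// Assumes a token with workflow scope is available in GITHUB_TOKEN.
const res = await fetch(
  'https://api.github.com/repos/menloresearch/jan/actions/workflows/jan-astro-docs.yml/dispatches',
  {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      Accept: 'application/vnd.github+json',
    },
    body: JSON.stringify({ ref: 'dev' }),
  },
)
if (!res.ok) throw new Error(`Dispatch failed: ${res.status}`)
```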
23 .gitignore (vendored)
@@ -55,4 +55,25 @@ archive/
# auto qa
autoqa/trajectories
autoqa/recordings
autoqa/__pycache__
autoqa/__pycache__

# Astro / Starlight specific
website/dist/
website/.astro/
website/src/content/config.ts.timestamp-*

# Nextra specific
docs/out/
docs/.next/

# General Node.js
**/node_modules
**/.env
**/.env.*
**/npm-debug.log*
**/yarn-debug.log*
**/yarn-error.log*
**/pnpm-debug.log*

# Combined output for local testing
combined-output/
824 docs/bun.lock
File diff suppressed because it is too large
@@ -18,6 +18,7 @@
    "@radix-ui/react-tooltip": "^1.0.7",
    "@scalar/api-reference-react": "^0.1.31",
    "@theguild/remark-mermaid": "^0.0.6",
    "astro-mermaid": "^1.0.4",
    "autoprefixer": "^10.0.1",
    "axios": "^1.6.8",
    "date-fns": "^3.6.0",
@@ -28,6 +29,7 @@
    "fs": "^0.0.1-security",
    "gray-matter": "^4.0.3",
    "lucide-react": "^0.522.0",
    "mermaid": "^11.9.0",
    "next": "^14.1.4",
    "next-seo": "^6.5.0",
    "next-sitemap": "^4.2.3",
@@ -11,6 +11,10 @@
    "type": "page",
    "title": "Documentation"
  },
  "products": {
    "type": "page",
    "title": "Products"
  },
  "local-server": {
    "type": "page",
    "title": "Jan Local Server",
@@ -26,11 +30,6 @@
    "title": "Integrations",
    "display": "hidden"
  },
  "platforms": {
    "type": "page",
    "title": "Platforms",
    "display": "hidden"
  },
  "changelog": {
    "type": "page",
    "title": "Changelog",
@@ -1,9 +0,0 @@
{
  "-- Switcher": {
    "type": "separator",
    "title": "Switcher"
  },
  "index": {
    "display": "hidden"
  }
}
@@ -1,87 +0,0 @@
---
title: Coming Soon
description: Exciting new features and platforms are on the way. Stay tuned for Jan Web, Jan Mobile, and our API Platform.
keywords:
  [
    Jan,
    Customizable Intelligence, LLM,
    local AI,
    privacy focus,
    free and open source,
    private and offline,
    conversational AI,
    no-subscription fee,
    large language models,
    coming soon,
    Jan Web,
    Jan Mobile,
    API Platform,
  ]
---

import { Callout } from 'nextra/components'

<div className="text-center py-12">
  <div className="mb-8">
    <h1 className="text-4xl font-bold bg-gradient-to-r from-blue-600 to-purple-600 bg-clip-text text-transparent mb-4 py-2">
      🚀 Coming Soon
    </h1>
    <p className="text-xl text-gray-600 dark:text-gray-300 max-w-2xl mx-auto">
      We're working on the next stage of Jan - making our local assistant more powerful and available in more platforms.
    </p>
  </div>

  <div className="grid grid-cols-1 md:grid-cols-3 gap-6 max-w-4xl mx-auto mb-12">
    <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg bg-gradient-to-br from-blue-50 to-indigo-50 dark:from-blue-900/20 dark:to-indigo-900/20">
      <div className="text-3xl mb-3">🌐</div>
      <h3 className="text-lg font-semibold mb-2">Jan Web</h3>
      <p className="text-sm text-gray-600 dark:text-gray-400">
        Access Jan directly from your browser with our powerful web interface
      </p>
    </div>

    <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg bg-gradient-to-br from-green-50 to-emerald-50 dark:from-green-900/20 dark:to-emerald-900/20">
      <div className="text-3xl mb-3">📱</div>
      <h3 className="text-lg font-semibold mb-2">Jan Mobile</h3>
      <p className="text-sm text-gray-600 dark:text-gray-400">
        Take Jan on the go with our native mobile applications
      </p>
    </div>

    <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg bg-gradient-to-br from-purple-50 to-pink-50 dark:from-purple-900/20 dark:to-pink-900/20">
      <div className="text-3xl mb-3">⚡</div>
      <h3 className="text-lg font-semibold mb-2">API Platform</h3>
      <p className="text-sm text-gray-600 dark:text-gray-400">
        Integrate Jan's capabilities into your applications with our API
      </p>
    </div>
  </div>

  <Callout type="info">
    **Stay Updated**: Follow our [GitHub repository](https://github.com/menloresearch/jan) and join our [Discord community](https://discord.com/invite/FTk2MvZwJH) for the latest updates on these exciting releases!
  </Callout>

  <div className="mt-12">
    <h2 className="text-2xl font-semibold mb-6">What to Expect</h2>
    <div className="text-left max-w-2xl mx-auto space-y-4">
      <div className="flex items-start gap-3">
        <span className="text-green-500 text-xl">✓</span>
        <div>
          <strong>Seamless Experience:</strong> Unified interface across all platforms
        </div>
      </div>
      <div className="flex items-start gap-3">
        <span className="text-green-500 text-xl">✓</span>
        <div>
          <strong>Privacy First:</strong> Same privacy-focused approach you trust
        </div>
      </div>
      <div className="flex items-start gap-3">
        <span className="text-green-500 text-xl">✓</span>
        <div>
          <strong>Developer Friendly:</strong> Robust APIs and comprehensive documentation
        </div>
      </div>
    </div>
  </div>
</div>
6 docs/src/pages/products/_meta.json (Normal file)
@@ -0,0 +1,6 @@
{
  "index": "Overview",
  "platforms": "Platforms",
  "models": "Models",
  "tools": "Tools"
}
132 docs/src/pages/products/index.mdx (Normal file)
@@ -0,0 +1,132 @@
---
title: Product Overview
description: AI that runs where you need it, how you need it. Jan is a full-stack, self-hostable AI solution.
---
import { Callout } from 'nextra/components'

Jan is moving from a local AI application to a complete full-stack AI solution that you can self-host. This includes models, applications, and tools that delight users and help them solve their problems.

## What We're Building

**Jan Factory (or Agent)** = Jan Models + Jan Application + Jan Tools

Unlike other AI assistants that handle specific tasks with a single model, or scatter many models across a myriad of separate solutions, Jan provides:
- Its own specialised models, each optimised for specific tasks like web search, creative writing, and translation
- Applications that work across all of your devices in an integrated way
- Tools that actually get things done

## Two Modes, One Experience

### Local (Incognito) Mode
Run AI models entirely on your device, giving you complete privacy with no internet required.

### Cloud Mode
Connect to more powerful models when needed - either self-hosted or via jan.ai.

<Callout type="info" title="Simple Choice">
Users shouldn't need to understand models, APIs, or technical details. Just choose Local for privacy or Cloud for power.
</Callout>

## Our Product Principles

### 1) It Just Works
1. Open Jan, start chatting
2. Onboarding is fully available but optional
3. Setting up an API key is optional
4. Selecting a local model is optional
5. Become a power user at your own pace, if you want to

We handle the complexity.

### 2) Cloud When Needed
Start completely locally and own your AI models. Add cloud capabilities only when you choose to.

### 3) Solve Problems, Not Settings
We help users get to answers quickly, not configuration options. Power users can dig deeper, but it's never required.

## Available on Every Device

<div className="grid grid-cols-1 md:grid-cols-2 gap-6 mt-6">
  <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h3 className="text-lg font-semibold mb-2">Jan Desktop</h3>
    <p className="text-sm text-gray-600 dark:text-gray-400 mb-4">
      **Available Now** <br />
      Your personal AI workstation. Run models locally or connect to the cloud. Powers your other devices.
    </p>
    <a href="/products/platforms/desktop" className="text-blue-600 dark:text-blue-400 font-semibold">Learn more →</a>
  </div>
  <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h3 className="text-lg font-semibold mb-2">Jan Web</h3>
    <p className="text-sm text-gray-600 dark:text-gray-400 mb-4">
      **Beta Launch Soon** <br />
      Access Jan from any browser with no setup. The default cloud backend for mobile and team collaboration.
    </p>
    <a href="/products/platforms/jan-ai" className="text-blue-600 dark:text-blue-400 font-semibold">Learn more →</a>
  </div>
  <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h3 className="text-lg font-semibold mb-2">Jan Mobile</h3>
    <p className="text-sm text-gray-600 dark:text-gray-400 mb-4">
      **Coming Q4 2025** <br />
      Connect to your Desktop or Server, or run models like Jan Nano locally for a seamless on-the-go experience.
    </p>
    <a href="/products/platforms/mobile" className="text-blue-600 dark:text-blue-400 font-semibold">Learn more →</a>
  </div>
  <div className="p-6 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h3 className="text-lg font-semibold mb-2">Jan Server</h3>
    <p className="text-sm text-gray-600 dark:text-gray-400 mb-4">
      **Coming Q2 2025** <br />
      A self-hosted solution for teams and enterprises. Your own private AI cloud with enterprise features.
    </p>
    <a href="/products/platforms/server" className="text-blue-600 dark:text-blue-400 font-semibold">Learn more →</a>
  </div>
</div>

## Jan Mobile: Three Modes, One Experience

Jan Mobile brings the same AI experience to your phone. Connect to your desktop, your server, or run models locally.

### How It Works
Jan Mobile adapts to your situation:

- **At Home:** Connect to your Jan Desktop over WiFi. `Your Phone → WiFi → Your Desktop → Response`
- **At Work:** Connect to your company Jan Server. `Your Phone → Internet → Company Server → Response`
- **On the Go:** Run Jan Nano on your phone or talk to your favorite cloud-based model. `Your Phone → Jan Nano (6GB) → Response`

No configuration needed. It just works.
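To make the three connection modes concrete, here is a small TypeScript sketch of how a client could pick an OpenAI-compatible backend per mode. The URLs, the `Mode` type, and the `default` model id are purely illustrative assumptions, not APIs defined by this commit.

```ts
// Illustrative only: choose a base URL per connection mode, then send a chat request.
type Mode = 'home' | 'work' | 'on-the-go'

const BASE_URLS: Record<Mode, string> = {
  home: 'http://192.168.1.20:1337/v1',       // hypothetical: Jan Desktop on your LAN
  work: 'https://jan.your-company.com/v1',   // hypothetical: company Jan Server
  'on-the-go': 'http://127.0.0.1:1337/v1',   // hypothetical: Jan Nano running on-device
}

async function ask(mode: Mode, prompt: string): Promise<string> {
  const res = await fetch(`${BASE_URLS[mode]}/chat/completions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ model: 'default', messages: [{ role: 'user', content: prompt }] }),
  })
  const data = await res.json()
  return data.choices[0].message.content
}
```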
### Key Features
- **Seamless Switching:** Move from home to office to airplane. One click and Jan adapts immediately.
- **Voice First:** Talk to Jan naturally. Responses can be spoken too.
- **Sync Everything:** Conversations, settings, and preferences follow you across devices.

## What Makes Jan Different

| Feature | Other AI Assistants | Jan |
| :--- | :--- | :--- |
| **Models** | Wrapper around Claude/GPT | Our own models + you can own them |
| **Data** | Your data on their servers | Your data stays yours |
| **Deployment** | Cloud only | Local, self-hosted, or cloud |
| **Cost** | Subscription forever | Free locally, pay for cloud |

## Development Timeline

Jan is actively developed with regular releases. Our development follows these key milestones:

### Current Focus
- **Jan Desktop:** Continuous improvements and model support
- **Jan Web:** Beta launch preparation
- **Model Development:** Jan Nano and Lucy optimization

### Next 6 Months
- Jan Web public beta
- Mobile app development
- Server deployment tools

### Future Vision
- Complete AI Agent platform
- Advanced tool integration
- Enterprise features

<Callout>
We're building AI that respects your choices. Run it locally for privacy, connect to cloud for power, or self-host for both.
</Callout>
4 docs/src/pages/products/models/_meta.json (Normal file)
@@ -0,0 +1,4 @@
{
  "jan-v1": "Jan V1",
  "jan-nano": "Jan Nano"
}
35 docs/src/pages/products/models/jan-nano.mdx (Normal file)
@@ -0,0 +1,35 @@
---
title: Jan Nano
description: Compact research model optimized for finding answers through tool use.
---
import { Callout } from 'nextra/components'

Jan Nano is a 4-billion parameter model designed for research and information retrieval. Instead of trying to know everything, it excels at finding anything through deep integration with Model Context Protocol (MCP) tools.

## Two Variants

| Model | Context Window | Size | Use Case |
|:---|:---|:---|:---|
| Jan Nano 32k | 32,768 tokens | 4-8GB | Quick research, general queries |
| Jan Nano 128k | 131,072 tokens | 8-12GB | Deep research, document analysis |

<Callout>
Jan Nano requires MCP-enabled tools (like web search) to reach its full potential. Enable MCP in **Settings → Advanced Settings**.
</Callout>

## What Makes Nano Different
- **Research-First Design:** Trained to find relevant information, synthesize findings, and provide accurate citations.
- **MCP Integration:** Works seamlessly with tools like web search, document analysis, and code repositories.
- **Extended Context:** The 128k variant can process entire codebases, book-length documents, or 50+ research papers simultaneously.
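Jan Nano's tool use goes through the OpenAI-compatible API that Jan exposes locally (shown at `localhost:1337` on the Desktop page below). The following is only a rough sketch of what a tool-augmented request could look like: the `jan-nano` model id, the `web_search` tool schema, and the use of the OpenAI function-calling format are assumptions for illustration, not details specified on this page.

```ts
// Sketch: ask Jan Nano a research question with a hypothetical web-search tool attached.
const res = await fetch('http://localhost:1337/v1/chat/completions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'jan-nano', // assumed model id
    messages: [{ role: 'user', content: 'Summarize recent work on speculative decoding, with sources.' }],
    tools: [
      {
        // assumed: OpenAI-style function-calling schema for an MCP-backed search tool
        type: 'function',
        function: {
          name: 'web_search',
          description: 'Search the web and return top results',
          parameters: {
            type: 'object',
            properties: { query: { type: 'string' } },
            required: ['query'],
          },
        },
      },
    ],
  }),
})
const data = await res.json()
console.log(data.choices[0].message)
```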
## Technical Details
- **Base:** 4B parameter transformer
- **Training:** Optimized for tool use and retrieval
- **Quantization:** Q4, Q8, FP16 variants available

## Philosophy
Most models try to be encyclopedias. Jan Nano is a research assistant. It doesn't memorize the internet—it knows how to navigate it.

---

[Download Jan Desktop](https://jan.ai/download) | [Model Details](https://huggingface.co/Menlo/Jan-nano) | [MCP Documentation](https://jan.ai/docs/mcp)
30 docs/src/pages/products/models/jan-v1.mdx (Normal file)
@@ -0,0 +1,30 @@
---
title: Jan V1
description: Our upcoming family of foundational models, built to compete with the best.
---
import { Callout } from 'nextra/components'

<Callout type='warning'>**In Development:** Jan V1 models are currently being trained and are not yet available.</Callout>

## Our Foundational Model Family

Jan V1 is our in-house family of models designed to compete directly with leading models like GPT-4 and Claude. We're building powerful, general-purpose models from the ground up to solve real-world problems with a focus on efficiency and privacy.

### Planned Model Lineup

| Model | Target Size | Intended Use Case | Availability |
|:------------|:------------|:-----------------------------|:--------------|
| Jan V1-7B | 4-8GB | Fast, efficient daily tasks | Coming Soon |
| Jan V1-13B | 8-16GB | Balanced power and performance | Coming Soon |
| Jan V1-70B | 40-64GB | Deep analysis, professional work | Coming Soon |
| Jan V1-180B | 100GB+ | Frontier research, complex tasks | Planned 2026 |

### What to Expect
- **Competitive Performance**: Aiming for results on par with leading closed-source models.
- **Optimized for Local Use**: Efficient quantized versions for running on your own hardware.
- **Privacy-Centric**: Trainable and runnable in your own environment, ensuring your data stays yours.
- **Seamless Integration**: Designed to work perfectly within the Jan ecosystem.

---

[Follow our development →](https://jan.ai/v1-updates)
6 docs/src/pages/products/platforms/_meta.json (Normal file)
@@ -0,0 +1,6 @@
{
  "desktop": "Desktop",
  "jan-ai": "Jan.ai",
  "mobile": "Mobile",
  "server": "Server"
}
138 docs/src/pages/products/platforms/desktop.mdx (Normal file)
@@ -0,0 +1,138 @@
---
title: Jan Desktop
description: AI that runs on your computer, not someone else's. Your personal AI workstation.
---
import { Callout, Tabs, Tab } from 'nextra/components'

This is how Jan started, and it has been available since day 1. Jan Desktop strives to be:

> Your personal AI workstation that helps with your use cases and powers other devices. Run models locally right away or bring an API key to connect to your favorite cloud-based models.

Jan Desktop is where it all starts. Download it, open it, and start chatting. Your AI runs on your computer with zero setup required.

## Two Modes, Zero Complexity

### Local Mode (Default)
Your conversations stay on your computer. No internet needed. Complete privacy.

### Cloud Mode
Connect to more powerful models when you need them. Your choice of provider.

<Callout>
First time opening Jan? It just works. No API keys, no model downloads, no settings required.
</Callout>

## What You Get

<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 mt-6">
  <div className="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 className="font-semibold text-lg mb-2">📡 Works Offline</h4>
    <p className="text-sm text-gray-600 dark:text-gray-400">Download once, use forever. Internet is optional.</p>
  </div>
  <div className="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 className="font-semibold text-lg mb-2">🛡️ Your Data Stays Yours</h4>
    <p className="text-sm text-gray-600 dark:text-gray-400">Everything stored in `~/jan`. No cloud backups unless you want them.</p>
  </div>
  <div className="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 className="font-semibold text-lg mb-2">🖥️ Powers Other Devices</h4>
    <p className="text-sm text-gray-600 dark:text-gray-400">Your desktop becomes an AI server for your phone and other computers.</p>
  </div>
  <div className="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 className="font-semibold text-lg mb-2">👨‍💻 Developer Friendly</h4>
    <p className="text-sm text-gray-600 dark:text-gray-400">Local API at `localhost:1337`. Works with any OpenAI-compatible tool.</p>
  </div>
  <div className="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 className="font-semibold text-lg mb-2">🚀 GPU Acceleration</h4>
    <p className="text-sm text-gray-600 dark:text-gray-400">Automatically detects and uses NVIDIA GPUs for faster performance.</p>
  </div>
  <div className="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 className="font-semibold text-lg mb-2">💻 Cross-Platform</h4>
    <p className="text-sm text-gray-600 dark:text-gray-400">Windows, macOS, and Linux support with native performance.</p>
  </div>
</div>

## System Requirements

### Minimum Requirements
- **RAM:** 8GB (models use less than 80% of available memory)
- **Storage:** 10GB+ free space
- **OS:** Windows 10, macOS 12, Ubuntu 20.04 or newer

### Recommended
- **RAM:** 16GB+ for larger models
- **Storage:** 20GB+ for multiple models
- **GPU:** NVIDIA GPU with 6GB+ VRAM for acceleration
- **OS:** Latest versions for best performance

## Getting Started

1. **Download Jan** from [jan.ai/download](https://jan.ai/download)
2. **Open the app** - it loads with everything ready
3. **Start chatting** - that's it

## Local Mode Features

- **Built-in Model:** Jan comes with a model that works immediately. No downloading, no waiting.
- **Smart Defaults:** Automatically uses your GPU if available and adjusts to your system's capabilities.
- **Complete Privacy:** No telemetry by default, no account required, and no data leaves your machine.

## Cloud Mode (Optional)

Connect to external AI providers when you need more power:

<Tabs items={['jan.ai', 'OpenAI', 'Self-Hosted']}>
  <Tab>
    Our cloud service (coming soon). One click to enable.
  </Tab>
  <Tab>
    Use your OpenAI API key for GPT-4 access.
  </Tab>
  <Tab>
    Connect to your own Jan Server.
  </Tab>
</Tabs>

## Desktop as Your AI Hub

Your desktop can power AI across all your devices by automatically becoming a local server.

- **Network Sharing:** Mobile apps connect over WiFi, and other computers can access your models.
- **API:** Available at `localhost:1337` for any OpenAI-compatible application.
- **Offline Access:** No internet required for local network connections.

## For Developers

### Local API Server
```bash
# Always running at localhost:1337
curl http://localhost:1337/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "default", "messages": [{"role": "user", "content": "Hello"}]}'
```
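The same request from Node or the browser is a one-to-one translation of the curl call above; this sketch assumes nothing beyond the `localhost:1337` OpenAI-compatible endpoint and the `"default"` model shown there.

```ts
// Sketch: call the local Jan API from TypeScript (mirrors the curl example above).
const res = await fetch('http://localhost:1337/v1/chat/completions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'default',
    messages: [{ role: 'user', content: 'Hello' }],
  }),
})
const data = await res.json()
console.log(data.choices[0].message.content)
```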
## Common Questions

<details>
<summary>Do I need to download models?</summary>
No. Jan comes with a default model that works immediately.
</details>
<details>
<summary>Can I use it offline?</summary>
Yes. Local Mode works completely offline once installed.
</details>
<details>
<summary>How do I switch models?</summary>
Most users don't need to. Power users can explore the Model Hub.
</details>
<details>
<summary>Is it really private?</summary>
Yes. In Local Mode, nothing leaves your computer.
</details>

<Callout type="error">
Having issues? Most problems are solved by restarting the app. If that doesn't work, check our [troubleshooting guide](/docs/troubleshooting).
</Callout>

## The Bottom Line

Jan Desktop is AI that respects that your computer is YOUR computer, not a terminal to someone else's server. Just software that works for you.
72 docs/src/pages/products/platforms/jan-ai.mdx (Normal file)
@@ -0,0 +1,72 @@
---
title: Jan.ai
description: Cloud AI that respects your privacy. Web-based access to Jan with no setup required.
---
import { Callout } from 'nextra/components'

**Status:** Beta Launch Soon

The web-based version of Jan with no setup required. It also serves as the default cloud mode for mobile and desktop users.

## What is Jan Web?

Jan Web is the cloud-hosted version of Jan that runs in your browser. No installation needed, instant access from any device, with the same AI experience you get locally.

<Callout>
Currently in beta development. Beta launch coming soon.
</Callout>

## How It Works

<div class="grid grid-cols-1 md:grid-cols-2 gap-4 mt-6">
  <div class="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 class="font-semibold text-lg mb-2">For Desktop Users</h4>
    <p class="text-sm text-gray-600 dark:text-gray-400">When you switch to Cloud Mode in Jan Desktop, it connects to Jan Web automatically.</p>
  </div>
  <div class="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 class="font-semibold text-lg mb-2">For Mobile Users</h4>
    <p class="text-sm text-gray-600 dark:text-gray-400">Jan Web serves as the default cloud backend for mobile apps when not connected to your desktop.</p>
  </div>
  <div class="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 class="font-semibold text-lg mb-2">For Web Users</h4>
    <p class="text-sm text-gray-600 dark:text-gray-400">Visit jan.ai directly in your browser for instant access to AI without downloading anything.</p>
  </div>
  <div class="p-4 border border-gray-200 dark:border-gray-700 rounded-lg">
    <h4 class="font-semibold text-lg mb-2">Team Collaboration</h4>
    <p class="text-sm text-gray-600 dark:text-gray-400">Share prompts, workflows, and collaborate on threads with your team members.</p>
  </div>
</div>

## Pricing

| Tier | Features | Price |
| :--- | :--- | :--- |
| **Free** | Free for everyone | $0 |
| **Pro** | Access our latest models<br/>Access other cloud providers without API keys | Coming Soon |
| **Enterprise** | Self-host or we host it for you<br/>Active support and SLAs<br/>SSO integration<br/>Team features | Contact Sales |

## For Developers

### API Access
```javascript
// Same API as local Jan
const response = await fetch('https://api.jan.ai/v1/chat/completions', {
  method: 'POST',
  headers: {
    'Authorization': 'Bearer YOUR_KEY',
    'Content-Type': 'application/json'
  },
  body: JSON.stringify({
    model: 'jan-nano',
    messages: [{ role: 'user', content: 'Hello' }]
  })
});
```

### OpenAI Compatible
```python
# Just change the base URL
from openai import OpenAI

client = OpenAI(
    base_url="https://api.jan.ai/v1",
    api_key="your-jan-key"
)
```
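For TypeScript projects, the official `openai` npm client (v4+) accepts a custom base URL in the same way. Using it against Jan's endpoint is an assumption here, since this page only documents the raw HTTP call and the Python client, so treat it as a sketch.

```ts
// Sketch: point the openai npm client at Jan's OpenAI-compatible API.
import OpenAI from 'openai'

const client = new OpenAI({
  baseURL: 'https://api.jan.ai/v1',
  apiKey: 'your-jan-key',
})

const completion = await client.chat.completions.create({
  model: 'jan-nano',
  messages: [{ role: 'user', content: 'Hello' }],
})
console.log(completion.choices[0].message.content)
```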
---

[Join Beta Waitlist](https://jan.ai/beta) | [Contact Sales](https://jan.ai/enterprise) | [API Documentation](/docs/api)
46 docs/src/pages/products/platforms/mobile.mdx (Normal file)
@@ -0,0 +1,46 @@
---
title: Jan Mobile
description: Your AI assistant on the go. Seamlessly connect to local, desktop, or server models.
---
import { Callout } from 'nextra/components'

**Status:** Coming Q4 2025

Jan Mobile brings the same AI experience to your phone. Connect to your desktop, your server, or run models locally.

## How It Works
Jan Mobile adapts to your situation:
- **At Home:** Connect to your Jan Desktop over WiFi.
- **At Work:** Connect to your company Jan Server.
- **On the Go:** Run Jan Nano on your phone or use a cloud model.

## Three Modes, One Experience

### Desktop Mode
Access larger, more powerful models running on your home computer. No phone battery drain.

### Server Mode
Connect to your organization's private AI cloud for team collaboration and access to shared knowledge.

### Local Mode
No connection? No problem. Run models like 'Jan Nano' directly on your phone for complete privacy and offline access.

## Key Features
<div class='grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 mt-6'>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Seamless Switching**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Voice First Interface**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Sync Everything**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**iOS and Android**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Adaptive Modes**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Jan Nano Integration**</div>
</div>

## Development Status
- Core architecture completed
- Desktop/Server connection protocols implemented
- Jan Nano mobile optimization in progress
- Closed beta planned for Q3 2025

---

[Get Notified](https://jan.ai/mobile) | [Follow on Discord](https://discord.gg/jan)
36 docs/src/pages/products/platforms/server.mdx (Normal file)
@@ -0,0 +1,36 @@
---
title: Jan Server
description: Your own private AI cloud. Self-hosted AI for teams and enterprises.
---
import { Callout } from 'nextra/components'

**Status:** Coming Q2 2025

Jan Server is Jan Desktop with multi-user support. Deploy it on your hardware to create your own private AI cloud for your team or organization.

## Why Organizations Need This
Jan Server gives you complete control over your AI infrastructure, ensuring total privacy, predictable costs, and compliance readiness.

## Key Features
<div class='grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4 mt-6'>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Multi-User Support**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Enterprise Authentication**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Flexible Deployment**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Admin Dashboard**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Team Knowledge Sharing**</div>
  <div class='p-4 border border-gray-200 dark:border-gray-700 rounded-lg'>**Same API as Desktop**</div>
</div>

## Deployment Options
- **Docker:** Single command setup
- **Kubernetes:** Enterprise scale and high availability
- **Bare Metal:** Maximum control and performance

## Scaling Guidelines
- **Small Teams (5-10 users):** Single powerful GPU (e.g., RTX 4090)
- **Departments (10-50 users):** 2-4 GPU cluster nodes
- **Enterprise (50+ users):** DGX cluster or custom configurations

---

[Join Early Access](https://jan.ai/server) | [Hardware Guide](/docs/server/hardware)
5 docs/src/pages/products/tools/_meta.json (Normal file)
@@ -0,0 +1,5 @@
{
  "deepresearch": "DeepResearch",
  "search": "Search",
  "browseruse": "BrowserUse"
}
22 docs/src/pages/products/tools/browseruse.mdx (Normal file)
@@ -0,0 +1,22 @@
---
title: BrowserUse
description: Native browser automation for Jan, enabling AI to interact with the web on your behalf.
---
import { Callout } from 'nextra/components'

<Callout type='warning'>**In Development:** This tool is planned and not yet available.</Callout>

## Let Jan Use Your Browser

'BrowserUse' is a native tool being developed for the Jan ecosystem that will allow the AI to securely control a browser to perform tasks, automate workflows, and interact with websites just like a human would.

Think of it as an integrated, privacy-first automation layer that turns Jan from a conversational AI into a powerful agent for action.

### Built on MCP
The tool will be implemented as a native **Model Context Protocol (MCP)** server within Jan, ensuring secure and standardized communication between the AI model and the browser.
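Since the tool is planned rather than shipped, the snippet below is only a shape sketch of what an MCP tool server can look like using the public `@modelcontextprotocol/sdk` TypeScript package. The `open_page` tool, its schema, and its behavior are invented for illustration and are not the BrowserUse design.

```ts
// Sketch: a minimal MCP server exposing one hypothetical browser-related tool.
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
import { z } from 'zod'

const server = new McpServer({ name: 'browser-use-sketch', version: '0.0.1' })

// Hypothetical tool: fetch a page and return some of its text so the model can read it.
server.tool('open_page', { url: z.string().url() }, async ({ url }) => {
  const html = await (await fetch(url)).text()
  return { content: [{ type: 'text', text: html.slice(0, 2000) }] }
})

await server.connect(new StdioServerTransport())
```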
### Planned Core Features:
- **Secure Sessions** in an isolated, sandboxed environment.
- **Natural Language Control** (e.g., 'Book a flight...')
- **Visual Understanding** to interpret page content.
- **User in the Loop** for critical actions.
23 docs/src/pages/products/tools/deepresearch.mdx (Normal file)
@@ -0,0 +1,23 @@
---
title: DeepResearch
description: An AI agent that performs comprehensive, multi-step research for you.
---
import { Callout } from 'nextra/components'

<Callout type='warning'>**In Development:** This tool is planned and not yet available.</Callout>

## Your Personal Research Analyst

'DeepResearch' is a planned native tool for Jan that transforms it into a powerful research agent. Give it a complex question, and it will autonomously browse, analyze, and synthesize information from numerous sources to deliver a comprehensive, structured report.

Think of it as Jan's answer to the advanced research capabilities seen in **OpenAI's ChatGPT** and **Google's Gemini**, but built with privacy and user control at its core.

### How It Will Work
Unlike a simple web search that returns a list of links, 'DeepResearch' will understand your goal, create a research plan, execute it, and deliver a final, synthesized document with citations.

### Planned Core Features:
- **Autonomous Multi-Step Research**
- **Comprehensive Source Analysis**
- **Structured Report Generation**
- **Full Transparency with Citations**
- **Local-First Privacy**
25 docs/src/pages/products/tools/search.mdx (Normal file)
@@ -0,0 +1,25 @@
---
title: Search
description: A native search tool that gives you answers, not just links, with complete privacy.
---
import { Callout } from 'nextra/components'

<Callout type='warning'>**In Development:** This tool is planned and not yet available.</Callout>

## Answers, Not Just Links

'Search' is a planned native tool for Jan that rethinks web search. Instead of just giving you a list of links to sift through, it understands your question, scours the web, and provides a direct, synthesized answer with sources cited.

Think of it as a private, self-hosted alternative to services like **Perplexity.ai**, integrated directly into your AI assistant.

### How It's Different
- **Privacy-First:** Your search queries are processed locally and anonymized.
- **Direct Answers:** Get a concise, accurate answer compiled from the best sources.
- **Cited Sources:** Every piece of information is backed by a verifiable source.
- **Conversational Follow-up:** Ask follow-up questions in a natural way.

### Planned Core Features:
- **Real-Time Information**
- **Source Verification**
- **Customizable Focus**
- **Seamless Integration** with other tools
23313 docs/yarn.lock
File diff suppressed because it is too large
@@ -3,22 +3,27 @@ import { defineConfig } from 'astro/config'
import starlight from '@astrojs/starlight'
import starlightThemeRapide from 'starlight-theme-rapide'
import starlightSidebarTopics from 'starlight-sidebar-topics'
import starlightVideos from 'starlight-videos'
// import starlightOpenAPI, { openAPISidebarGroups } from 'starlight-openapi'
import mermaid from 'astro-mermaid'

// https://astro.build/config
export default defineConfig({
// Deploy to the new v2 subdomain
site: 'https://v2.jan.ai',
// No 'base' property is needed, as this will be deployed to the root of the subdomain.
integrations: [
mermaid({
theme: 'default',
autoTheme: true,
}),
starlight({
title: '👋 Jan',
favicon: 'jan2.png',
plugins: [
starlightThemeRapide(),
starlightVideos(),
starlightSidebarTopics(
[
{
label: 'Jan',
label: 'Jan Desktop',
link: '/',
icon: 'rocket',
items: [
@@ -27,7 +32,7 @@ export default defineConfig({
items: [
{
label: 'Install 👋 Jan',
collapsed: true,
collapsed: false,
autogenerate: { directory: 'jan/installation' },
},
{ label: 'Start Chatting', slug: 'jan/threads' },
@@ -40,13 +45,18 @@ export default defineConfig({
],
},
{
label: 'TUTORIALS',
label: 'Cloud Providers',
items: [
{ label: 'Translation', slug: 'jan/tutorials/translation' },
{ label: 'Anthropic', slug: 'jan/remote-models/anthropic' },
{ label: 'OpenAI', slug: 'jan/remote-models/openai' },
{ label: 'Gemini', slug: 'jan/remote-models/google' },
{
label: 'Creative Writing',
slug: 'jan/tutorials/creative-writing',
label: 'OpenRouter',
slug: 'jan/remote-models/openrouter',
},
{ label: 'Cohere', slug: 'jan/remote-models/cohere' },
{ label: 'Mistral', slug: 'jan/remote-models/mistralai' },
{ label: 'Groq', slug: 'jan/remote-models/groq' },
],
},
{
@@ -70,7 +80,36 @@ export default defineConfig({
{
label: 'MCP Examples',
collapsed: true,
autogenerate: { directory: 'jan/mcp-examples' },
items: [
{
label: 'Code Sandbox (E2B)',
slug: 'jan/mcp-examples/data-analysis/e2b',
},
{
label: 'Web Search with Exa',
slug: 'jan/mcp-examples/search/exa',
},
],
},
],
},
{
label: 'Local Server',
items: [
{ label: 'Server Setup', slug: 'local-server/api-server' },
{
label: 'Jan Data Folder',
slug: 'local-server/data-folder',
},
{ label: 'Server Settings', slug: 'local-server/settings' },
{
label: 'Llama.cpp Server',
slug: 'local-server/llama-cpp',
},
{
label: 'Integrations',
collapsed: true,
autogenerate: { directory: 'local-server/integrations' },
},
],
},
@@ -86,79 +125,27 @@ export default defineConfig({
],
},
{
label: 'Local Server',
link: '/local-server/',
icon: 'setting',
items: [
{ label: 'Server Setup', slug: 'local-server/api-server' },
{ label: 'Jan Data Folder', slug: 'local-server/data-folder' },
{ label: 'Settings', slug: 'local-server/settings' },
{ label: 'Llama.cpp', slug: 'local-server/llama-cpp' },
{
label: 'Integrations',
collapsed: true,
autogenerate: { directory: 'local-server/integrations' },
},
{
label: 'Troubleshooting',
slug: 'local-server/troubleshooting',
},
],
label: 'Jan Mobile',
link: '/mobile/',
badge: { text: 'Coming Soon', variant: 'caution' },
icon: 'phone',
items: [{ label: 'Overview', slug: 'mobile' }],
},
{
label: 'Jan AI University',
link: '/university/',
icon: 'star',
id: 'university',
items: [
{ label: 'Welcome', slug: 'university' },
{
label: 'Getting Started Path',
items: [
{
label: 'Getting to Know Jan Series',
slug: 'university/getting-started/getting-to-know-jan',
},
{
label: 'Installation Guide',
slug: 'university/getting-started/installation',
},
{
label: 'Introduction Course',
slug: 'university/getting-started/introduction',
},
],
},
{
label: 'Advanced Path',
items: [
{
label: 'Using MCPs Tutorial Series',
slug: 'university/advanced/using-mcps',
},
{
label: 'API Integration',
slug: 'university/advanced/api-integration',
},
{
label: 'Performance Optimization',
slug: 'university/advanced/performance-optimization',
},
],
},
],
label: 'Jan Server',
link: '/server/',
badge: { text: 'Coming Soon', variant: 'caution' },
icon: 'forward-slash',
items: [{ label: 'Overview', slug: 'server' }],
},
],
{
exclude: ['/prods', '/api-reference', '/products', '/products/**/*'],
topics: {
university: [
'/university/getting-started/what-is-jan',
'/university/getting-started/privacy-first-ai',
'/university/advanced/mcp-introduction',
'/university/advanced/search-integration-exa',
],
},
exclude: [
'/prods',
'/api-reference',
'/products',
'/products/**/*',
],
}
),
],
@@ -168,6 +155,16 @@ export default defineConfig({
label: 'GitHub',
href: 'https://github.com/menloresearch/jan',
},
{
icon: 'x.com',
label: 'X',
href: 'https://twitter.com/jandotai',
},
{
icon: 'discord',
label: 'Discord',
href: 'https://discord.com/invite/FTk2MvZwJH',
},
],
components: {
Header: './src/components/CustomNav.astro',
269 website/bun.lock
@@ -7,6 +7,7 @@
    "@astrojs/starlight": "^0.35.1",
    "@lorenzo_lewis/starlight-utils": "^0.3.2",
    "astro": "^5.6.1",
    "astro-mermaid": "^1.0.4",
    "gsap": "^3.13.0",
    "phosphor-astro": "^2.1.0",
    "sharp": "^0.34.2",
|
||||
|
||||
"@types/geojson": ["@types/geojson@7946.0.16", "", {}, "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg=="],
|
||||
|
||||
"@types/hast": ["@types/hast@3.0.4", "", { "dependencies": { "@types/unist": "*" } }, "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ=="],
|
||||
|
||||
"@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="],
|
||||
@ -276,6 +363,8 @@
|
||||
|
||||
"@types/sax": ["@types/sax@1.2.7", "", { "dependencies": { "@types/node": "*" } }, "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A=="],
|
||||
|
||||
"@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="],
|
||||
|
||||
"@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],
|
||||
|
||||
"@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="],
|
||||
@ -314,6 +403,8 @@
|
||||
|
||||
"astro-integration-kit": ["astro-integration-kit@0.18.0", "", { "dependencies": { "pathe": "^1.1.2", "recast": "^0.23.7" }, "peerDependencies": { "astro": "^4.12.0 || ^5.0.0" } }, "sha512-Z0QW5IQjosuKQDEGYYkvUX6EhEtrmE4/oViqWz23QveV8U7AuyFsTdg00WRNPevWZl/5a4lLUeDpv4bCRynRRg=="],
|
||||
|
||||
"astro-mermaid": ["astro-mermaid@1.0.4", "", { "dependencies": { "mdast-util-to-string": "^4.0.0", "unist-util-visit": "^5.0.0" }, "peerDependencies": { "astro": "^4.0.0 || ^5.0.0", "mermaid": "^10.0.0 || ^11.0.0" } }, "sha512-2M4bVjqLpDB2EZ4EfD6Utzs7VEEORmlt5hNZcMK54IcKWzflohKvowCzg79RHoAdu30W8a4aECAExH8mF7wG4w=="],
|
||||
|
||||
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
|
||||
|
||||
"bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
|
||||
@ -350,6 +441,10 @@
|
||||
|
||||
"character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="],
|
||||
|
||||
"chevrotain": ["chevrotain@11.0.3", "", { "dependencies": { "@chevrotain/cst-dts-gen": "11.0.3", "@chevrotain/gast": "11.0.3", "@chevrotain/regexp-to-ast": "11.0.3", "@chevrotain/types": "11.0.3", "@chevrotain/utils": "11.0.3", "lodash-es": "4.17.21" } }, "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw=="],
|
||||
|
||||
"chevrotain-allstar": ["chevrotain-allstar@0.3.1", "", { "dependencies": { "lodash-es": "^4.17.21" }, "peerDependencies": { "chevrotain": "^11.0.0" } }, "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw=="],
|
||||
|
||||
"chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
|
||||
|
||||
"ci-info": ["ci-info@4.3.0", "", {}, "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ=="],
|
||||
@ -372,12 +467,18 @@
|
||||
|
||||
"comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg=="],
|
||||
|
||||
"commander": ["commander@8.3.0", "", {}, "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww=="],
|
||||
|
||||
"common-ancestor-path": ["common-ancestor-path@1.0.1", "", {}, "sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w=="],
|
||||
|
||||
"confbox": ["confbox@0.2.2", "", {}, "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ=="],
|
||||
|
||||
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
|
||||
|
||||
"cookie-es": ["cookie-es@1.2.2", "", {}, "sha512-+W7VmiVINB+ywl1HGXJXmrqkOhpKrIiVZV6tQuV54ZyQC7MMuBt81Vc336GMLoHBq5hV/F9eXgt5Mnx0Rha5Fg=="],
|
||||
|
||||
"cose-base": ["cose-base@1.0.3", "", { "dependencies": { "layout-base": "^1.0.0" } }, "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg=="],
|
||||
|
||||
"cross-fetch": ["cross-fetch@3.2.0", "", { "dependencies": { "node-fetch": "^2.7.0" } }, "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q=="],
|
||||
|
||||
"crossws": ["crossws@0.3.5", "", { "dependencies": { "uncrypto": "^0.1.3" } }, "sha512-ojKiDvcmByhwa8YYqbQI/hg7MEU0NC03+pSdEq4ZUnZR9xXpwk7E43SMNGkn+JxJGPFtNvQ48+vV2p+P1ml5PA=="],
|
||||
@ -388,12 +489,88 @@
|
||||
|
||||
"cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="],
|
||||
|
||||
"cytoscape": ["cytoscape@3.32.1", "", {}, "sha512-dbeqFTLYEwlFg7UGtcZhCCG/2WayX72zK3Sq323CEX29CY81tYfVhw1MIdduCtpstB0cTOhJswWlM/OEB3Xp+Q=="],
|
||||
|
||||
"cytoscape-cose-bilkent": ["cytoscape-cose-bilkent@4.1.0", "", { "dependencies": { "cose-base": "^1.0.0" }, "peerDependencies": { "cytoscape": "^3.2.0" } }, "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ=="],
|
||||
|
||||
"cytoscape-fcose": ["cytoscape-fcose@2.2.0", "", { "dependencies": { "cose-base": "^2.2.0" }, "peerDependencies": { "cytoscape": "^3.2.0" } }, "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ=="],
|
||||
|
||||
"d3": ["d3@7.9.0", "", { "dependencies": { "d3-array": "3", "d3-axis": "3", "d3-brush": "3", "d3-chord": "3", "d3-color": "3", "d3-contour": "4", "d3-delaunay": "6", "d3-dispatch": "3", "d3-drag": "3", "d3-dsv": "3", "d3-ease": "3", "d3-fetch": "3", "d3-force": "3", "d3-format": "3", "d3-geo": "3", "d3-hierarchy": "3", "d3-interpolate": "3", "d3-path": "3", "d3-polygon": "3", "d3-quadtree": "3", "d3-random": "3", "d3-scale": "4", "d3-scale-chromatic": "3", "d3-selection": "3", "d3-shape": "3", "d3-time": "3", "d3-time-format": "4", "d3-timer": "3", "d3-transition": "3", "d3-zoom": "3" } }, "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA=="],
|
||||
|
||||
"d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg=="],
|
||||
|
||||
"d3-axis": ["d3-axis@3.0.0", "", {}, "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw=="],
|
||||
|
||||
"d3-brush": ["d3-brush@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-drag": "2 - 3", "d3-interpolate": "1 - 3", "d3-selection": "3", "d3-transition": "3" } }, "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ=="],
|
||||
|
||||
"d3-chord": ["d3-chord@3.0.1", "", { "dependencies": { "d3-path": "1 - 3" } }, "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g=="],
|
||||
|
||||
"d3-color": ["d3-color@3.1.0", "", {}, "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="],
|
||||
|
||||
"d3-contour": ["d3-contour@4.0.2", "", { "dependencies": { "d3-array": "^3.2.0" } }, "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA=="],
|
||||
|
||||
"d3-delaunay": ["d3-delaunay@6.0.4", "", { "dependencies": { "delaunator": "5" } }, "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A=="],
|
||||
|
||||
"d3-dispatch": ["d3-dispatch@3.0.1", "", {}, "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg=="],
|
||||
|
||||
"d3-drag": ["d3-drag@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-selection": "3" } }, "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg=="],
|
||||
|
||||
"d3-dsv": ["d3-dsv@3.0.1", "", { "dependencies": { "commander": "7", "iconv-lite": "0.6", "rw": "1" }, "bin": { "csv2json": "bin/dsv2json.js", "csv2tsv": "bin/dsv2dsv.js", "dsv2dsv": "bin/dsv2dsv.js", "dsv2json": "bin/dsv2json.js", "json2csv": "bin/json2dsv.js", "json2dsv": "bin/json2dsv.js", "json2tsv": "bin/json2dsv.js", "tsv2csv": "bin/dsv2dsv.js", "tsv2json": "bin/dsv2json.js" } }, "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q=="],
|
||||
|
||||
"d3-ease": ["d3-ease@3.0.1", "", {}, "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w=="],
|
||||
|
||||
"d3-fetch": ["d3-fetch@3.0.1", "", { "dependencies": { "d3-dsv": "1 - 3" } }, "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw=="],
|
||||
|
||||
"d3-force": ["d3-force@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-quadtree": "1 - 3", "d3-timer": "1 - 3" } }, "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg=="],
|
||||
|
||||
"d3-format": ["d3-format@3.1.0", "", {}, "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA=="],
|
||||
|
||||
"d3-geo": ["d3-geo@3.1.1", "", { "dependencies": { "d3-array": "2.5.0 - 3" } }, "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q=="],
|
||||
|
||||
"d3-hierarchy": ["d3-hierarchy@3.1.2", "", {}, "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA=="],
|
||||
|
||||
"d3-interpolate": ["d3-interpolate@3.0.1", "", { "dependencies": { "d3-color": "1 - 3" } }, "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g=="],
|
||||
|
||||
"d3-path": ["d3-path@3.1.0", "", {}, "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ=="],
|
||||
|
||||
"d3-polygon": ["d3-polygon@3.0.1", "", {}, "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg=="],
|
||||
|
||||
"d3-quadtree": ["d3-quadtree@3.0.1", "", {}, "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw=="],
|
||||
|
||||
"d3-random": ["d3-random@3.0.1", "", {}, "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ=="],
|
||||
|
||||
"d3-sankey": ["d3-sankey@0.12.3", "", { "dependencies": { "d3-array": "1 - 2", "d3-shape": "^1.2.0" } }, "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ=="],
|
||||
|
||||
"d3-scale": ["d3-scale@4.0.2", "", { "dependencies": { "d3-array": "2.10.0 - 3", "d3-format": "1 - 3", "d3-interpolate": "1.2.0 - 3", "d3-time": "2.1.1 - 3", "d3-time-format": "2 - 4" } }, "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ=="],
|
||||
|
||||
"d3-scale-chromatic": ["d3-scale-chromatic@3.1.0", "", { "dependencies": { "d3-color": "1 - 3", "d3-interpolate": "1 - 3" } }, "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ=="],
|
||||
|
||||
"d3-selection": ["d3-selection@3.0.0", "", {}, "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ=="],
|
||||
|
||||
"d3-shape": ["d3-shape@3.2.0", "", { "dependencies": { "d3-path": "^3.1.0" } }, "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA=="],
|
||||
|
||||
"d3-time": ["d3-time@3.1.0", "", { "dependencies": { "d3-array": "2 - 3" } }, "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q=="],
|
||||
|
||||
"d3-time-format": ["d3-time-format@4.1.0", "", { "dependencies": { "d3-time": "1 - 3" } }, "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg=="],
|
||||
|
||||
"d3-timer": ["d3-timer@3.0.1", "", {}, "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA=="],
|
||||
|
||||
"d3-transition": ["d3-transition@3.0.1", "", { "dependencies": { "d3-color": "1 - 3", "d3-dispatch": "1 - 3", "d3-ease": "1 - 3", "d3-interpolate": "1 - 3", "d3-timer": "1 - 3" }, "peerDependencies": { "d3-selection": "2 - 3" } }, "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w=="],
|
||||
|
||||
"d3-zoom": ["d3-zoom@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-drag": "2 - 3", "d3-interpolate": "1 - 3", "d3-selection": "2 - 3", "d3-transition": "2 - 3" } }, "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw=="],
|
||||
|
||||
"dagre-d3-es": ["dagre-d3-es@7.0.11", "", { "dependencies": { "d3": "^7.9.0", "lodash-es": "^4.17.21" } }, "sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw=="],
|
||||
|
||||
"dayjs": ["dayjs@1.11.13", "", {}, "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg=="],
|
||||
|
||||
"debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="],
|
||||
|
||||
"decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q=="],
|
||||
|
||||
"defu": ["defu@6.1.4", "", {}, "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg=="],
|
||||
|
||||
"delaunator": ["delaunator@5.0.1", "", { "dependencies": { "robust-predicates": "^3.0.2" } }, "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw=="],
|
||||
|
||||
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
|
||||
|
||||
"destr": ["destr@2.0.5", "", {}, "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA=="],
|
||||
@ -414,6 +591,8 @@
|
||||
|
||||
"dlv": ["dlv@1.1.3", "", {}, "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA=="],
|
||||
|
||||
"dompurify": ["dompurify@3.2.6", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ=="],
|
||||
|
||||
"dset": ["dset@3.1.4", "", {}, "sha512-2QF/g9/zTaPDc3BjNcVTGoBbXBgYfMTTceLaYcFJ/W9kggFUkhxD/hMEeuLKbugyef9SqAx8cpgwlIP/jinUTA=="],
|
||||
|
||||
"emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="],
|
||||
@ -450,6 +629,8 @@
|
||||
|
||||
"expressive-code": ["expressive-code@0.41.3", "", { "dependencies": { "@expressive-code/core": "^0.41.3", "@expressive-code/plugin-frames": "^0.41.3", "@expressive-code/plugin-shiki": "^0.41.3", "@expressive-code/plugin-text-markers": "^0.41.3" } }, "sha512-YLnD62jfgBZYrXIPQcJ0a51Afv9h8VlWqEGK9uU2T5nL/5rb8SnA86+7+mgCZe5D34Tff5RNEA5hjNVJYHzrFg=="],
|
||||
|
||||
"exsolve": ["exsolve@1.0.7", "", {}, "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw=="],
|
||||
|
||||
"extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
@ -470,10 +651,14 @@
|
||||
|
||||
"github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="],
|
||||
|
||||
"globals": ["globals@15.15.0", "", {}, "sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg=="],
|
||||
|
||||
"gsap": ["gsap@3.13.0", "", {}, "sha512-QL7MJ2WMjm1PHWsoFrAQH/J8wUeqZvMtHO58qdekHpCfhvhSL4gSiz6vJf5EeMP0LOn3ZCprL2ki/gjED8ghVw=="],
|
||||
|
||||
"h3": ["h3@1.15.3", "", { "dependencies": { "cookie-es": "^1.2.2", "crossws": "^0.3.4", "defu": "^6.1.4", "destr": "^2.0.5", "iron-webcrypto": "^1.2.1", "node-mock-http": "^1.0.0", "radix3": "^1.1.2", "ufo": "^1.6.1", "uncrypto": "^0.1.3" } }, "sha512-z6GknHqyX0h9aQaTx22VZDf6QyZn+0Nh+Ym8O/u0SGSkyF5cuTJYKlc8MkzW3Nzf9LE1ivcpmYC3FUGpywhuUQ=="],
|
||||
|
||||
"hachure-fill": ["hachure-fill@0.5.2", "", {}, "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg=="],
|
||||
|
||||
"hast-util-embedded": ["hast-util-embedded@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-is-element": "^3.0.0" } }, "sha512-naH8sld4Pe2ep03qqULEtvYr7EjrLK2QHY8KJR6RJkTUjPGObe1vnx585uzem2hGra+s1q08DZZpfgDVYRbaXA=="],
|
||||
|
||||
"hast-util-format": ["hast-util-format@1.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-embedded": "^3.0.0", "hast-util-minify-whitespace": "^1.0.0", "hast-util-phrasing": "^3.0.0", "hast-util-whitespace": "^3.0.0", "html-whitespace-sensitive-tag-names": "^3.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-yY1UDz6bC9rDvCWHpx12aIBGRG7krurX0p0Fm6pT547LwDIZZiNr8a+IHDogorAdreULSEzP82Nlv5SZkHZcjA=="],
|
||||
@ -524,10 +709,14 @@
|
||||
|
||||
"i18next": ["i18next@23.16.8", "", { "dependencies": { "@babel/runtime": "^7.23.2" } }, "sha512-06r/TitrM88Mg5FdUXAKL96dJMzgqLE5dv3ryBAra4KCwD9mJ4ndOTS95ZuymIGoE+2hzfdaMak2X11/es7ZWg=="],
|
||||
|
||||
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
||||
|
||||
"import-meta-resolve": ["import-meta-resolve@4.1.0", "", {}, "sha512-I6fiaX09Xivtk+THaMfAwnA3MVA5Big1WHF1Dfx9hFuvNIWpXnorlkzhcQf6ehrqQiiZECRt1poOAkPmer3ruw=="],
|
||||
|
||||
"inline-style-parser": ["inline-style-parser@0.2.4", "", {}, "sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q=="],
|
||||
|
||||
"internmap": ["internmap@2.0.3", "", {}, "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg=="],
|
||||
|
||||
"iron-webcrypto": ["iron-webcrypto@1.2.1", "", {}, "sha512-feOM6FaSr6rEABp/eDfVseKyTMDt+KGpeB35SkVn9Tyn0CqvVsY3EwI0v5i8nMHyJnzCIQf7nsy3p41TPkJZhg=="],
|
||||
|
||||
"is-alphabetical": ["is-alphabetical@2.0.1", "", {}, "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="],
|
||||
@ -560,14 +749,28 @@
|
||||
|
||||
"jsonpointer": ["jsonpointer@5.0.1", "", {}, "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ=="],
|
||||
|
||||
"katex": ["katex@0.16.22", "", { "dependencies": { "commander": "^8.3.0" }, "bin": { "katex": "cli.js" } }, "sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg=="],
|
||||
|
||||
"khroma": ["khroma@2.1.0", "", {}, "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw=="],
|
||||
|
||||
"kleur": ["kleur@4.1.5", "", {}, "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ=="],
|
||||
|
||||
"klona": ["klona@2.0.6", "", {}, "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA=="],
|
||||
|
||||
"kolorist": ["kolorist@1.8.0", "", {}, "sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ=="],
|
||||
|
||||
"langium": ["langium@3.3.1", "", { "dependencies": { "chevrotain": "~11.0.3", "chevrotain-allstar": "~0.3.0", "vscode-languageserver": "~9.0.1", "vscode-languageserver-textdocument": "~1.0.11", "vscode-uri": "~3.0.8" } }, "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w=="],
|
||||
|
||||
"layout-base": ["layout-base@1.0.2", "", {}, "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg=="],
|
||||
|
||||
"leven": ["leven@3.1.0", "", {}, "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A=="],
|
||||
|
||||
"lite-youtube-embed": ["lite-youtube-embed@0.3.3", "", {}, "sha512-gFfVVnj6NRjxVfJKo3qoLtpi0v5mn3AcR4eKD45wrxQuxzveFJUb+7Cr6uV6n+DjO8X3p0UzPPquhGt0H/y+NA=="],
|
||||
|
||||
"local-pkg": ["local-pkg@1.1.1", "", { "dependencies": { "mlly": "^1.7.4", "pkg-types": "^2.0.1", "quansync": "^0.2.8" } }, "sha512-WunYko2W1NcdfAFpuLUoucsgULmgDBRkdxHxWQ7mK0cQqwPiy8E1enjuRBrhLtZkB5iScJ1XIPdhVEFK8aOLSg=="],
|
||||
|
||||
"lodash-es": ["lodash-es@4.17.21", "", {}, "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw=="],
|
||||
|
||||
"longest-streak": ["longest-streak@3.1.0", "", {}, "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="],
|
||||
|
||||
"lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
|
||||
@ -580,6 +783,8 @@
|
||||
|
||||
"markdown-table": ["markdown-table@3.0.4", "", {}, "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw=="],
|
||||
|
||||
"marked": ["marked@16.1.1", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-ij/2lXfCRT71L6u0M29tJPhP0bM5shLL3u5BePhFwPELj2blMJ6GDtD7PfJhRLhJ/c2UwrK17ySVcDzy2YHjHQ=="],
|
||||
|
||||
"mdast-util-definitions": ["mdast-util-definitions@6.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "unist-util-visit": "^5.0.0" } }, "sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ=="],
|
||||
|
||||
"mdast-util-directive": ["mdast-util-directive@3.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-visit-parents": "^6.0.0" } }, "sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q=="],
|
||||
@ -618,6 +823,8 @@
|
||||
|
||||
"mdn-data": ["mdn-data@2.12.2", "", {}, "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA=="],
|
||||
|
||||
"mermaid": ["mermaid@11.9.0", "", { "dependencies": { "@braintree/sanitize-url": "^7.0.4", "@iconify/utils": "^2.1.33", "@mermaid-js/parser": "^0.6.2", "@types/d3": "^7.4.3", "cytoscape": "^3.29.3", "cytoscape-cose-bilkent": "^4.1.0", "cytoscape-fcose": "^2.2.0", "d3": "^7.9.0", "d3-sankey": "^0.12.3", "dagre-d3-es": "7.0.11", "dayjs": "^1.11.13", "dompurify": "^3.2.5", "katex": "^0.16.22", "khroma": "^2.1.0", "lodash-es": "^4.17.21", "marked": "^16.0.0", "roughjs": "^4.6.6", "stylis": "^4.3.6", "ts-dedent": "^2.2.0", "uuid": "^11.1.0" } }, "sha512-YdPXn9slEwO0omQfQIsW6vS84weVQftIyyTGAZCwM//MGhPzL1+l6vO6bkf0wnP4tHigH1alZ5Ooy3HXI2gOag=="],
|
||||
|
||||
"micromark": ["micromark@4.0.2", "", { "dependencies": { "@types/debug": "^4.0.0", "debug": "^4.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA=="],
|
||||
|
||||
"micromark-core-commonmark": ["micromark-core-commonmark@2.0.3", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-destination": "^2.0.0", "micromark-factory-label": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-title": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-html-tag-name": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg=="],
|
||||
@ -690,6 +897,8 @@
|
||||
|
||||
"micromark-util-types": ["micromark-util-types@2.0.2", "", {}, "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA=="],
|
||||
|
||||
"mlly": ["mlly@1.7.4", "", { "dependencies": { "acorn": "^8.14.0", "pathe": "^2.0.1", "pkg-types": "^1.3.0", "ufo": "^1.5.4" } }, "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw=="],
|
||||
|
||||
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
@ -738,6 +947,8 @@
|
||||
|
||||
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
|
||||
|
||||
"path-data-parser": ["path-data-parser@0.1.0", "", {}, "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w=="],
|
||||
|
||||
"pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
|
||||
|
||||
"phosphor-astro": ["phosphor-astro@2.1.0", "", {}, "sha512-qyYUlxF8DbfHc+85DDGPL04ghNBwrVK75EsNsBfYOChiCeCRwAwfbHxj/qqPrrSFPMgh9cUyEvgKYjI/7bjCUA=="],
|
||||
@ -746,6 +957,12 @@
|
||||
|
||||
"picomatch": ["picomatch@4.0.3", "", {}, "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="],
|
||||
|
||||
"pkg-types": ["pkg-types@2.2.0", "", { "dependencies": { "confbox": "^0.2.2", "exsolve": "^1.0.7", "pathe": "^2.0.3" } }, "sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ=="],
|
||||
|
||||
"points-on-curve": ["points-on-curve@0.2.0", "", {}, "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A=="],
|
||||
|
||||
"points-on-path": ["points-on-path@0.2.1", "", { "dependencies": { "path-data-parser": "0.1.0", "points-on-curve": "0.2.0" } }, "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g=="],
|
||||
|
||||
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
|
||||
|
||||
"postcss-nested": ["postcss-nested@6.2.0", "", { "dependencies": { "postcss-selector-parser": "^6.1.1" }, "peerDependencies": { "postcss": "^8.2.14" } }, "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ=="],
|
||||
@ -758,6 +975,8 @@
|
||||
|
||||
"property-information": ["property-information@7.1.0", "", {}, "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ=="],
|
||||
|
||||
"quansync": ["quansync@0.2.10", "", {}, "sha512-t41VRkMYbkHyCYmOvx/6URnN80H7k4X0lLdBMGsz+maAwrJQYB1djpV6vHrQIBE0WBSGqhtEHrK9U3DWWH8v7A=="],
|
||||
|
||||
"radix3": ["radix3@1.1.2", "", {}, "sha512-b484I/7b8rDEdSDKckSSBA8knMpcdsXudlE/LNL639wFoHKwLbEkQFZHWEYwDC0wa0FKUcCY+GAF73Z7wxNVFA=="],
|
||||
|
||||
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
|
||||
@ -818,8 +1037,16 @@
|
||||
|
||||
"retext-stringify": ["retext-stringify@4.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0", "nlcst-to-string": "^4.0.0", "unified": "^11.0.0" } }, "sha512-rtfN/0o8kL1e+78+uxPTqu1Klt0yPzKuQ2BfWwwfgIUSayyzxpM1PJzkKt4V8803uB9qSy32MvI7Xep9khTpiA=="],
|
||||
|
||||
"robust-predicates": ["robust-predicates@3.0.2", "", {}, "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg=="],
|
||||
|
||||
"rollup": ["rollup@4.45.1", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.45.1", "@rollup/rollup-android-arm64": "4.45.1", "@rollup/rollup-darwin-arm64": "4.45.1", "@rollup/rollup-darwin-x64": "4.45.1", "@rollup/rollup-freebsd-arm64": "4.45.1", "@rollup/rollup-freebsd-x64": "4.45.1", "@rollup/rollup-linux-arm-gnueabihf": "4.45.1", "@rollup/rollup-linux-arm-musleabihf": "4.45.1", "@rollup/rollup-linux-arm64-gnu": "4.45.1", "@rollup/rollup-linux-arm64-musl": "4.45.1", "@rollup/rollup-linux-loongarch64-gnu": "4.45.1", "@rollup/rollup-linux-powerpc64le-gnu": "4.45.1", "@rollup/rollup-linux-riscv64-gnu": "4.45.1", "@rollup/rollup-linux-riscv64-musl": "4.45.1", "@rollup/rollup-linux-s390x-gnu": "4.45.1", "@rollup/rollup-linux-x64-gnu": "4.45.1", "@rollup/rollup-linux-x64-musl": "4.45.1", "@rollup/rollup-win32-arm64-msvc": "4.45.1", "@rollup/rollup-win32-ia32-msvc": "4.45.1", "@rollup/rollup-win32-x64-msvc": "4.45.1", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-4iya7Jb76fVpQyLoiVpzUrsjQ12r3dM7fIVz+4NwoYvZOShknRmiv+iu9CClZml5ZLGb0XMcYLutK6w9tgxHDw=="],
|
||||
|
||||
"roughjs": ["roughjs@4.6.6", "", { "dependencies": { "hachure-fill": "^0.5.2", "path-data-parser": "^0.1.0", "points-on-curve": "^0.2.0", "points-on-path": "^0.2.1" } }, "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ=="],
|
||||
|
||||
"rw": ["rw@1.3.3", "", {}, "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ=="],
|
||||
|
||||
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
|
||||
|
||||
"sax": ["sax@1.4.1", "", {}, "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg=="],
|
||||
|
||||
"semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
@ -864,6 +1091,8 @@
|
||||
|
||||
"style-to-object": ["style-to-object@1.0.9", "", { "dependencies": { "inline-style-parser": "0.2.4" } }, "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw=="],
|
||||
|
||||
"stylis": ["stylis@4.3.6", "", {}, "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ=="],
|
||||
|
||||
"tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="],
|
||||
|
||||
"tiny-invariant": ["tiny-invariant@1.3.3", "", {}, "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg=="],
|
||||
@ -878,6 +1107,8 @@
|
||||
|
||||
"trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="],
|
||||
|
||||
"ts-dedent": ["ts-dedent@2.2.0", "", {}, "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ=="],
|
||||
|
||||
"tsconfck": ["tsconfck@3.1.6", "", { "peerDependencies": { "typescript": "^5.0.0" }, "optionalPeers": ["typescript"], "bin": { "tsconfck": "bin/tsconfck.js" } }, "sha512-ks6Vjr/jEw0P1gmOVwutM3B7fWxoWBL2KRDb1JfqGVawBmO5UsvmWOQFGHBPl5yxYz4eERr19E6L7NMv+Fej4w=="],
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
@ -926,6 +1157,8 @@
|
||||
|
||||
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
|
||||
|
||||
"uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="],
|
||||
|
||||
"vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q=="],
|
||||
|
||||
"vfile-location": ["vfile-location@5.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg=="],
|
||||
@ -936,6 +1169,18 @@
|
||||
|
||||
"vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="],
|
||||
|
||||
"vscode-jsonrpc": ["vscode-jsonrpc@8.2.0", "", {}, "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA=="],
|
||||
|
||||
"vscode-languageserver": ["vscode-languageserver@9.0.1", "", { "dependencies": { "vscode-languageserver-protocol": "3.17.5" }, "bin": { "installServerIntoExtension": "bin/installServerIntoExtension" } }, "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g=="],
|
||||
|
||||
"vscode-languageserver-protocol": ["vscode-languageserver-protocol@3.17.5", "", { "dependencies": { "vscode-jsonrpc": "8.2.0", "vscode-languageserver-types": "3.17.5" } }, "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg=="],
|
||||
|
||||
"vscode-languageserver-textdocument": ["vscode-languageserver-textdocument@1.0.12", "", {}, "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA=="],
|
||||
|
||||
"vscode-languageserver-types": ["vscode-languageserver-types@3.17.5", "", {}, "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg=="],
|
||||
|
||||
"vscode-uri": ["vscode-uri@3.0.8", "", {}, "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw=="],
|
||||
|
||||
"web-namespaces": ["web-namespaces@2.0.1", "", {}, "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ=="],
|
||||
|
||||
"webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
|
||||
@ -966,6 +1211,8 @@
|
||||
|
||||
"zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
|
||||
|
||||
"@antfu/install-pkg/tinyexec": ["tinyexec@1.0.1", "", {}, "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw=="],
|
||||
|
||||
"@rollup/pluginutils/estree-walker": ["estree-walker@2.0.2", "", {}, "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="],
|
||||
|
||||
"ansi-align/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
|
||||
@ -974,10 +1221,24 @@
|
||||
|
||||
"astro/sharp": ["sharp@0.33.5", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.6.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.33.5", "@img/sharp-darwin-x64": "0.33.5", "@img/sharp-libvips-darwin-arm64": "1.0.4", "@img/sharp-libvips-darwin-x64": "1.0.4", "@img/sharp-libvips-linux-arm": "1.0.5", "@img/sharp-libvips-linux-arm64": "1.0.4", "@img/sharp-libvips-linux-s390x": "1.0.4", "@img/sharp-libvips-linux-x64": "1.0.4", "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", "@img/sharp-libvips-linuxmusl-x64": "1.0.4", "@img/sharp-linux-arm": "0.33.5", "@img/sharp-linux-arm64": "0.33.5", "@img/sharp-linux-s390x": "0.33.5", "@img/sharp-linux-x64": "0.33.5", "@img/sharp-linuxmusl-arm64": "0.33.5", "@img/sharp-linuxmusl-x64": "0.33.5", "@img/sharp-wasm32": "0.33.5", "@img/sharp-win32-ia32": "0.33.5", "@img/sharp-win32-x64": "0.33.5" } }, "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw=="],
|
||||
|
||||
"cytoscape-fcose/cose-base": ["cose-base@2.2.0", "", { "dependencies": { "layout-base": "^2.0.0" } }, "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g=="],
|
||||
|
||||
"d3-dsv/commander": ["commander@7.2.0", "", {}, "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="],
|
||||
|
||||
"d3-sankey/d3-array": ["d3-array@2.12.1", "", { "dependencies": { "internmap": "^1.0.0" } }, "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ=="],
|
||||
|
||||
"d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw=="],
|
||||
|
||||
"hast-util-to-parse5/property-information": ["property-information@6.5.0", "", {}, "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig=="],
|
||||
|
||||
"mlly/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
|
||||
|
||||
"mlly/pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ=="],
|
||||
|
||||
"parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
|
||||
|
||||
"pkg-types/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
|
||||
|
||||
"prompts/kleur": ["kleur@3.0.3", "", {}, "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w=="],
|
||||
|
||||
"recast/source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
@ -1024,6 +1285,14 @@
|
||||
|
||||
"astro/sharp/@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.33.5", "", { "os": "win32", "cpu": "x64" }, "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg=="],
|
||||
|
||||
"cytoscape-fcose/cose-base/layout-base": ["layout-base@2.0.1", "", {}, "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg=="],
|
||||
|
||||
"d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="],
|
||||
|
||||
"d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="],
|
||||
|
||||
"mlly/pkg-types/confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
|
||||
|
||||
"ansi-align/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
|
||||
}
}

@ -13,6 +13,7 @@
"@astrojs/starlight": "^0.35.1",
"@lorenzo_lewis/starlight-utils": "^0.3.2",
"astro": "^5.6.1",
"astro-mermaid": "^1.0.4",
"gsap": "^3.13.0",
"phosphor-astro": "^2.1.0",
"sharp": "^0.34.2",
[Image changed: 22 MiB before and after; binary diff not shown]
BIN website/public/videos/jan-nano-demo.mp4 (new file, binary content not shown)
@ -3,6 +3,7 @@
|
||||
// This overrides the default Starlight Header component
|
||||
import Search from '@astrojs/starlight/components/Search.astro';
|
||||
import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
import { Icon } from '@astrojs/starlight/components';
|
||||
---
|
||||
|
||||
<div class="sl-nav-wrapper">
|
||||
@ -13,34 +14,49 @@ import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
<a href="/" class="sl-nav__title">
|
||||
👋 Jan
|
||||
</a>
|
||||
|
||||
|
||||
<!-- Main navigation links -->
|
||||
<div class="sl-nav__links">
|
||||
<a href="/products/" class="sl-nav__link">
|
||||
<a href="/products" class="sl-nav__link">
|
||||
Products
|
||||
</a>
|
||||
<a href="/api-reference" class="sl-nav__link">
|
||||
API Reference
|
||||
<a href="/docs/" class="sl-nav__link">
|
||||
Docs
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Right side items (search, theme toggle, social links, etc.) -->
|
||||
<div class="sl-nav__end">
|
||||
<!-- Search -->
|
||||
|
||||
<!-- Center search -->
|
||||
<div class="sl-nav__center">
|
||||
<div class="sl-nav__search">
|
||||
<Search />
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<!-- Right side items (API Reference, theme toggle, social links, etc.) -->
|
||||
<div class="sl-nav__end">
|
||||
<!-- API Reference moved to right -->
|
||||
<a href="/api-reference" class="sl-nav__link">
|
||||
API Reference
|
||||
</a>
|
||||
|
||||
<!-- Theme toggle -->
|
||||
<ThemeSelect />
|
||||
|
||||
|
||||
<!-- Social links -->
|
||||
<div class="sl-nav__social">
|
||||
<a href="https://github.com/menloresearch/jan" class="sl-nav__social-link" aria-label="GitHub">
|
||||
<svg aria-hidden="true" width="16" height="16" viewBox="0 0 24 24" fill="currentColor">
|
||||
<path d="M12 0c-6.626 0-12 5.373-12 12 0 5.302 3.438 9.8 8.207 11.387.599.111.793-.261.793-.577v-2.234c-3.338.726-4.033-1.416-4.033-1.416-.546-1.387-1.333-1.756-1.333-1.756-1.089-.745.083-.729.083-.729 1.205.084 1.839 1.237 1.839 1.237 1.07 1.834 2.807 1.304 3.492.997.107-.775.418-1.305.762-1.604-2.665-.305-5.467-1.334-5.467-5.931 0-1.311.469-2.381 1.236-3.221-.124-.303-.535-1.524.117-3.176 0 0 1.008-.322 3.301 1.23.957-.266 1.983-.399 3.003-.404 1.02.005 2.047.138 3.006.404 2.291-1.552 3.297-1.23 3.297-1.23.653 1.653.242 2.874.118 3.176.77.84 1.235 1.911 1.235 3.221 0 4.609-2.807 5.624-5.479 5.921.43.372.823 1.102.823 2.222v3.293c0 .319.192.694.801.576 4.765-1.589 8.199-6.086 8.199-11.386 0-6.627-5.373-12-12-12z"/>
|
||||
</svg>
|
||||
<Icon name="github"/>
|
||||
</a>
|
||||
</div>
|
||||
<div class="sl-nav__social">
|
||||
<a href="https://twitter.com/jandotai" class="sl-nav__social-link" aria-label="X">
|
||||
<Icon name="x.com"/>
|
||||
</a>
|
||||
</div>
|
||||
<div class="sl-nav__social">
|
||||
<a href="https://discord.com/invite/FTk2MvZwJH" class="sl-nav__social-link" aria-label="Discord">
|
||||
<Icon name="discord"/>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
@ -70,6 +86,15 @@ import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
height: var(--sl-nav-height, 3.5rem);
|
||||
}
|
||||
|
||||
.sl-nav__center {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex: 1;
|
||||
justify-content: center;
|
||||
max-width: 500px;
|
||||
margin: 0 2rem;
|
||||
}
|
||||
|
||||
.sl-nav__left {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@ -138,6 +163,14 @@ import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
.sl-nav__search {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
min-width: 300px;
|
||||
}
|
||||
|
||||
/* Make the actual search input wider */
|
||||
.sl-nav__search :global(input) {
|
||||
min-width: 300px !important;
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.sl-nav__social {
|
||||
@ -167,24 +200,86 @@ import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
.sl-nav__links {
|
||||
display: none;
|
||||
}
|
||||
|
||||
|
||||
.sl-nav__left {
|
||||
gap: 0.25rem;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.sl-nav__center {
|
||||
margin: 0 0.5rem;
|
||||
max-width: 200px;
|
||||
flex-shrink: 1;
|
||||
}
|
||||
|
||||
.sl-nav__end {
|
||||
gap: 0.25rem;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.sl-nav__search {
|
||||
min-width: 150px;
|
||||
}
|
||||
|
||||
.sl-nav__search :global(input) {
|
||||
min-width: 150px !important;
|
||||
}
|
||||
|
||||
/* Hide API Reference on very small screens */
|
||||
.sl-nav__end > .sl-nav__link {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* Make social links smaller on mobile */
|
||||
.sl-nav__social-link {
|
||||
width: 1.5rem;
|
||||
height: 1.5rem;
|
||||
}
|
||||
}
|
||||
|
||||
/* Responsive adjustments */
|
||||
@media (max-width: 640px) {
|
||||
.sl-nav {
|
||||
padding: 0.5rem 1rem;
|
||||
padding: 0.5rem 0.75rem;
|
||||
}
|
||||
|
||||
|
||||
.sl-nav__title {
|
||||
font-size: 1.125rem;
|
||||
font-size: 1rem;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
|
||||
.sl-nav__left {
|
||||
gap: 0.125rem;
|
||||
}
|
||||
|
||||
.sl-nav__center {
|
||||
margin: 0 0.25rem;
|
||||
max-width: 150px;
|
||||
}
|
||||
|
||||
.sl-nav__search {
|
||||
min-width: 120px;
|
||||
}
|
||||
|
||||
.sl-nav__search :global(input) {
|
||||
min-width: 120px !important;
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
.sl-nav__end {
|
||||
gap: 0.5rem;
|
||||
gap: 0.125rem;
|
||||
flex-shrink: 0;
|
||||
min-width: fit-content;
|
||||
}
|
||||
|
||||
/* Hide some social links on very small screens to make room */
|
||||
.sl-nav__social:nth-child(n+2) {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* Hide theme toggle on very small screens */
|
||||
:global(starlight-theme-select) {
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
@ -215,7 +310,7 @@ import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
// Add active state highlighting based on current page
|
||||
const currentPath = window.location.pathname;
|
||||
const links = document.querySelectorAll('.sl-nav__link');
|
||||
|
||||
|
||||
links.forEach(link => {
|
||||
const href = link.getAttribute('href');
|
||||
if (href && currentPath.startsWith(href)) {
|
||||
@ -224,4 +319,4 @@ import ThemeSelect from '@astrojs/starlight/components/ThemeSelect.astro';
|
||||
}
|
||||
});
|
||||
|
||||
</script>
|
||||
</script>
|
||||
|
||||
@ -36,10 +36,10 @@ add it to Jan via the configuration's page and start talking to your favorite mo

### Features

- Download popular open-source LLMs (Llama3, Gemma3, Qwen3, and more) from the HuggingFace [Model Hub](./docs/manage-models.mdx)
- Download popular open-source LLMs (Llama3, Gemma3, Qwen3, and more) from the HuggingFace [Model Hub](./jan/manage-models)
or import any GGUF files (the model format used by llama.cpp) available locally
- Connect to [cloud services](/docs/remote-models/openai) (OpenAI, Anthropic, Mistral, Groq, etc.)
- [Chat](./docs/threads.mdx) with AI models & [customize their parameters](/docs/model-parameters.mdx) via our
- Connect to [cloud services](./jan/remote-models/openai) (OpenAI, Anthropic, Mistral, Groq, etc.)
- [Chat](./jan/threads) with AI models & [customize their parameters](./jan/explanation/model-parameters) via our
intuitive interface
- Use our [local API server](https://jan.ai/api-reference) with an OpenAI-equivalent API to power other apps.
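The last bullet refers to Jan's OpenAI-compatible local API server. As a rough illustration of what "OpenAI-equivalent" means in practice, here is a hedged sketch of a chat-completion call against it; the port (1337 below), the model id, and any API key are placeholders that depend on how the server is configured in your install.

```bash
# Hypothetical example: chat completion against Jan's local OpenAI-compatible server.
# Adjust host, port, model id, and auth to match your own configuration.
curl http://127.0.0.1:1337/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
    "model": "llama3.2-3b-instruct",
    "messages": [{"role": "user", "content": "Hello from the local API server"}]
  }'
```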
@ -53,7 +53,7 @@ Jan is built to be [user-owned](about#-user-owned), this means that Jan is:
- We do not collect or sell user data. See our [Privacy Policy](./privacy).

<Aside>
You can read more about our [philosophy](/about#philosophy) here.
You can read more about our [philosophy](https://jan.ai/about#philosophy) here.
</Aside>

### Inspirations
@ -79,7 +79,7 @@ Jan is a customizable AI assistant that can run offline on your computer - a pri
<details>
<summary><strong>How do I get started with Jan?</strong></summary>

Download Jan on your computer, download a model or add API key for a cloud-based one, and start chatting. For detailed setup instructions, see our [Quick Start](/docs/quickstart) guide.
Download Jan on your computer, download a model or add API key for a cloud-based one, and start chatting. For detailed setup instructions, see our installation guides.

</details>

@ -87,9 +87,9 @@ Download Jan on your computer, download a model or add API key for a cloud-based
<summary><strong>Is Jan compatible with my system?</strong></summary>

Jan supports all major operating systems:
- [Mac](/docs/desktop/mac#compatibility)
- [Windows](/docs/desktop/windows#compatibility)
- [Linux](/docs/desktop/linux)
- [Mac](./jan/installation/mac#compatibility)
- [Windows](./jan/installation/windows#compatibility)
- [Linux](./jan/installation/linux)

Hardware compatibility includes:
- NVIDIA GPUs (CUDA)
@ -105,21 +105,21 @@ Hardware compatibility includes:
Jan prioritizes privacy by:
- Running 100% offline with locally-stored data
- Using open-source models that keep your conversations private
- Storing all files and chat history on your device in the [Jan Data Folder](/docs/data-folder)
- Storing all files and chat history on your device in the [Jan Data Folder](./jan/data-folder)
- Never collecting or selling your data

<Aside type="danger">
When using third-party cloud AI services through Jan, their data policies apply. Check their privacy terms.
</Aside>

You can optionally share anonymous usage statistics to help improve Jan, but your conversations are never shared. See our complete [Privacy Policy](./docs/privacy).
You can optionally share anonymous usage statistics to help improve Jan, but your conversations are never shared. See our complete [Privacy Policy](./jan/privacy).

</details>

<details>
<summary><strong>What models can I use with Jan?</strong></summary>

- Download optimized models from the [Jan Hub](/docs/manage-models)
- Download optimized models from the [Jan Hub](./jan/manage-models)
- Import GGUF models from Hugging Face or your local files
- Connect to cloud providers like OpenAI, Anthropic, Mistral and Groq (requires your own API keys)
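The GGUF import route above usually starts with grabbing a .gguf file from Hugging Face. A minimal sketch, assuming the huggingface_hub CLI is installed and using placeholder repository and file names:

```bash
# Hypothetical example - repository and file names are placeholders.
pip install -U huggingface_hub
huggingface-cli download bartowski/Llama-3.2-3B-Instruct-GGUF \
  Llama-3.2-3B-Instruct-Q4_K_M.gguf --local-dir ./models
# Then import the downloaded .gguf file from Jan's model hub / import dialog.
```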
@ -145,7 +145,7 @@ Yes! Once you've downloaded a local model, Jan works completely offline with no
- Join our [Discord community](https://discord.gg/qSwXFx6Krr) to connect with other users
- Contribute through [GitHub](https://github.com/menloresearch/jan) (no permission needed!)
- Get troubleshooting help in our [Discord](https://discord.com/invite/FTk2MvZwJH) channel [#🆘|jan-help](https://discord.com/channels/1107178041848909847/1192090449725358130)
- Check our [Troubleshooting](./docs/troubleshooting) guide for common issues
- Check our [Troubleshooting](./jan/troubleshooting) guide for common issues

</details>

@ -17,9 +17,6 @@ keywords:
]
---


# Assistants

Jan allows you to give models specific sets of instructions without having to repeat yourself. We called these
models with your instructions, Assistants. Each of these assistants can also have their own set of configuration
which can help guide how the AI model should behave and respond to your inputs. You can add, edit, or delete
@ -19,20 +19,18 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
|
||||
# Jan Data Folder
|
||||
Jan stores your data locally in JSON format. Your data is yours alone.
|
||||
|
||||
## Open Jan Data Folder
|
||||
|
||||
Via Jan:
|
||||
1. Settings (<Settings width={16} height={16} style={{display:"inline"}}/>) > Advanced Settings
|
||||
2. Click <FolderOpen width={16} height={16} style={{display:"inline"}}/>
|
||||
1. **Settings** > **General**
|
||||
2. Click on the **Change Location** button.
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
Via Terminal:
|
||||
|
||||
@ -50,11 +48,11 @@ cd ~/.config/Jan/data # Default install
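For reference, these are the default locations used throughout these docs; if you changed the data folder location in Settings, open that path instead. A quick sketch:

```bash
# macOS
open ~/Library/Application\ Support/Jan/data

# Linux (default install)
xdg-open ~/.config/Jan/data

# Windows (from cmd.exe): explorer %APPDATA%\Jan
```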
|
||||
|
||||
## Directory Structure
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Root directory: `~/jan`
|
||||
</Aside>
|
||||
|
||||
```sh
|
||||
```
|
||||
/assistants/
|
||||
/jan/
|
||||
assistant.json
|
||||
@ -151,7 +149,7 @@ Debugging headquarters (`/logs/app.txt`):
|
||||
### `models/`
|
||||
The silicon brain collection. Each model has its own `model.json`.
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Full parameters: [here](/docs/model-parameters)
|
||||
</Aside>
|
||||
|
||||
@ -213,5 +211,6 @@ Chat archive. Each thread (`/threads/jan_unixstamp/`) contains:
|
||||
|
||||
|
||||
## Delete Jan Data
|
||||
Uninstall guides: [Mac](/docs/desktop/mac#step-2-clean-up-data-optional),
|
||||
[Windows](/docs/desktop/windows#step-2-handle-jan-data), or [Linux](docs/desktop/linux#uninstall-jan).
|
||||
|
||||
Uninstall guides: [Mac](./installation/mac#step-2-clean-up-data-optional),
|
||||
[Windows](./installation/windows#step-2-handle-jan-data), or [Linux](./installation/linux#uninstall-jan).
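If you prefer the terminal, the cleanup in those guides boils down to deleting the data folder. Double-check the path first; this permanently removes chats, models, and settings:

```bash
# macOS
rm -rf ~/Library/Application\ Support/Jan/data

# Linux (default install)
rm -rf ~/.config/Jan/data

# Windows (from cmd.exe): rmdir /S %APPDATA%\Jan
```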
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
---
|
||||
title: llama.cpp Engine
|
||||
title: Local AI Engine
|
||||
description: Understand and configure Jan's local AI engine for running models on your hardware.
|
||||
keywords:
|
||||
[
|
||||
@ -23,29 +23,31 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Local AI Engine (llama.cpp)
|
||||
import { Aside, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
## What is llama.cpp?
|
||||
|
||||
llama.cpp is the engine that runs AI models locally on your computer. Think of it as the software that takes an AI model file and makes it actually work on your hardware - whether that's your CPU, graphics card, or Apple's M-series chips.
|
||||
llama.cpp is the engine that runs AI models locally on your computer. Think of it as the software
|
||||
that takes an AI model file and makes it work on your hardware - whether that's your CPU,
|
||||
graphics card, or Apple's M-series chips.
|
||||
|
||||
Originally created by Georgi Gerganov, llama.cpp is designed to run large language models efficiently on consumer hardware without requiring specialized AI accelerators or cloud connections.
|
||||
Originally created by Georgi Gerganov, llama.cpp is designed to run large language models
|
||||
efficiently on consumer hardware without requiring specialized AI accelerators or cloud connections.
|
||||
|
||||
## Why This Matters
|
||||
|
||||
**Privacy**: Your conversations never leave your computer
|
||||
**Cost**: No monthly subscription fees or API costs
|
||||
**Speed**: No internet required once models are downloaded
|
||||
**Control**: Choose exactly which models to run and how they behave
|
||||
- **Privacy**: Your conversations never leave your computer
|
||||
- **Cost**: No monthly subscription fees or API costs
|
||||
- **Speed**: No internet required once models are downloaded
|
||||
- **Control**: Choose exactly which models to run and how they behave
|
||||
|
||||
## Accessing Engine Settings
|
||||
|
||||
Find llama.cpp settings at **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>) > **Local Engine** > **llama.cpp**:
|
||||
Find llama.cpp settings at **Settings** > **Model Providers** > **Llama.cpp**:
|
||||
|
||||

|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
These are advanced settings. You typically only need to adjust them if models aren't working properly or you want to optimize performance for your specific hardware.
|
||||
</Aside>
|
||||
|
||||
@ -61,13 +63,13 @@ These are advanced settings. You typically only need to adjust them if models ar
|
||||
|
||||
Jan offers different backend versions optimized for your specific hardware. Think of these as different "drivers" - each one is tuned for particular processors or graphics cards.
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
Using the wrong backend can make models run slowly or fail to load. Pick the one that matches your hardware.
|
||||
</Aside>
|
||||
|
||||
<Tabs items={['Windows', 'Linux', 'macOS']}>
|
||||
<Tabs>
|
||||
|
||||
<Tabs.Tab>
|
||||
<TabItem label="Windows">
|
||||
|
||||
### NVIDIA Graphics Cards (Recommended for Speed)
|
||||
Choose based on your CUDA version (check NVIDIA Control Panel):
|
||||
@ -89,13 +91,13 @@ Choose based on your CUDA version (check NVIDIA Control Panel):
|
||||
### Other Graphics Cards
|
||||
- `llama.cpp-vulkan` (AMD, Intel Arc, some others)
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
**Quick Test**: Start with `avx2-cuda-12-0` if you have an NVIDIA card, or `avx2` for CPU-only. If it doesn't work, try the `avx` variant.
|
||||
</Aside>
|
||||
|
||||
</Tabs.Tab>
|
||||
</TabItem>
|
||||
|
||||
<Tabs.Tab>
|
||||
<TabItem label="Linux">
|
||||
|
||||
### NVIDIA Graphics Cards
|
||||
Same CUDA options as Windows:
|
||||
@ -110,9 +112,9 @@ Same CUDA options as Windows:
|
||||
### Other Graphics Cards
|
||||
- `llama.cpp-vulkan` (AMD, Intel graphics)
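If you're not sure which variant matches your machine, a couple of standard commands can help. This is only a rough sketch: `nvidia-smi` and `nvcc` are present only if NVIDIA drivers and the CUDA toolkit are installed.

```bash
# Shows your GPU and the driver's CUDA version (top-right of the output)
nvidia-smi

# Shows the installed CUDA toolkit version, if any
nvcc --version

# Non-zero output means your CPU advertises AVX2 support
grep -c avx2 /proc/cpuinfo
```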
|
||||
|
||||
</Tabs.Tab>
|
||||
</TabItem>
|
||||
|
||||
<Tabs.Tab>
|
||||
<TabItem label="MacOS">
|
||||
|
||||
### Apple Silicon (M1/M2/M3/M4)
|
||||
- `llama.cpp-mac-arm64` (recommended)
|
||||
@ -120,11 +122,11 @@ Same CUDA options as Windows:
|
||||
### Intel Macs
|
||||
- `llama.cpp-mac-amd64`
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Apple Silicon Macs automatically use the GPU through Metal - no additional setup needed.
|
||||
</Aside>
|
||||
|
||||
</Tabs.Tab>
|
||||
</TabItem>
|
||||
|
||||
</Tabs>
|
||||
|
||||
@ -152,7 +154,7 @@ These control how models use your computer's memory:
|
||||
|
||||
### KV Cache Types Explained
|
||||
- **f16**: Most stable, uses more memory
|
||||
- **q8_0**: Balanced memory usage and quality
|
||||
|
||||
- **q4_0**: Uses least memory, slight quality loss
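For a rough sense of what these options mean in practice, here's a back-of-the-envelope estimate. The dimensions below are assumptions modeled on a Llama-2-7B-style architecture; models that use grouped-query attention need far less.

```bash
# Approximate KV cache size at an 8192-token context (illustrative numbers only)
CTX=8192; LAYERS=32; KV_HEADS=32; HEAD_DIM=128
F16_BYTES=$(( 2 * LAYERS * KV_HEADS * HEAD_DIM * CTX * 2 ))   # K and V, 2 bytes per value
echo "f16:  $(( F16_BYTES / 1024 / 1024 )) MiB"               # ~4096 MiB
echo "q8_0: ~$(( F16_BYTES / 2 / 1024 / 1024 )) MiB"          # roughly half
echo "q4_0: ~$(( F16_BYTES / 4 / 1024 / 1024 )) MiB"          # roughly a quarter
```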
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
@ -199,6 +201,7 @@ These control how models use your computer's memory:
|
||||
2. Reduce Context Size in model settings
|
||||
3. Try a smaller model
|
||||
|
||||
<Aside type="info">
|
||||
Most users can run Jan successfully without changing any of these settings. The defaults are chosen to work well on typical hardware.
|
||||
<Aside type="note">
|
||||
Most users can run Jan successfully without changing any of these settings. The defaults are chosen
|
||||
to work well on typical hardware.
|
||||
</Aside>
|
||||
|
||||
@ -17,7 +17,7 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
# Model Parameters
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Model parameters control how your AI thinks and responds. Think of them as the AI's personality settings and performance controls.
|
||||
|
||||
@ -32,7 +32,7 @@ Model parameters control how your AI thinks and responds. Think of them as the A
|
||||
**For model capabilities:**
|
||||
- Click the **edit button** next to a model to enable features like vision or tools
|
||||
|
||||
## Performance Settings (Gear Icon)
|
||||
## Performance Settings
|
||||
|
||||
These settings control how the model thinks and performs:
|
||||
|
||||
@ -51,7 +51,7 @@ These settings control how the model thinks and performs:
|
||||
|
||||

|
||||
|
||||
## Model Capabilities (Edit Button)
|
||||
## Model Capabilities
|
||||
|
||||
These toggle switches enable special features:
|
||||
|
||||
@ -82,7 +82,7 @@ This is like the model's short-term memory - how much of your conversation it ca
|
||||
- **Longer context** = Remembers more of your conversation, better for long discussions
|
||||
- **Shorter context** = Uses less memory, runs faster, but might "forget" earlier parts of long conversations
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Jan defaults to 8192 tokens (roughly 6000 words) or your model's maximum, whichever is smaller. This handles most conversations well.
|
||||
</Aside>
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
---
|
||||
title: Linux Installation
|
||||
title: Linux
|
||||
description: Get started quickly with Jan, an AI chat application that runs 100% offline on your desktop & mobile (*coming soon*).
|
||||
keywords:
|
||||
[
|
||||
@ -98,7 +98,6 @@ System requirements:
|
||||
|
||||
Installation steps:
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Download Application
|
||||
|
||||
@ -110,17 +109,7 @@ Available releases:
|
||||
- Others: [Jan.AppImage](https://app.jan.ai/download/latest/linux-amd64-appimage)
|
||||
- Official Website: https://jan.ai/download
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Beta Release">
|
||||
Beta release:
|
||||
- Ubuntu: [jan.deb](https://app.jan.ai/download/beta/linux-amd64-deb)
|
||||
- Others: [Jan.AppImage](https://app.jan.ai/download/beta/linux-amd64-appimage)
|
||||
|
||||
<Aside type="note">
|
||||
May contain bugs. Use with caution.
|
||||
</Aside>
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Nightly Release">
|
||||
Development build:
|
||||
- Ubuntu: [jan.deb](https://app.jan.ai/download/nightly/linux-amd64-deb)
|
||||
@ -161,7 +150,6 @@ Installation commands:
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
</ol>
|
||||
|
||||
|
||||
## Data Folder
|
||||
@ -264,7 +252,7 @@ Removal commands:
|
||||
rm -rf ~/.config/Jan/cache
|
||||
```
|
||||
</TabItem>
|
||||
|
||||
|
||||
<TabItem label="Others">
|
||||
```bash
|
||||
rm jan-linux-x86_64-{version}.AppImage
|
||||
|
||||
@ -23,13 +23,12 @@ keywords:
|
||||
import { Aside, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
|
||||
|
||||
|
||||
# Mac Installation
|
||||
Jan runs natively on both Apple Silicon and Intel-based Macs.
|
||||
|
||||
## Compatibility
|
||||
|
||||
### Minimum Requirements
|
||||
|
||||
Your Mac needs:
|
||||
- **Operating System:** macOS 13.6 or higher
|
||||
- **Memory:**
|
||||
@ -39,9 +38,14 @@ Your Mac needs:
|
||||
- **Storage:** 10GB+ free space
|
||||
|
||||
### Mac Performance Guide
|
||||
<Aside type="info">
|
||||
**Apple Silicon Macs** use Metal for GPU acceleration, making them faster than **Apple Intel Macs**, which operate on CPU only.
|
||||
|
||||
<Aside type="note">
|
||||
|
||||
**Apple Silicon Macs** use Metal for GPU acceleration, making them faster than **Apple Intel Macs**,
|
||||
which operate on CPU only.
|
||||
|
||||
</Aside>
|
||||
|
||||
**Apple Silicon (M1, M2, M3)**
|
||||
- Metal acceleration enabled by default
|
||||
- GPU-accelerated processing
|
||||
@ -56,35 +60,24 @@ _Check your Mac's processor: Apple menu → About This Mac_
|
||||
|
||||
Installation steps:
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Download Application
|
||||
|
||||
Select version:
|
||||
|
||||
<Tabs>
|
||||
<TabItem label="Stable Release">
|
||||
Get Jan from here:
|
||||
|
||||
- [Download Jan's Stable Version](https://app.jan.ai/download/latest/mac-universal)
|
||||
- Official Website: https://jan.ai/download
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Beta Release">
|
||||
Beta: New features with potential instability.
|
||||
|
||||
[Download Jan's Beta Version](https://app.jan.ai/download/beta/mac-universal)
|
||||
|
||||
<Aside type="warning">
|
||||
May contain bugs.
|
||||
</Aside>
|
||||
</TabItem>
|
||||
|
||||
|
||||
<TabItem label="Nightly Release">
|
||||
Nightly: Latest features, less stable.
|
||||
|
||||
[Download Jan's Nightly Version](https://app.jan.ai/download/nightly/mac-universal)
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Expect potential instability.
|
||||
</Aside>
|
||||
</TabItem>
|
||||
@ -98,8 +91,6 @@ Select version:
|
||||
3. Wait a moment
|
||||
4. Launch Jan
|
||||
|
||||
</ol>
|
||||
|
||||
|
||||
## Jan Data Folder
|
||||
|
||||
@ -109,14 +100,15 @@ Default location:
|
||||
# Default installation directory
|
||||
~/Library/Application\ Support/Jan/data
|
||||
```
|
||||
See [Jan Data Folder](/docs/data-folder) for details.
|
||||
|
||||
See [Jan Data Folder](../data-folder) for details.
|
||||
|
||||
|
||||
## Uninstall Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Remove Application
|
||||
|
||||
1. Close Jan if it's running
|
||||
2. Open **Finder**
|
||||
3. Go to **Applications**
|
||||
@ -129,39 +121,11 @@ See [Jan Data Folder](/docs/data-folder) for details.
|
||||
### Step 2: Clean Up Data (Optional)
|
||||
|
||||
Run this in **Terminal** to remove all data:
|
||||
|
||||
```bash
|
||||
rm -rf ~/Library/Application\ Support/Jan/data
|
||||
```
|
||||
</ol>
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
This permanently deletes all data. Create backups if needed.
|
||||
</Aside>
|
||||
|
||||
{/* ## FAQs
|
||||
|
||||
## What are Nightly Releases, and how can I access them?
|
||||
|
||||
Nightly Releases allow you to test new features and previews of upcoming stable releases. You can download
|
||||
them from Jan's GitHub repository. However, remember that these builds might contain bugs and crash frequently.
|
||||
|
||||
## Can I move the Jan data folder to a different location?
|
||||
|
||||
Yes, you can move the Jan data folder.
|
||||
|
||||
## How do I enable GPU acceleration for better performance?

GPU acceleration (via Metal) is only available on Apple Silicon Macs. If you have a Mac with an Intel processor,
Jan runs on the CPU only and GPU acceleration is not available.
|
||||
|
||||
## Can I recover the deleted Jan data folder after uninstallation?
|
||||
|
||||
No, it cannot be restored once you delete the Jan data folder during uninstallation.
|
||||
|
||||
|
||||
<Aside type="info">
|
||||
💡 Warning: If you have any trouble during installation, please see our [Troubleshooting](/docs/troubleshooting)
|
||||
guide to resolve your problem.
|
||||
|
||||
</Aside>
|
||||
*/}
|
||||
|
||||
@ -42,7 +42,7 @@ import { Aside, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Processor must support **AVX2 or newer**. See [full list of supported processors](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions#CPUs_with_AVX2).
|
||||
Performance is significantly degraded on AVX-only processors.
|
||||
</Aside>
|
||||
@ -52,7 +52,7 @@ Performance is significantly degraded on AVX-only processors.
|
||||
- 16GB → up to 7B parameter models (int4)
|
||||
- 32GB → up to 13B parameter models (int4)
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Newer RAM generations provide better performance.
|
||||
</Aside>
|
||||
|
||||
@ -61,7 +61,7 @@ Newer RAM generations provide better performance.
|
||||
- 8GB → up to 7B parameter models
|
||||
- 12GB → up to 13B parameter models
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Minimum 6GB VRAM recommended for NVIDIA, AMD, or Intel Arc GPUs.
|
||||
</Aside>
|
||||
|
||||
@ -70,8 +70,6 @@ Minimum 6GB VRAM recommended for NVIDIA, AMD, or Intel Arc GPUs.
|
||||
|
||||
## Install Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Download Application
|
||||
|
||||
<Tabs>
|
||||
@ -79,23 +77,13 @@ Minimum 6GB VRAM recommended for NVIDIA, AMD, or Intel Arc GPUs.
|
||||
- [Download Stable Jan](https://app.jan.ai/download/latest/win-x64)
|
||||
- Official Website: [Download Jan](https://jan.ai/download)
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Beta Release">
|
||||
Beta: Contains newer features but may be unstable
|
||||
|
||||
[Download Beta Jan](https://app.jan.ai/download/beta/win-x64)
|
||||
|
||||
<Aside type="warning">
|
||||
May be unstable
|
||||
</Aside>
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Nightly Release">
|
||||
Nightly: Development build with latest features
|
||||
|
||||
[Download Nightly Jan](https://app.jan.ai/download/nightly/win-x64)
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
Unstable development build
|
||||
</Aside>
|
||||
</TabItem>
|
||||
@ -107,8 +95,6 @@ Minimum 6GB VRAM recommended for NVIDIA, AMD, or Intel Arc GPUs.
|
||||
2. Wait for installation to complete
|
||||
3. Launch Jan
|
||||
|
||||
</ol>
|
||||
|
||||
## Data Folder
|
||||
|
||||
Default installation path:
|
||||
@ -126,8 +112,6 @@ See [Jan Data Folder](/docs/data-folder) for complete folder structure details.
|
||||
<Tabs>
|
||||
<TabItem label="NVIDIA GPU">
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Verify Hardware & Install Dependencies
|
||||
**1.1. Check GPU Detection**
|
||||
|
||||
@ -156,30 +140,29 @@ nvcc --version
|
||||
Navigate to **Settings** > **Hardware** > **GPUs**
|
||||
and toggle the **ON** switch if not enabled.
|
||||
|
||||
</ol>
|
||||
</TabItem>
|
||||
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="AMD GPU">
|
||||
<TabItem label="AMD GPU">
|
||||
AMD GPUs require **Vulkan** support.
|
||||
|
||||
Navigate to **Settings** > **Hardware** > **GPUs**
|
||||
and toggle the **ON** switch if not enabled.
|
||||
</TabItem>
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Intel Arc GPU">
|
||||
|
||||
<TabItem label="Intel Arc GPU">
|
||||
Intel Arc GPUs require **Vulkan** support.
|
||||
|
||||
Navigate to **Settings** > **Hardware** > **GPUs**
|
||||
and toggle the **ON** switch if not enabled.
|
||||
</TabItem>
|
||||
|
||||
</TabItem>
|
||||
|
||||
</Tabs>
|
||||
|
||||
|
||||
## Uninstall Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Remove Application through Control Panel
|
||||
|
||||
1. Open **Control Panel**
|
||||
@ -203,8 +186,7 @@ or via **Terminal**:
|
||||
cd C:\Users\%USERNAME%\AppData\Roaming
|
||||
rmdir /S Jan
|
||||
```
|
||||
</ol>
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
Deleted data folders cannot be recovered. Backup important data first.
|
||||
</Aside>
|
||||
|
||||
@ -18,12 +18,10 @@ keywords:
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
# Jan-Nano-128k
|
||||
|
||||
> Enabling deeper research through extended context understanding.
|
||||
|
||||
Jan-Nano-128k represents a notable advancement in compact language models for different applications. Building upon the
|
||||
success of Jan-Nano-32k, this enhanced version features a native 128k context window that enables deeper, more comprehensive
|
||||
|
||||
|
||||
research capabilities without the performance degradation typically associated with context extension methods.
|
||||
|
||||
You can have a look at all of our models, and download them from the HuggingFace [Menlo Models page](https://huggingface.co/Menlo).
|
||||
@ -34,29 +32,29 @@ You can have a look at all of our models, and download them from the HuggingFace
|
||||
- ⚡ Native 128k Window: Built to handle long contexts efficiently, maintaining performance across the full context range
|
||||
- 📈 Enhanced Performance: Unlike traditional context extension methods, Jan-Nano-128k's performance remains consistent with longer contexts
|
||||
|
||||
This model maintains full compatibility with Model Context Protocol (MCP) servers while dramatically expanding the scope of research
|
||||
|
||||
tasks it can handle in a single session.
|
||||
|
||||
|
||||
## Why Jan-Nano-128k?
|
||||
|
||||
Most small models hit a wall at 8-32k tokens. Jan-Nano-128k goes beyond this limitation with a native 128k context window—that's roughly
|
||||
|
||||
300 pages of text or an entire novel's worth of information processed simultaneously.
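The "300 pages" figure is a rough conversion rather than a precise spec. Assuming about 0.75 words per token and about 300 words per page:

```bash
TOKENS=128000
WORDS=$(( TOKENS * 3 / 4 ))   # ~96,000 words at ~0.75 words per token
echo "$WORDS words, or about $(( WORDS / 300 )) pages at ~300 words per page"
```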
|
||||
|
||||
Unlike YaRN or PI methods that retrofit models beyond their limits and degrade performance, Jan-Nano-128k was architecturally rewired for
|
||||
128k contexts from the ground up. The result: an inverse scaling behavior where performance actually improves with longer contexts,
|
||||
|
||||
|
||||
maintaining consistent accuracy from 1k to 128k tokens as the model leverages more information for synthesis.
|
||||
|
||||
|
||||
<Aside type="info">
|
||||
|
||||
<Aside type="note">
|
||||
**Position Interpolation (PI):** A method that extends a model's context by scaling down position indices to fit within the original context
|
||||
window. For example, to extend a 4k model to 32k, PI compresses the 32k positions into the original 4k range by dividing each position by 8.
|
||||
|
||||
**YaRN (Yet another RoPE extensioN method):** A more sophisticated context extension method that preserves frequently occurring tokens while
|
||||
selectively scaling others. YaRN divides position embeddings into frequency groups and applies different scaling factors to each, resulting
|
||||
|
||||
|
||||
in more efficient training and better performance than PI.
|
||||
|
||||
The key difference is that PI applies uniform scaling across all dimensions, while YaRN uses targeted interpolation based on frequency analysis—preserving
|
||||
|
||||
high-frequency information that's crucial for distinguishing nearby tokens while interpolating lower frequencies more aggressively.
|
||||
</Aside>
|
||||
|
||||
@ -67,8 +65,8 @@ high-frequency information that's crucial for distinguishing nearby tokens while
|
||||
- **Business**: Distill insights from quarters of financial data
|
||||
- **Content**: Maintain narrative coherence across book-length outputs
|
||||
|
||||
**MCP Usage:** Jan-Nano-128k doesn't memorize, it orchestrates. With MCP integration, it becomes a research conductor that fetches dozens
|
||||
of sources, holds everything in active memory, extracts precisely what's needed, and synthesizes findings across a marathon research session. It's
|
||||
|
||||
|
||||
not about understanding every word; it's about finding the needle in a haystack of haystacks.
|
||||
|
||||
## Evaluation
|
||||
@ -87,7 +85,7 @@ Jan-Nano-128k has been rigorously evaluated on the SimpleQA benchmark using our
|
||||
### Demo
|
||||
|
||||
<video width="100%" controls>
|
||||
<source src="/assets/videos/jan-nano-demo.mp4" type="video/mp4" />
|
||||
<source src="/videos/jan-nano-demo.mp4" type="video/mp4" />
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
|
||||
|
||||
@ -14,31 +14,31 @@ keywords:
|
||||
Model Context Protocol,
|
||||
MCP,
|
||||
]
|
||||
sidebar:
|
||||
order: 1
|
||||
---
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
# Jan Nano
|
||||
|
||||

|
||||
|
||||
## Why Jan Nano?
|
||||
|
||||
Most language models face a fundamental tradeoff where powerful capabilities require a lot of computational resources. Jan
|
||||
Nano breaks this constraint through a focused design philosophy where instead of trying to know everything, it excels at
|
||||
|
||||
|
||||
knowing how to find anything.
|
||||
|
||||
|
||||
## What is Jan Nano?
|
||||
|
||||
Jan Nano is a compact 4-billion parameter language model specifically designed and trained for deep research tasks.
|
||||
This model has been optimized to work seamlessly with Model Context Protocol (MCP) servers, enabling efficient integration
|
||||
|
||||
|
||||
with various research tools and data sources.
|
||||
|
||||
The model and its different model variants are fully supported by Jan.
|
||||
|
||||
<Aside type="info">
|
||||
|
||||
<Aside type="note">
|
||||
To use Jan-Nano, you will need to use a search engine via MCP. You can enable MCP in the **Settings**
|
||||
tab under **Advanced Settings**.
|
||||
</Aside>
|
||||
|
||||
@ -70,7 +70,7 @@ Go to the Hub Tab, search for Jan-Nano-Gguf, and click on the download button to
|
||||
Go to **Settings** > **Model Providers** > **Llama.cpp** click on the pencil icon and enable tool use for Jan-Nano-Gguf.
|
||||
|
||||
**Step 4**
|
||||
To take advantage of Jan-Nano's full capabilities, you need to enable MCP support. We're going to use it with Serper's
|
||||
|
||||
API. You can get a free API key from [here](https://serper.dev/). Sign up and they will immediately generate one for you.
|
||||
|
||||
**Step 5**
|
||||
@ -81,7 +81,7 @@ Add the serper MCP to Jan via the **Settings** > **MCP Servers** tab.
|
||||
**Step 6**
|
||||
Open up a new chat and ask Jan-Nano to search the web for you.
|
||||
|
||||

|
||||

|
||||
|
||||
## Queries to Try
|
||||
|
||||
|
||||
@ -18,8 +18,7 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Model Management
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
This guide shows you how to add, customize, and delete models within Jan.
|
||||
|
||||
@ -42,13 +41,12 @@ The easiest way to get started is using Jan's built-in model hub (which is conne
|
||||
3. Choose a model that fits your needs & hardware specifications
|
||||
4. Click **Download** on your chosen model
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Jan will indicate if a model might be **Slow on your device** or **Not enough RAM** based on your system specifications.
|
||||
</Aside>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
#### 2. Import from [Hugging Face](https://huggingface.co/)
|
||||
|
||||
@ -62,14 +60,11 @@ You can download models with a direct link from Hugging Face:
|
||||
4. In Jan, paste the model ID to the **Search** bar in **Hub** page
|
||||
5. Select your preferred quantized version to download (if the option is available)
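If you'd rather fetch a GGUF file yourself and then use the local-file import described below, the Hugging Face CLI can download it directly. This is a sketch: it assumes the `huggingface_hub` CLI is installed, and the repository and file names are placeholders.

```bash
pip install -U "huggingface_hub[cli]"

# Download a single GGUF file from a repository (placeholder names)
huggingface-cli download TheOrg/SomeModel-GGUF some-model-Q4_K_M.gguf \
  --local-dir ~/Downloads/gguf-models
```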
|
||||
|
||||
<br/>
|
||||
**Copy the model ID.**
|
||||

|
||||
|
||||
<br/>
|
||||
**Paste it in Jan's Hub Search Bar.**
|
||||

|
||||
<br/>
|
||||
|
||||
#### 3. Import Local Files
|
||||
|
||||
@ -81,29 +76,24 @@ If you already have one or many GGUF model files on your computer:
|
||||
- **Duplicate:** Makes a copy of model files in Jan's directory
|
||||
4. Click **Import** to complete (check the [Jan Data Folder](./data-folder) section for more info)
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
You are responsible for your own **model configurations**; use them at your own risk. Misconfigurations may result in lower
quality or unexpected outputs. Learn about [model configurations here](./model-parameters).
|
||||
</Aside>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
#### 4. Manual Setup
|
||||
|
||||
For advanced users who want to add a specific model that is not available within the Jan **Hub**:
|
||||
|
||||
<ol>
|
||||
##### Step 1: Create Model File
|
||||
|
||||
1. Navigate to the [Jan Data Folder](./data-folder)
|
||||
2. Open `models` folder
|
||||
3. Create a new **Folder** for your model
|
||||
@ -154,7 +144,7 @@ For advanced users who want to add a specific model that is not available within
|
||||
Key fields to configure:
|
||||
1. The **Settings** array is where you can set the path or location of your model in your computer, the context
|
||||
length allowed, and the chat template expected by your model.
|
||||
2. The [**Parameters**](/docs/model-parameters) are the adjustable settings that affect how your model operates or
|
||||
2. The [**Parameters**](./model-parameters) are the adjustable settings that affect how your model operates or
|
||||
processes the data. The fields in the parameters array are typically general and can be used across different
|
||||
models. Here is an example of model parameters:
|
||||
|
||||
@ -168,35 +158,30 @@ models. Here is an example of model parameters:
|
||||
"presence_penalty": 0,
|
||||
}
|
||||
```
|
||||
</ol>
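To see how the pieces fit together, here is a minimal `model.json` sketch written from a terminal. The folder path and field names (especially inside `settings` and `engine`) are assumptions based on the description above and may differ between Jan releases, so compare against a `model.json` that Jan itself generated before relying on it.

```bash
# Illustrative only: create a model folder and a minimal model.json inside it
mkdir -p ~/jan/models/my-model
cat > ~/jan/models/my-model/model.json <<'EOF'
{
  "id": "my-model",
  "name": "My Custom Model",
  "settings": {
    "llama_model_path": "my-model.Q4_K_M.gguf",
    "ctx_len": 8192,
    "prompt_template": "{system_message}\n{prompt}"
  },
  "parameters": {
    "temperature": 0.7,
    "top_p": 0.95,
    "max_tokens": 2048,
    "stream": true,
    "frequency_penalty": 0,
    "presence_penalty": 0
  },
  "engine": "llama-cpp"
}
EOF
```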
|
||||
|
||||
### Delete Models
|
||||
1. Go to **Settings > Model Providers > Llama.cpp**
|
||||
2. Find the model you want to remove
|
||||
3. Select the three dots <EllipsisVertical width={16} height={16} style={{display:"inline"}}/> icon next to it and select **Delete Model**
|
||||
3. Select the three dots icon next to it and select **Delete Model**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
## Cloud Models
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
When using cloud models, be aware of any associated costs and rate limits from the providers. See detailed guide for
|
||||
each cloud model provider [here](/docs/remote-models/anthropic).
|
||||
each cloud model provider [here](./remote-models/anthropic).
|
||||
</Aside>
|
||||
|
||||
Jan supports connecting to various AI cloud providers that are OpenAI API-compatible, including: OpenAI (GPT-4o, o3,...),
|
||||
Anthropic (Claude), Groq, Mistral, and more.
|
||||
1. Navigate to **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers** section in the left sidebar, choose your preferred provider (OpenAI, Anthropic, etc.)
|
||||
3. Enter your API key
|
||||
4. The activated cloud models will be available in your model selector inside the **Chat** panel
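Optionally, you can sanity-check a key before adding it to Jan. For OpenAI, listing the available models is a cheap way to confirm the key works; other OpenAI-compatible providers expose similar endpoints under their own base URLs (a sketch, not an exhaustive guide):

```bash
curl https://api.openai.com/v1/models \
  -H "Authorization: Bearer $OPENAI_API_KEY"
```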
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
As soon as you add your key for a model provider like Anthropic or OpenAI, you will be able to pick one of their models to chat with.
|
||||
|
||||

|
||||
<br/>
|
||||
|
||||
@ -16,14 +16,13 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# E2B Code Sandbox MCP
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
E2B MCP provides isolated Python execution environments. Your AI can run actual code instead of just describing what code might do.
|
||||
|
||||
The real value emerges when you combine secure remote execution with Jan's flexible model selection. You can use
|
||||
local models for conversation and reasoning while offloading actual computation to E2B's sandboxes. This means you
|
||||
get the privacy and control of local models plus the computational power of cloud infrastructure, without the
|
||||
|
||||
|
||||
|
||||
complexity of managing Python environments or dependencies locally.
|
||||
|
||||
## Setup
|
||||
@ -41,7 +40,7 @@ complexity of managing Python environments or dependencies locally.
|
||||
|
||||

|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Don't forget that MCP gets enabled once you turn on Experimental Features in Jan's General settings.
|
||||
</Aside>
|
||||
|
||||
@ -59,7 +58,7 @@ Configure:
|
||||
- **Server Name**: `e2b-server`
|
||||
- **Command**: `npx`
|
||||
- **Arguments**: `@e2b/mcp-server`
|
||||
- **Environment Variables**:
|
||||
|
||||
- Key: `E2B_API_KEY`
|
||||
- Value: `your-api-key`
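The form fields above correspond to the JSON shape commonly used for MCP server definitions in other MCP clients. Treat the snippet below as an illustration of that mapping rather than a documented Jan config file:

```bash
# Illustrative only: the same E2B settings expressed as an MCP server entry
cat <<'EOF'
{
  "mcpServers": {
    "e2b-server": {
      "command": "npx",
      "args": ["@e2b/mcp-server"],
      "env": { "E2B_API_KEY": "your-api-key" }
    }
  }
}
EOF
```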
|
||||
|
||||
@ -76,7 +75,7 @@ The sandbox includes these packages by default:
|
||||
|
||||
**Data Analysis & Science:**
|
||||
- `pandas` (1.5.3) - Data manipulation
|
||||
- `numpy` (1.26.4) - Numerical computing
|
||||
|
||||
- `scipy` (1.12.0) - Scientific computing
|
||||
- `scikit-learn` (1.4.1) - Machine learning
|
||||
- `sympy` (1.12) - Symbolic mathematics
|
||||
@ -110,10 +109,10 @@ Additional packages can be installed as needed.
|
||||
## Examples
|
||||
|
||||
|
||||
For the following examples, we'll use Claude 4 Sonnet but you can use any local or remote
|
||||
|
||||
model with tool calling capabilities you'd like.
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Make sure you activate Tools on the model you're using.
|
||||
</Aside>
|
||||
|
||||
@ -121,13 +120,13 @@ Make sure you activate Tools on the model you're using.
|
||||
|
||||
### Basic Data Analysis
|
||||
|
||||
Start small. Open a new chat, confirm that the model has tools enabled and ask it to create a small dataset of 100 students with grades and study hours.
|
||||
|
||||
|
||||

|
||||
|
||||
|
||||
```
|
||||
Create a small dataset of 100 students with grades and study hours.
|
||||
|
||||
Calculate the correlation and create a scatter plot.
|
||||
```
|
||||
|
||||
@ -143,7 +142,7 @@ The model will:
|
||||

|
||||
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
As of `v0.6.5`, Jan can't display visualizations in the chat, but we're working to get this fixed in the next release.
|
||||
</Aside>
|
||||
|
||||
@ -165,7 +164,7 @@ For more intensive simulations, increase iterations gradually and monitor perfor
|
||||
### Machine Learning
|
||||
|
||||
```
|
||||
Create a simple 2-class dataset with 200 samples. Train a logistic regression
|
||||
|
||||
model and visualize the decision boundary.
|
||||
```
|
||||
|
||||
@ -178,7 +177,7 @@ The model will:
|
||||
### Time Series Analysis
|
||||
|
||||
```
|
||||
Generate daily temperature data for one year. Calculate moving averages
|
||||
|
||||
and identify seasonal patterns.
|
||||
```
|
||||
|
||||
@ -235,7 +234,7 @@ Returns structured data:
|
||||
```json
|
||||
{
|
||||
"type": "bar",
|
||||
"title": "Sample Bar Chart",
|
||||
"title": "Sample Bar Chart",
|
||||
"elements": [
|
||||
{"label": "A", "value": 10},
|
||||
{"label": "B", "value": 20},
|
||||
@ -249,7 +248,7 @@ Supported chart types: line, bar, scatter, pie, box plots.
|
||||
## Available Tools
|
||||
|
||||
- **run_code**: Execute Python code
|
||||
- **install_package**: Add Python packages
|
||||
|
||||
- **create_file**: Save files to sandbox
|
||||
- **read_file**: Access sandbox files
|
||||
- **list_files**: Browse sandbox contents
|
||||
@ -272,7 +271,7 @@ Supported chart types: line, bar, scatter, pie, box plots.
|
||||
- System dependencies may cause failures for some packages
|
||||
- Try alternative packages if installation fails
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
E2B has computational and memory limits. Break large operations into smaller chunks if needed.
|
||||
</Aside>
|
||||
|
||||
@ -281,9 +280,9 @@ Supported chart types: line, bar, scatter, pie, box plots.
|
||||
E2B is useful for:
|
||||
|
||||
- **Academic Research**: Statistical analysis, data visualization, hypothesis testing
|
||||
- **Data Science**: Exploratory data analysis, model prototyping, result validation
|
||||
|
||||
- **Financial Analysis**: Portfolio optimization, risk calculations, market simulations
|
||||
- **Scientific Computing**: Numerical simulations, mathematical modeling, algorithm testing
|
||||
- **Prototyping**: Quick algorithm validation, proof-of-concept development
|
||||
|
||||
The sandbox provides isolated execution without local environment setup or dependency management.
|
||||
|
||||
|
||||
@ -15,16 +15,14 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
# Exa Search MCP
|
||||
|
||||
|
||||
|
||||
[Exa MCP](https://docs.exa.ai/examples/exa-mcp) provides real-time web search capabilities for AI
|
||||
models. Instead of relying on training data, models can access current web content through Exa's search API.
|
||||
|
||||
## Available Tools
|
||||
|
||||
Exa MCP includes eight search functions:
|
||||
|
||||
- `web_search_exa`: General web search with content extraction
|
||||
- `research_paper_search`: Academic papers and research content
|
||||
- `company_research`: Company analysis and business intelligence
|
||||
@ -41,8 +39,8 @@ Exa MCP includes eight search functions:
|
||||
- Model with tool calling support
|
||||
- Node.js installed
|
||||
|
||||
<Aside type="info">
|
||||
|
||||
<Aside type="note">
|
||||
Tool calling support varies by model. Jan Nano 32k and 128k, Claude, Gemini, GPT-4o and above models work reliably. For both local and remote models,
|
||||
verify tool calling is enabled in model parameters.
|
||||
</Aside>
|
||||
|
||||
@ -68,12 +66,11 @@ verify tool calling is enabled in model parameters.
|
||||
|
||||
Click `+` in MCP Servers section:
|
||||
|
||||
|
||||
**Configuration:**
|
||||
- **Server Name**: `exa`
|
||||
- **Command**: `npx`
|
||||
- **Arguments**: `-y exa-mcp-server`
|
||||
- **Environment Variables**:
|
||||
|
||||
- Key: `EXA_API_KEY`
|
||||
- Value: `your-api-key`
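If tools don't show up later, it's worth confirming the key itself before debugging Jan. A minimal request against Exa's search API looks roughly like this; the endpoint and header are based on Exa's public API reference, so double-check their current docs:

```bash
curl https://api.exa.ai/search \
  -H "x-api-key: $EXA_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"query": "WebAssembly runtime engines written in Rust"}'
```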
|
||||
|
||||
@ -105,6 +102,7 @@ Start a new chat with a tool-enabled model. Exa tools will appear in the availab
|
||||
### Example Queries
|
||||
|
||||
**Current Events & Activities:**
|
||||
|
||||
```
|
||||
What is happening this week, mid July 2025, in Sydney, Australia?
|
||||
```
|
||||
@ -112,56 +110,67 @@ What is happening this week, mid July 2025, in Sydney, Australia?
|
||||

|
||||
|
||||
**Investment Research:**
|
||||
|
||||
```
|
||||
Find recent research papers about quantum computing startups that received Series A funding in 2024-2025
|
||||
```
|
||||
|
||||
**Tech Discovery:**
|
||||
|
||||
```
|
||||
Find GitHub repositories for WebAssembly runtime engines written in Rust with active development
|
||||
```
|
||||
|
||||
**Career Intelligence:**
|
||||
|
||||
```
|
||||
Search LinkedIn for AI safety researchers at major tech companies who published papers in the last 6 months
|
||||
```
|
||||
|
||||
**Competitive Analysis:**
|
||||
|
||||
```
|
||||
Research emerging competitors to OpenAI in the large language model space, focusing on companies founded after 2023
|
||||
```
|
||||
|
||||
**Travel & Local Research:**
|
||||
|
||||
```
|
||||
Find authentic local food experiences in Tokyo that aren't in typical tourist guides, mentioned in recent travel blogs
|
||||
```
|
||||
|
||||
**Academic Research:**
|
||||
|
||||
```
|
||||
Find recent papers about carbon capture technology breakthroughs published in Nature or Science during 2025
|
||||
```
|
||||
|
||||
**Creator Economy:**
|
||||
|
||||
```
|
||||
Research successful creators who transitioned from TikTok to longer-form content platforms in 2024-2025
|
||||
```
|
||||
|
||||
**Emerging Tech Trends:**
|
||||
|
||||
```
|
||||
Find startups working on brain-computer interfaces that have raised funding in the past 12 months
|
||||
```
|
||||
|
||||
**Health & Wellness:**
|
||||
|
||||
```
|
||||
Extract information about the latest longevity research findings from Peter Attia's recent podcast episodes
|
||||
```
|
||||
|
||||
**Regulatory Intelligence:**
|
||||
|
||||
```
|
||||
Find recent AI regulation developments in the EU that could impact US companies, focusing on July 2025 updates
|
||||
```
|
||||
|
||||
**Supply Chain Research:**
|
||||
|
||||
```
|
||||
Research companies developing sustainable packaging alternatives that have partnerships with major retailers
|
||||
```
|
||||
@ -205,11 +214,11 @@ LinkedIn searches for industry connections and expertise.
|
||||
- Check rate limits on your plan
|
||||
- Regenerate API key if needed
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
Exa has API rate limits. Check your plan limits to avoid interruptions.
|
||||
</Aside>
|
||||
|
||||
## Next Steps
|
||||
|
||||
|
||||
|
||||
Exa MCP enables real-time web search within Jan's privacy-focused environment. Models can access current
|
||||
information while maintaining local conversation processing.
|
||||
|
||||
@ -18,39 +18,39 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
# Using the Model Context Protocol (MCP) in Jan
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph "What is MCP?"
|
||||
You[You using Jan Desktop]
|
||||
Claude[Jan AI Assistant]
|
||||
|
||||
|
||||
subgraph "Your Connected Tools"
|
||||
Files[📁 Your Files<br/>Documents, folders,<br/>text files]
|
||||
Database[📊 Your Data<br/>Spreadsheets,<br/>databases]
|
||||
WebServices[🌐 Online Services<br/>GitHub, Slack,<br/>Google Drive]
|
||||
Custom[🔧 Custom Tools<br/>Special programs<br/>you've added]
|
||||
end
|
||||
|
||||
|
||||
subgraph "What Jan Can Do"
|
||||
Read[Read & Understand<br/>- View your files<br/>- Check your data<br/>- See updates]
|
||||
Action[Take Actions<br/>- Search for info<br/>- Create content<br/>- Run commands]
|
||||
Templates[Use Templates<br/>- Common tasks<br/>- Saved prompts<br/>- Workflows]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
You --> Claude
|
||||
Claude -->|"Can I see this file?"| Files
|
||||
Claude -->|"What's in my database?"| Database
|
||||
Claude -->|"Check my GitHub"| WebServices
|
||||
Claude -->|"Run this tool"| Custom
|
||||
|
||||
|
||||
Files --> Read
|
||||
Database --> Read
|
||||
WebServices --> Action
|
||||
Custom --> Templates
|
||||
|
||||
|
||||
style You fill:transparent
|
||||
style Claude fill:transparent
|
||||
style Files fill:transparent
|
||||
@ -89,14 +89,14 @@ access to search your local codebase, query a database, or interact with web API
|
||||
* **Flexibility:** Because the interface is standardized, you can swap out models or tools with minimal friction,
|
||||
making your workflows more modular and adaptable over time.
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
Please note that not all models that you can download and use, whether in Jan or other tools, may be good at
|
||||
tool calling or compatible with MCP. Make sure that the model you choose is MCP-compliant before integrating
|
||||
it into your workflows. This information might be listed in the model card, or you may need to test the model yourself to
verify its tool-calling capabilities.
|
||||
</Aside>
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
To use MCP effectively, ensure your AI model supports tool calling capabilities:
|
||||
- For cloud models (like Claude or GPT-4): Verify tool calling is enabled in your API settings
|
||||
- For local models: Enable tool calling in the model parameters [click the edit button in Model Capabilities](/docs/model-parameters#model-capabilities-edit-button)
|
||||
|
||||
@ -1,125 +0,0 @@
|
||||
---
|
||||
title: Jan Privacy Policy
|
||||
description: Jan's data collection practices, privacy measures, and your rights. Learn how we protect your data and maintain transparency.
|
||||
---
|
||||
|
||||
# Privacy Policy
|
||||
|
||||
<div className="text-sm text-gray-500 mt-2 mb-8">
|
||||
Last Updated: January 16, 2025
|
||||
</div>
|
||||
|
||||
## Introduction
|
||||
|
||||
We are committed to protecting your privacy and ensuring you have control over your data. This Privacy Policy outlines what information Menlo Research Pte Ltd (the "Company") collects from users of the Jan desktop app and website (the "Services"), how the Company uses that information, and the measures the Company takes to safeguard that information.
|
||||
|
||||
## 1. Data Collection and Consent
|
||||
|
||||
### Explicit Consent
|
||||
|
||||
The Company does not collect any data until you explicitly allow tracking.
|
||||
|
||||
### Tracking Preferences
|
||||
|
||||
Upon first launching the Jan desktop app or visiting the website, you will be prompted to set your tracking preferences. These preferences can be modified at any time via the app's Settings menu or the website's Privacy Settings.
|
||||
|
||||
### Legal Basis
|
||||
|
||||
Pursuant to the European Union's General Data Protection Regulation (EU) 2016/679 (the "GDPR"), the Company processes data based on your explicit consent (GDPR Article 6(1)(a)). This means:
|
||||
|
||||
- The Company only processes your data after receiving clear, affirmative consent from you.
|
||||
- You may withdraw your consent at any time through the app's Settings menu or the website's Privacy Settings.
|
||||
- If you withdraw your consent, the Company will stop optional data collection from the effective date of withdrawal.
|
||||
- Your withdrawal of consent will not affect the lawfulness of processing before its withdrawal.
|
||||
|
||||
## 2. Data We Do Not Collect
|
||||
|
||||
Regardless of your analytics permissions, the Company does not collect the following:
|
||||
|
||||
- Chat History: Your conversations with the Jan app are private and inaccessible to the Company.
|
||||
- Chat Settings: Your personalized settings remain solely with you.
|
||||
- Language Models: The specific language models you use are not tracked.
|
||||
|
||||
## 3. Uses of Information
|
||||
|
||||
To build a reliable and user-friendly product offering, understanding how the Jan app is used is essential. If you permit tracking, the Company collects product analytics data to:
|
||||
|
||||
- Improve User Experience: Enhance app functionality based on usage patterns; and
|
||||
- Measure Engagement: Assess active users and retention rates to ensure ongoing value.
|
||||
|
||||
## 4. Product Analytics
|
||||
|
||||
### Data Collected
|
||||
|
||||
When you opt-in to tracking, we collect the following anonymous data:
|
||||
|
||||
- Active Users: Number of daily active users to gauge engagement.
|
||||
- Retention Rates: Track if users continue to find value in the Jan app over time.
|
||||
|
||||
### Data Anonymity
|
||||
|
||||
- User ID: Analytics data is tied to a randomly generated user ID, ensuring no link to your personal identity.
|
||||
- Privacy Assurance: Your chat history and personal data are not tracked or linked to your usage data.
|
||||
|
||||
## 5. What We Do Not Track
|
||||
|
||||
Even with analytics permissions granted, the Company does not track the following:
|
||||
|
||||
- Conversations: Your interactions with the Jan app remain private.
|
||||
- Files: The Company does not scan, upload, or view your files.
|
||||
- Personal Identity: The Company does not collect personally identifiable information about users.
|
||||
- Prompts: Your prompts and prompt templates are not monitored.
|
||||
- Conversation Metrics: The Company does not track context length or conversation length.
|
||||
- Model Usage: The specific models you use or their types are not tracked.
|
||||
- Storage: You retain full control over storing your files and logs, and your privacy is prioritized.
|
||||
|
||||
## 6. Using Cloud Models
|
||||
|
||||
The Jan app allows you to connect to cloud-based model APIs (e.g. GPT, Claude models).
|
||||
|
||||
- Data Handling: The API provider processes your messages directly; the Jan app does not access or store these messages.
|
||||
- Local Models: Choosing local models ensures all data remains on your device, with no external access.
|
||||
|
||||
## 7. Data Storage and Processing
|
||||
|
||||
### Analytics Provider
|
||||
|
||||
The Company uses PostHog EU for analytics, which ensures all data is processed within the European Union.
|
||||
|
||||
### Data Security
|
||||
|
||||
- Encryption: All data transfers are encrypted using Transport Layer Security (TLS) to ensure secure transmission.
|
||||
- Storage: PostHog securely manages the data the Company collects. For more information, please refer to PostHog's GDPR documentation.
|
||||
|
||||
## 8. Data Retention
|
||||
|
||||
- Retention Period: The Company retains analytics data for up to 12 months unless otherwise required to comply with any applicable legal requirements.
|
||||
- Deletion Requests: If you wish to request the deletion of your analytics data, you may do so by sending a written request to hello@jan.ai.
|
||||
|
||||
## 9. Your Rights and Choices
|
||||
|
||||
- Access and Control: You may access, modify, or delete your tracking preferences at any time through the Jan app or website settings.
|
||||
- Data Requests: If you have any requests related to your data, please address them to hello@jan.ai.
|
||||
|
||||
## 10. Children's Privacy
|
||||
|
||||
Our Services are not targeted at children under the age of 13. The Company does not knowingly collect data from children under the age of 13. If the Company becomes aware that data of persons under the age of 13 has been collected without verifiable parental consent, the Company will take appropriate actions to delete this information.
|
||||
|
||||
## 11. Changes to the Privacy Policy
|
||||
|
||||
The Company reserves the right, at its sole discretion, to update this Privacy Policy at any time to reflect changes in the practices or legal requirements of the Company. The Company will use reasonable efforts to notify you of any significant changes via app notifications, the website, or email. Your continued use of the Services following such updates means you accept those changes.
|
||||
|
||||
## 12. Cookies and Tracking Technologies
|
||||
|
||||
Our website utilizes cookies to:
|
||||
|
||||
- Enhance user experience; and
|
||||
- Measure website traffic and usage patterns.
|
||||
|
||||
Most browsers allow you to remove or manage cookie functions and adjust your privacy and security preferences.
|
||||
|
||||
For more details, please refer to our Cookie Policy.
|
||||
|
||||
## 13. Contact Us
|
||||
|
||||
For any questions or concerns about this Privacy Policy or our data practices, please contact hello@jan.ai.
|
||||
@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Jan's Privacy Approach
|
||||
description: Jan is an app that allows you to own your AI. We prioritize your control over your data and explain what data we collect and why.
|
||||
title: Jan Privacy Policy
|
||||
description: Jan's data collection practices, privacy measures, and your rights. Learn how we protect your data and maintain transparency.
|
||||
keywords:
|
||||
[
|
||||
Jan AI,
|
||||
@ -12,66 +12,129 @@ keywords:
|
||||
large language model,
|
||||
about Jan,
|
||||
desktop application,
|
||||
thinking machine,
|
||||
jan vision,
|
||||
privacy policy,
|
||||
data protection,
|
||||
]
|
||||
---
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
---
|
||||
<div className="text-sm text-gray-500 mt-2 mb-8">
|
||||
Last Updated: January 16, 2025
|
||||
</div>
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
Jan is your AI. Here's what we do with data.
|
||||
|
||||
# Privacy
|
||||
|
||||
Jan is your AI. Period. Here's what we do with data.
|
||||
|
||||
<Aside>
|
||||
Full privacy policy lives [here](/docs/privacy-policy), if you're into that sort of thing.
|
||||
<Aside type="note">
|
||||
We don't collect any data without your explicit permission.
|
||||
</Aside>
|
||||
|
||||
<Aside type="info">
|
||||
Zero data collection until you say so. Scout's honor.
|
||||
</Aside>
|
||||
## 1. Data Collection and Consent
|
||||
|
||||
You'll choose tracking preferences at first launch. Change them anytime in Settings.
|
||||
### No Data Collection Until You Allow It
|
||||
|
||||
Jan will **never** peek at your chats, settings, or model choices. Not even if you ask nicely.
|
||||
Menlo Research Pte Ltd (the "Company") doesn't collect anything until you explicitly allow tracking.
|
||||
|
||||
## Data We Track (With Permission)
|
||||
### Your Choice
|
||||
|
||||
We track basic app usage to improve Jan. That's it.
|
||||
You'll choose tracking preferences at first launch. Change them anytime in Settings or Privacy Settings.
|
||||
|
||||
### Legal Basis (GDPR)
|
||||
|
||||
Under EU GDPR Article 6(1)(a), we process data based on your explicit consent:
|
||||
|
||||
- Clear consent required before any data collection
|
||||
- Withdraw consent anytime through Settings
|
||||
- Withdrawal doesn't affect previous lawful processing
|
||||
- Processing stops immediately upon withdrawal
|
||||
|
||||
## 2. What We Never Collect
|
||||
|
||||
Jan will **never** access your chats, settings, or model choices without permission:
|
||||
|
||||
- **Chat History**: Your conversations stay private
|
||||
- **Chat Settings**: Your personalized settings remain with you
|
||||
- **Language Models**: We don't track which models you use
|
||||
- **Files**: No scanning, uploading, or viewing
|
||||
- **Personal Identity**: No personally identifiable information
|
||||
- **Prompts**: Your prompts and templates aren't monitored
|
||||
- **Conversation Metrics**: No context or conversation length tracking
|
||||
- **Model Usage**: Specific models and types aren't tracked
|
||||
|
||||
## 3. Data We Track (With Permission)
|
||||
|
||||
We track basic app usage to improve Jan.
|
||||
|
||||
### Product Analytics
|
||||
|
||||
When allowed, we count:
|
||||
When allowed, we collect:
|
||||
|
||||
- **Active Users**: Daily Jan-thusiasts
|
||||
- **Retention**: Who sticks around
|
||||
- **Active Users**: Daily active users to gauge engagement
|
||||
- **Retention**: User retention metrics to ensure ongoing value
|
||||
|
||||
Everything's tied to a random ID - not you. Your chats stay yours.
|
||||
Everything's tied to a random ID - not your personal information. Your chats remain private.
|
||||
|
||||
## What We Don't Track
|
||||
|
||||
<Aside type="info">
|
||||
Jan respects boundaries. We're committed to protecting your privacy.
|
||||
<Aside type="note">
|
||||
Your data stays private. Analytics are anonymous and aggregated.
|
||||
</Aside>
|
||||
|
||||
- No chat snooping
|
||||
- No file scanning
|
||||
- No identity tracking
|
||||
- No prompt logging
|
||||
- No conversation monitoring
|
||||
- No model tracking
|
||||
## 4. Cloud Model Use
|
||||
|
||||
Your private stuff stays private.
|
||||
Cloud models (like GPT, Claude) need to see your messages to work. That's between you and the cloud provider - Jan facilitates the connection.
|
||||
|
||||
## Cloud Model Use
|
||||
- **API Processing**: Cloud providers process your messages directly
|
||||
- **Jan Access**: We don't access or store these messages
|
||||
- **Local Models**: Keep everything on your device with no external access
|
||||
|
||||
Cloud models (like GPT, Claude) need to see your messages to work. That's between you and them - Jan just makes
|
||||
the introduction. Local models keep everything at home where the neighbors can't gossip.
|
||||
## 5. Data Storage and Security
|
||||
|
||||
## Data Storage
|
||||
[PostHog EU](https://posthog.com/eu) handles our analytics. All EU-based, GDPR-compliant, properly buttoned
|
||||
up. Details in their [GDPR docs](https://posthog.com/docs/privacy/gdpr-compliance).
|
||||
### Analytics Provider
|
||||
|
||||
[PostHog EU](https://posthog.com/eu) handles our analytics. All EU-based, GDPR-compliant data processing.
|
||||
|
||||
### Security Measures
|
||||
|
||||
- **Encryption**: All transfers use TLS encryption
|
||||
- **EU Processing**: Data processed within European Union
|
||||
- **Secure Storage**: PostHog manages data securely
|
||||
|
||||
Details in their [GDPR docs](https://posthog.com/docs/privacy/gdpr-compliance).
|
||||
|
||||
## 6. Data Retention
|
||||
|
||||
- **Retention Period**: Analytics data kept for up to 12 months
|
||||
- **Deletion Requests**: Request deletion by emailing hello@jan.ai
|
||||
- **Legal Requirements**: May retain longer if legally required
|
||||
|
||||
## 7. Your Rights
|
||||
|
||||
- **Access and Control**: Modify tracking preferences anytime in Settings
|
||||
- **Data Requests**: Contact hello@jan.ai for any data-related requests
|
||||
- **Withdrawal**: Stop data collection immediately through Settings
|
||||
|
||||
## 8. Children's Privacy
|
||||
|
||||
Our services are not targeted at children under 13. We don't knowingly collect data from children under 13. If we become aware of such collection, we'll delete the information.
|
||||
|
||||
## 9. Cookies and Tracking
|
||||
|
||||
Our website uses cookies to:
|
||||
|
||||
- Enhance user experience
|
||||
- Measure website traffic and usage
|
||||
|
||||
Most browsers let you manage cookies and adjust privacy preferences. See our Cookie Policy for details.
|
||||
|
||||
## 10. Policy Changes
|
||||
|
||||
We may update this policy to reflect changes in our practices or legal requirements. We'll notify you of significant changes via:
|
||||
|
||||
- App notifications
|
||||
- Website announcements
|
||||
- Email (if provided)
|
||||
|
||||
Continued use means you accept the changes.
|
||||
|
||||
## 11. Contact Us
|
||||
|
||||
Questions about privacy or data practices? Contact hello@menlo.ai.
|
||||
|
||||
@ -15,17 +15,16 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Anthropic
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Jan supports all of [Anthropic's models](https://anthropic.com/) via API integration, allowing
|
||||
you to chat with Claude's latest Opus, Sonnet and Haiku models.
|
||||
|
||||
## Integrate Anthropic API with Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Get Your API Key
|
||||
|
||||
1. Visit [Anthropic Console](https://console.anthropic.com/settings/keys) and sign in
|
||||
2. Create & copy a new API key or copy your existing one
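Optionally, you can confirm the key works before adding it to Jan. A minimal sketch using Anthropic's Messages API (the endpoint and headers below are Anthropic's documented ones; the model ID is only an example, so swap in any model listed in your console):

```bash
# Quick key check against Anthropic's Messages API (model ID is an example)
curl https://api.anthropic.com/v1/messages \
  -H "x-api-key: $ANTHROPIC_API_KEY" \
  -H "anthropic-version: 2023-06-01" \
  -H "content-type: application/json" \
  -d '{"model": "claude-3-5-haiku-latest", "max_tokens": 16, "messages": [{"role": "user", "content": "ping"}]}'
```

A successful response with a short reply means the key is valid and has credits.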
|
||||
|
||||
@ -35,13 +34,11 @@ Ensure your API key has sufficient credits
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers**, select **Anthropic**
|
||||
3. Insert your **API Key**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
### Step 3: Start Using Anthropic's Models
|
||||
|
||||
@ -49,7 +46,6 @@ Ensure your API key has sufficient credits
|
||||
2. Select an Anthropic model from **model selector**
|
||||
3. Start chatting
|
||||
|
||||
</ol>
|
||||
|
||||
## Available Anthropic Models
|
||||
|
||||
|
||||
@ -15,17 +15,16 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Cohere
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Jan supports [Cohere](https://cohere.com/) API integration, allowing you to use Cohere's
|
||||
models (Command, Command-R and more) through Jan's interface.
|
||||
|
||||
## Integrate Cohere API with Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Get Your API Key
|
||||
|
||||
1. Visit [Cohere Dashboard](https://dashboard.cohere.com/api-keys) and sign in
|
||||
2. Create a new API key and/or copy your existing one
|
||||
|
||||
@ -35,20 +34,19 @@ Ensure your API key has sufficient credits.
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers**, select **Cohere**
|
||||
3. Insert your **API Key**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
### Step 3: Start Using Cohere's Models
|
||||
|
||||
1. Jump into any existing **Chat** or create a new one
|
||||
2. Select a Cohere model from **model selector** options
|
||||
3. Start chatting
|
||||
</ol>
|
||||
|
||||
|
||||
## Available Cohere Models
|
||||
|
||||
|
||||
@ -15,15 +15,14 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Google
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Jan supports [Google](https://ai.google/get-started/our-models/) API integration, allowing you to use Google models (like Gemini series) through Jan's interface.
|
||||
|
||||
## Integrate Google API with Jan
|
||||
|
||||
<ol>
|
||||
### Step 1: Get Your API Key
|
||||
|
||||
1. Visit [Google AI Studio](https://aistudio.google.com/app/apikey) and sign in
|
||||
2. Create & copy a new API key or copy your existing one
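Optionally, you can confirm the key is active before configuring Jan by listing the models it can access. A minimal sketch against the Generative Language API (this assumes the key was created in Google AI Studio):

```bash
# List the models available to your key
curl "https://generativelanguage.googleapis.com/v1beta/models?key=$GEMINI_API_KEY"
```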
|
||||
|
||||
@ -33,20 +32,19 @@ Ensure your API key has sufficient credits
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to the **Settings** page (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers**, select **Gemini**
|
||||
3. Insert your **API Key**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
### Step 3: Start Using Google's Models
|
||||
|
||||
1. Go to any existing **Chat** or create a new one
|
||||
2. Select a Gemini model from **model selector**
|
||||
3. Start chatting
|
||||
</ol>
|
||||
|
||||
|
||||
## Available Google Models
|
||||
|
||||
|
||||
@ -15,15 +15,14 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Groq
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Jan supports [Groq](https://groq.com/) API integration, allowing you to use Groq's high-performance LLM models (LLaMA 2, Mixtral and more) through Jan's interface.
|
||||
|
||||
## Integrate Groq API with Jan
|
||||
|
||||
<ol>
|
||||
### Step 1: Get Your API Key
|
||||
|
||||
1. Visit [Groq Console](https://console.groq.com/keys) and sign in
|
||||
2. Create & copy a new API key or copy your existing one
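Optionally, verify the key before adding it to Jan. Groq exposes an OpenAI-compatible API, so listing models is enough to confirm the key works (a sketch, using the base URL from Groq's public docs):

```bash
# Confirm the key by listing models on Groq's OpenAI-compatible endpoint
curl https://api.groq.com/openai/v1/models \
  -H "Authorization: Bearer $GROQ_API_KEY"
```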
|
||||
|
||||
@ -33,13 +32,11 @@ Ensure your API key has sufficient credits
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to the **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers**, select **Groq**
|
||||
3. Insert your **API Key**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
### Step 3: Start Using Groq's Models
|
||||
@ -47,7 +44,6 @@ Ensure your API key has sufficient credits
|
||||
1. Jump into any existing **Chat** or create a new one
|
||||
2. Select a Groq model from **model selector**
|
||||
3. Start chatting
|
||||
</ol>
|
||||
|
||||
## Available Models Through Groq
|
||||
|
||||
|
||||
@ -16,17 +16,15 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
|
||||
# Mistral AI
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Jan supports all models available via the [Mistral AI](https://mistral.ai/) API, allowing you to use Mistral's
|
||||
powerful models (Mistral Large, Mistral Medium, Mistral Small and more) through Jan's interface.
|
||||
|
||||
## Integrate Mistral AI with Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Get Your API Key
|
||||
|
||||
1. Visit the [Mistral AI Platform](https://console.mistral.ai/api-keys/) and sign in
|
||||
2. Create & copy a new API key or copy your existing one
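Optionally, verify the key before adding it to Jan by listing the models it can reach (a sketch, using the endpoint from Mistral's public docs):

```bash
# Confirm the key by listing available Mistral models
curl https://api.mistral.ai/v1/models \
  -H "Authorization: Bearer $MISTRAL_API_KEY"
```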
|
||||
|
||||
@ -36,20 +34,18 @@ Ensure your API key has sufficient credits
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to the **Settings** page (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers**, select **Mistral AI**
|
||||
3. Insert your **API Key**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
### Step 3: Start Using Mistral's Models
|
||||
|
||||
1. Open any existing **Chat** or create a new one
|
||||
2. Select a Mistral model from **model selector**
|
||||
3. Start chatting
|
||||
</ol>
|
||||
|
||||
|
||||
## Available Mistral Models
|
||||
|
||||
|
||||
@ -17,7 +17,7 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
# OpenAI
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
Jan supports most [OpenAI](https://openai.com/) models as well as the many OpenAI-compatible APIs out there,
|
||||
allowing you to use all models from OpenAI (GPT-4o, o3 and even those from Together AI, DeepSeek, Fireworks
|
||||
@ -25,8 +25,6 @@ and more) through Jan's interface.
|
||||
|
||||
## Integrate OpenAI API with Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Get Your API Key
|
||||
1. Visit the [OpenAI Platform](https://platform.openai.com/api-keys) and sign in
|
||||
2. Create & copy a new API key or copy your existing one
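Optionally, confirm the key works before adding it to Jan. A quick sketch that lists the models your key can access:

```bash
# Confirm the key by listing models (a 401 response means the key is invalid)
curl https://api.openai.com/v1/models \
  -H "Authorization: Bearer $OPENAI_API_KEY"
```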
|
||||
@ -37,13 +35,12 @@ Ensure your API key has sufficient credits
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to the Settings page (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under Remote Engines, select OpenAI
|
||||
3. Insert your API Key
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
### Step 3: Start Using OpenAI's Models
|
||||
|
||||
@ -51,7 +48,6 @@ In any existing Threads or create a new one
|
||||
Select an OpenAI model from model selector
|
||||
Start chatting
|
||||
|
||||
</ol>
|
||||
|
||||
## Available OpenAI Models
|
||||
|
||||
|
||||
@ -19,8 +19,6 @@ keywords:
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
# OpenRouter
|
||||
|
||||
## Integrate OpenRouter with Jan
|
||||
|
||||
[OpenRouter](https://openrouter.ai/) is a tool that gathers AI model APIs and provides access to all
|
||||
@ -34,8 +32,6 @@ OpenRouter even offers a few free models! 🙌
|
||||
|
||||
## Integrate OpenRouter with Jan
|
||||
|
||||
<ol>
|
||||
|
||||
### Step 1: Get Your API Key
|
||||
1. Visit [OpenRouter](https://openrouter.ai/keys) and sign in
|
||||
2. Create & copy a new API key or copy your existing one
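Optionally, confirm the key before adding it to Jan by listing the models it can reach (a sketch, using OpenRouter's OpenAI-compatible endpoint):

```bash
# Confirm the key by listing models available through OpenRouter
curl https://openrouter.ai/api/v1/models \
  -H "Authorization: Bearer $OPENROUTER_API_KEY"
```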
|
||||
@ -46,20 +42,17 @@ Ensure your API key has sufficient credits. OpenRouter credits work across all a
|
||||
|
||||
### Step 2: Configure Jan
|
||||
|
||||
1. Navigate to the **Settings** page (<Settings width={16} height={16} style={{display:"inline"}}/>)
|
||||
1. Navigate to the **Settings** page
|
||||
2. Under **Model Providers**, select **OpenRouter**
|
||||
3. Insert your **API Key**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
### Step 3: Start Using OpenRouter Models
|
||||
|
||||
1. Pick any existing **Chat** or create a new one
|
||||
2. Select any model from **model selector** under OpenRouter
|
||||
3. Start chatting
|
||||
</ol>
|
||||
|
||||
## Available Models Through OpenRouter
|
||||
|
||||
|
||||
@ -20,11 +20,9 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
|
||||
|
||||
# Settings
|
||||
|
||||
To access the **Settings**, click <Settings width={16} height={16} style={{display:"inline"}}/> icon in the bottom left corner of Jan.
|
||||
To access the **Settings**, click the **Settings** (⚙️) icon in the bottom left corner of Jan.
|
||||
|
||||
## Model Management
|
||||
|
||||
@ -33,7 +31,7 @@ Manage your installed AI models in **Settings** > **Model Providers**:
|
||||
### Import Models
|
||||
- **From Hugging Face:**
|
||||
- Enter a model's Hugging Face ID (e.g., `org/model_name_or_id`) in the Hub's search bar.
|
||||
- **Note:** Some models require a Hugging Face Access Token. Enter your token in **Settings > Model Providers > Hugging Face Access Token**.
|
||||
- **Note:** Some models require a Hugging Face Access Token. Enter your token in **Settings > Model Providers > Hugging Face**.
|
||||
- **From Local Files:**
|
||||
- Click **Import Model** and select your GGUF files.
|
||||
|
||||
@ -75,29 +73,26 @@ Monitor and manage system resources at **Settings > Hardware**:
|
||||
- **CPU, RAM, GPU**: View usage and specs
|
||||
- **GPU Acceleration**: Enable/disable and configure GPU settings
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
## Preferences
|
||||
|
||||
### Appearance & Theme
|
||||
|
||||
Control the visual theme of Jan's interface with any color combo you'd like. You can also control the colors used in code blocks.
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
### Spell Check
|
||||
|
||||
Jan includes a built-in spell check feature to help catch typing errors in your messages.
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
## Privacy
|
||||
At **Settings** (<Settings width={16} height={16} style={{display:"inline"}}/>) > **Privacy**, you can control analytics & logs in Jan:
|
||||
|
||||
At **Settings** > **Privacy**, you can control anonymous analytics in Jan:
|
||||
|
||||
### Analytics
|
||||
Jan is built with privacy at its core. By default, no data is collected. Everything stays local on your device.
|
||||
@ -105,13 +100,11 @@ You can help improve Jan by sharing anonymous usage data:
|
||||
1. Toggle on **Analytics** to share anonymous data
|
||||
2. You can change this setting at any time
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Read more about what we collect from opt-in users at [Privacy](/docs/privacy).
|
||||
</Aside>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
### Log Management
|
||||
|
||||
@ -119,38 +112,33 @@ Read more about that we collect with opt-in users at [Privacy](/docs/privacy).
|
||||
- Logs are stored at:
|
||||
- App log: `~/Library/Application\ Support/jan/data/logs/app.log`
|
||||
- Cortex log: `~/Library/Application\ Support/jan/data/logs/cortex.log`
|
||||
- To open logs from Jan's interface: at **Logs**, click <FolderOpen width={16} height={16} style={{display:"inline"}}/> icon to open App Logs & Cortex Logs:
|
||||
- To open logs from Jan's interface: at **Logs**, click the folder icon to open App Logs & Cortex Logs:
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
**2. Clear Logs**
|
||||
|
||||
Jan retains your logs for only **24 hours**. To remove all logs from Jan, at **Clear Logs**, click the **Clear** button:
|
||||
|
||||
<Aside type="warning">
|
||||
<Aside type="caution">
|
||||
This action cannot be undone.
|
||||
</Aside>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
### Jan Data Folder
|
||||
Jan stores your data locally in your own filesystem in a universal file format. See detailed [Jan Folder Structure](docs/data-folder#folder-structure).
|
||||
Jan stores your data locally in your own filesystem in a universal file format. See detailed [Jan Folder Structure](./data-folder#folder-structure).
|
||||
|
||||
**1. Open Jan Data Folder**
|
||||
|
||||
At **Jan Data Folder**, click <FolderOpen width={16} height={16} style={{display:"inline"}}/> icon to open Jan application's folder:
|
||||
<br/>
|
||||
At **Jan Data Folder**, click the folder icon to open the Jan application folder:
|
||||
|
||||

|
||||
<br/>
|
||||
|
||||
**2. Edit Jan Data Folder**
|
||||
|
||||
1. At **Jan Data Folder**, click <Pencil width={16} height={16} style={{display:"inline"}}/> icon to edit Jan application's folder
|
||||
1. At **Jan Data Folder**, click the pencil icon to edit the Jan application folder
|
||||
2. Choose a new directory & click **Select**, make sure the new folder is empty
|
||||
3. Confirmation pop-up shows up:
|
||||
|
||||
@ -160,20 +148,21 @@ An app restart will be required afterward.
|
||||
|
||||
4. Click **Yes, Proceed**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
### HTTPs Proxy
|
||||
HTTPS Proxy encrypts data between your browser and the internet, making it hard for outsiders to intercept or read. It also helps you maintain your privacy and security while bypassing regional restrictions on the internet.
|
||||
|
||||
<Aside type="info">
|
||||
HTTPS Proxy encrypts data between your browser and the internet, making it hard for outsiders to intercept
|
||||
or read. It also helps you maintain your privacy and security while bypassing regional restrictions on the internet.
|
||||
|
||||
<Aside type="note">
|
||||
- Model download speeds may be affected due to the encryption/decryption process and your cloud service provider's networking
|
||||
- HTTPS Proxy does not affect the remote model usage.
|
||||
</Aside>
|
||||
|
||||
1. **Enable** the proxy toggle
|
||||
2. Enter your proxy server details in the following format:
|
||||
|
||||
```
|
||||
http://<user>:<password>@<domain or IP>:<port>
|
||||
```
|
||||
@ -183,9 +172,7 @@ Where:
|
||||
- `<domain or IP>`: Your proxy server's domain name or IP address
|
||||
- `<port>`: The port number for the proxy server
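For example, a hypothetical entry might look like `http://alice:s3cret@proxy.example.com:8080` (user, password, host, and port here are all placeholders).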
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
**Ignore SSL Certificates**
|
||||
|
||||
@ -194,19 +181,19 @@ This setting allows Jan to accept self-signed or unverified SSL certificates. Th
|
||||
- Testing in development environments
|
||||
- Connecting through specialized network security setups
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Only enable this option if you trust your network environment.
|
||||
</Aside>
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
|
||||
### Factory Reset
|
||||
Reset to Factory Settings restores Jan to its initial state by erasing all user data, including downloaded models and chat history. This action is irreversible and should only be used as a last resort when experiencing serious application issues.
|
||||
|
||||
<Aside type="warning">
|
||||
Reset to Factory Settings restores Jan to its initial state by erasing all user data, including downloaded
|
||||
models and chat history. This action is irreversible and should only be used as a last resort when experiencing
|
||||
serious application issues.
|
||||
|
||||
<Aside type="caution">
|
||||
This action cannot be undone. All data will be permanently deleted.
|
||||
</Aside>
|
||||
|
||||
@ -217,14 +204,12 @@ Only use factory reset if:
|
||||
|
||||
To begin the process:
|
||||
1. At **Reset to Factory Settings**, click **Reset** button
|
||||
<br/>
|
||||
|
||||

|
||||
<br/>
|
||||
|
||||
2. In the confirmation dialog:
|
||||
- Type the word **RESET** to confirm
|
||||
- Optionally check **Keep the current app data location** to maintain the same data folder
|
||||
- Click **Reset Now**
|
||||
3. App restart is required upon confirmation
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
@ -1,399 +1,344 @@
|
||||
---
|
||||
title: Troubleshooting
|
||||
description: Explore solutions for common issues and optimize Jan's performance with this comprehensive troubleshooting guide.
|
||||
description: Fix common issues and optimize Jan's performance with this comprehensive guide.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
Customizable Intelligence, LLM,
|
||||
local AI,
|
||||
privacy focus,
|
||||
free and open source,
|
||||
private and offline,
|
||||
conversational AI,
|
||||
no-subscription fee,
|
||||
large language models,
|
||||
troubleshooting,
|
||||
error codes,
|
||||
broken build,
|
||||
something amiss,
|
||||
unexpected token,
|
||||
undefined issue,
|
||||
permission denied,
|
||||
error fixes,
|
||||
performance issues,
|
||||
GPU problems,
|
||||
installation issues,
|
||||
common errors,
|
||||
local AI,
|
||||
technical support,
|
||||
]
|
||||
---
|
||||
|
||||
import { Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
## Getting Help: Error Logs
|
||||
|
||||
# Troubleshooting
|
||||
When Jan isn't working properly, error logs help identify the problem. Here's how to get them:
|
||||
|
||||
## How to Get Error Logs
|
||||
### Quick Access to Logs
|
||||
|
||||
Error logs are essential for troubleshooting issues and getting help from the Jan team. To get error logs from Jan, follow the steps below:
|
||||
**In Jan Interface:**
|
||||
1. Look for **System Monitor** in the footer
|
||||
2. Click **App Log**
|
||||
|
||||
#### Through Jan Interface
|
||||
|
||||
1. Open **System Monitor** in the footer
|
||||
2. Choose **App Log**
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
#### Through Terminal
|
||||
**Application Logs**
|
||||
**Via Terminal:**
|
||||
|
||||
<Tabs>
|
||||
<TabItem label="macOS/Linux">
|
||||
|
||||
**Application Logs:**
|
||||
```bash
|
||||
tail -n 50 ~/Library/Application\ Support/Jan/data/logs/app.log
|
||||
```
|
||||
**Server Logs**
|
||||
|
||||
**Server Logs:**
|
||||
```bash
|
||||
tail -n 50 ~/Library/Application\ Support/Jan/data/logs/cortex.log
|
||||
```
|
||||
|
||||
<Aside type="warning">
|
||||
Ensure to redact any private or sensitive information when sharing logs or error details. We retain your logs for only 24 hours.
|
||||
</Aside>
|
||||
</TabItem>
|
||||
<TabItem label="Windows">
|
||||
|
||||
**Application Logs:**
|
||||
```cmd
|
||||
type %APPDATA%\Jan\data\logs\app.log
|
||||
```
|
||||
|
||||
## Broken Build
|
||||
|
||||
To resolve the issue where Jan is stuck in a broken build after installation:
|
||||
|
||||
<Tabs items={['Mac', 'Windows', 'Linux']}>
|
||||
<Tabs.Tab>
|
||||
1. **Uninstall** Jan
|
||||
|
||||
2. **Delete** Application Data, Cache, and User Data:
|
||||
|
||||
```zsh
|
||||
rm -rf ~/Library/Application\ Support/Jan
|
||||
```
|
||||
|
||||
3. If you are using a version before `0.4.2`, you need to run the following commands:
|
||||
|
||||
```zsh
|
||||
ps aux | grep nitro
|
||||
# Looks for processes like `nitro` and `nitro_arm_64`, and kill them one by one by process ID
|
||||
kill -9 <PID>
|
||||
```
|
||||
|
||||
4. **Download** the [latest version of Jan](/download)
|
||||
</Tabs.Tab>
|
||||
|
||||
<Tabs.Tab>
|
||||
1. **Uninstall** Jan, using the [Windows Control Panel](https://support.microsoft.com/en-us/windows/uninstall-or-remove-apps-and-programs-in-windows-4b55f974-2cc6-2d2b-d092-5905080eaf98)
|
||||
|
||||
2. **Delete** Application Data, Cache, and User Data:
|
||||
|
||||
```cmd
|
||||
cd C:\Users\%USERNAME%\AppData\Roaming
|
||||
rmdir /S Jan
|
||||
```
|
||||
|
||||
3. If you are using a version before `0.4.2`, you need to run the following commands:
|
||||
|
||||
```bash
|
||||
# Find the process ID (PID) of the nitro process by filtering the list by process name
|
||||
tasklist | findstr "nitro"
|
||||
# Once you have the PID of the process you want to terminate, run the `taskkill`
|
||||
taskkill /F /PID <PID>
|
||||
```
|
||||
|
||||
4. **Download** the [latest version of Jan](/download)
|
||||
</Tabs.Tab>
|
||||
|
||||
<Tabs.Tab>
|
||||
1. **Uninstall** Jan
|
||||
|
||||
Choose the appropriate method based on how you installed Jan:
|
||||
|
||||
**For Debian/Ubuntu:**
|
||||
```
|
||||
sudo apt-get remove Jan
|
||||
```
|
||||
**For Others:** Delete the Jan `.AppImage` file from your system
|
||||
|
||||
2. Delete Application Data, Cache, and User Data:
|
||||
|
||||
```bash
|
||||
# Default dir
|
||||
~/.config/Jan
|
||||
# Custom installation directory
|
||||
$XDG_CONFIG_HOME = /home/username/custom_config/Jan
|
||||
```
|
||||
|
||||
3. If you are using a version before `0.4.2`, you need to run the following commands:
|
||||
|
||||
```zsh
|
||||
ps aux | grep nitro
|
||||
# Looks for processes like `nitro` and `nitro_arm_64`, and kill them one by one by process ID
|
||||
kill -9 <PID>
|
||||
```
|
||||
|
||||
4. **Download** the [latest version of Jan](/download)
|
||||
</Tabs.Tab>
|
||||
**Server Logs:**
|
||||
```cmd
|
||||
type %APPDATA%\Jan\data\logs\cortex.log
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
Following these steps, you can cleanly uninstall and reinstall Jan, ensuring a smooth and error-free experience with the latest version.
|
||||
|
||||
<Aside type="info">
|
||||
Before reinstalling Jan, ensure it's completely removed from all shared spaces if installed on multiple user accounts on your device.
|
||||
<Aside type="caution">
|
||||
Remove any personal information before sharing logs. We only keep logs for 24 hours.
|
||||
</Aside>
|
||||
|
||||
## Troubleshooting NVIDIA GPU
|
||||
Follow these steps to resolve issues when Jan does not utilize the NVIDIA GPU on Windows and Linux systems.
|
||||
## Common Issues & Solutions
|
||||
|
||||
<ol>
|
||||
### Jan Won't Start (Broken Installation)
|
||||
|
||||
### Step 1: Verify Hardware and System Requirements
|
||||
If Jan gets stuck after installation or won't start properly:
|
||||
|
||||
#### 1.1. Check GPU Detection
|
||||
First, verify that your system recognizes the NVIDIA GPU:
|
||||
**Windows:**
|
||||
- Right-click desktop → NVIDIA Control Panel
|
||||
- Or check Device Manager → Display Adapters
|
||||
**Linux:**
|
||||
```
|
||||
lspci | grep -i nvidia
|
||||
```
|
||||
#### 1.2. Install Required Components
|
||||
**NVIDIA Driver:**
|
||||
1. Install [NVIDIA Driver](https://www.nvidia.com/en-us/drivers/) for your GPU (NVIDIA driver **470.63.01 or higher**).
|
||||
2. Verify installation:
|
||||
<Tabs>
|
||||
<TabItem label="macOS">
|
||||
|
||||
```
|
||||
nvidia-smi
|
||||
```
|
||||
Expected output should show your GPU model and driver version.
|
||||
**Clean Reinstall Steps:**
|
||||
|
||||
**CUDA Toolkit:**
|
||||
1. Download and install [CUDA toolkit](https://developer.nvidia.com/cuda-downloads) (**CUDA 11.7 or higher**)
|
||||
2. Verify installation:
|
||||
1. **Uninstall Jan** from Applications folder
|
||||
|
||||
2. **Delete all Jan data:**
|
||||
```bash
|
||||
rm -rf ~/Library/Application\ Support/Jan
|
||||
```
|
||||
nvcc --version
|
||||
|
||||
3. **Kill any background processes** (for versions before 0.4.2):
|
||||
```bash
|
||||
ps aux | grep nitro
|
||||
# Find process IDs and kill them:
|
||||
kill -9 <PID>
|
||||
```
|
||||
|
||||
4. **Download fresh copy** from [jan.ai](/download)
|
||||
|
||||
</TabItem>
|
||||
<TabItem label="Windows">
|
||||
|
||||
**Clean Reinstall Steps:**
|
||||
|
||||
1. **Uninstall Jan** via Control Panel
|
||||
|
||||
2. **Delete application data:**
|
||||
```cmd
|
||||
cd C:\Users\%USERNAME%\AppData\Roaming
|
||||
rmdir /S Jan
|
||||
```
|
||||
|
||||
3. **Kill background processes** (for versions before 0.4.2):
|
||||
```cmd
|
||||
# Find nitro processes
|
||||
tasklist | findstr "nitro"
|
||||
# Kill them by PID
|
||||
taskkill /F /PID <PID>
|
||||
```
|
||||
|
||||
4. **Download fresh copy** from [jan.ai](/download)
|
||||
|
||||
</TabItem>
|
||||
<TabItem label="Linux">
|
||||
|
||||
**Clean Reinstall Steps:**
|
||||
|
||||
1. **Uninstall Jan:**
|
||||
```bash
|
||||
# For Debian/Ubuntu
|
||||
sudo apt-get remove jan
|
||||
|
||||
# For AppImage - just delete the file
|
||||
```
|
||||
|
||||
2. **Delete application data:**
|
||||
```bash
|
||||
# Default location
|
||||
rm -rf ~/.config/Jan
|
||||
|
||||
# Or custom location
|
||||
rm -rf $XDG_CONFIG_HOME/Jan
|
||||
```
|
||||
|
||||
3. **Kill background processes** (for versions before 0.4.2):
|
||||
```bash
|
||||
ps aux | grep nitro
|
||||
kill -9 <PID>
|
||||
```
|
||||
|
||||
4. **Download fresh copy** from [jan.ai](/download)
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
<Aside type="note">
|
||||
Make sure Jan is completely removed from all user accounts before reinstalling.
|
||||
</Aside>
|
||||
|
||||
### NVIDIA GPU Not Working
|
||||
|
||||
If Jan isn't using your NVIDIA graphics card for acceleration:
|
||||
|
||||
#### Step 1: Verify Hardware and System Requirements
|
||||
|
||||
**Check GPU Detection:**
|
||||
|
||||
*Windows:* Right-click desktop → NVIDIA Control Panel, or check Device Manager → Display Adapters
|
||||
|
||||
*Linux:* Run `lspci | grep -i nvidia`
|
||||
|
||||
**Install Required Software:**
|
||||
|
||||
**NVIDIA Driver (470.63.01 or newer):**
|
||||
1. Download from [nvidia.com/drivers](https://www.nvidia.com/drivers/)
|
||||
2. Test: Run `nvidia-smi` in terminal
|
||||
|
||||
**CUDA Toolkit (11.7 or newer):**
|
||||
1. Download from [CUDA Downloads](https://developer.nvidia.com/cuda-downloads)
|
||||
2. Test: Run `nvcc --version`
|
||||
|
||||
**Linux Additional Requirements:**
|
||||
1. Ensure the required packages are installed:
|
||||
```
|
||||
sudo apt update
|
||||
sudo apt install gcc-11 g++-11 cpp-11
|
||||
```
|
||||
See [detailed instructions](https://gcc.gnu.org/projects/cxx-status.html#cxx17).
|
||||
```bash
|
||||
# Install required packages
|
||||
sudo apt update && sudo apt install gcc-11 g++-11 cpp-11
|
||||
|
||||
2. Set up CUDA environment:
|
||||
```
|
||||
# Set CUDA environment
|
||||
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/lib64
|
||||
```
|
||||
See [detailed instructions](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html#post-installation-actions).
|
||||
|
||||
<Aside type="info">
|
||||
Ensure your (V)RAM is accessible; some users with virtual RAM may require additional configuration.
|
||||
</Aside>
|
||||
#### Step 2: Enable GPU Acceleration in Jan
|
||||
|
||||
### Step 2: Turn on GPU acceleration
|
||||
|
||||
Jan manages GPU usage automatically:
|
||||
- Switches to GPU mode when supported
|
||||
- Automatically selects GPU with highest VRAM
|
||||
|
||||
To verify GPU acceleration is turned on:
|
||||
1. Open **Settings** > **Hardware**
|
||||
2. Verify that **GPU Acceleration** is turned on
|
||||
3. Verify your selected GPU(s) are visible in **System Monitor** from Jan's footer
|
||||
2. Turn on **GPU Acceleration**
|
||||
3. Check **System Monitor** (footer) to verify GPU is detected
|
||||
|
||||
<br/>
|
||||

|
||||
<br/>
|
||||
|
||||
#### Step 3: Verify Configuration
|
||||
|
||||
### Step 3: GPU Settings Check
|
||||
1. Go to **Settings** > **Advanced Settings** > **Data Folder**
|
||||
2. Open `settings.json` file
|
||||
3. Check these settings:
|
||||
|
||||
1. Go to **Settings** > **General** > **Data Folder**
|
||||
2. Click on **Open Containing Folder**
|
||||
3. Open `settings.json` file
|
||||
|
||||
Example `settings.json`:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"notify": true,
|
||||
"run_mode": "gpu",
|
||||
"run_mode": "gpu", // Should be "gpu"
|
||||
"nvidia_driver": {
|
||||
"exist": true,
|
||||
"exist": true, // Should be true
|
||||
"version": "531.18"
|
||||
},
|
||||
"cuda": {
|
||||
"exist": true,
|
||||
"exist": true, // Should be true
|
||||
"version": "12"
|
||||
},
|
||||
"gpus": [
|
||||
{
|
||||
"id": "0",
|
||||
"vram": "12282"
|
||||
},
|
||||
{
|
||||
"id": "1",
|
||||
"vram": "6144"
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"vram": "6144"
|
||||
"vram": "12282" // Your GPU memory in MB
|
||||
}
|
||||
],
|
||||
"gpu_highest_vram": "0"
|
||||
]
|
||||
}
|
||||
```
|
||||
**Key Configuration Values:**
|
||||
- `run_mode`: Should be "gpu" for GPU acceleration
|
||||
- `nvidia_driver`: Shows driver status and version
|
||||
- `cuda`: Shows CUDA toolkit status and version
|
||||
- `gpus`: Lists available GPUs and their VRAM (in MB)
|
||||
- `gpu_highest_vram`: ID of GPU with most VRAM
|
||||
|
||||
#### Step 4: Restart Jan
|
||||
|
||||
Close and restart Jan to apply changes.
|
||||
|
||||
### Step 4: Restart Jan
|
||||
#### Tested Working Configurations
|
||||
|
||||
Restart Jan to make sure it works.
|
||||
**Desktop Systems:**
|
||||
- Windows 11 + RTX 4070Ti + CUDA 12.2 + Driver 531.18
|
||||
- Ubuntu 22.04 + RTX 4070Ti + CUDA 12.2 + Driver 545
|
||||
|
||||
</ol>
|
||||
**Virtual Machines:**
|
||||
- Ubuntu on Proxmox + GTX 1660Ti + CUDA 12.1 + Driver 535
|
||||
|
||||
### Tested Configurations
|
||||
These configurations have been verified to work with Jan's GPU acceleration. You can use them as reference points for your setup.
|
||||
<Aside type="note">
|
||||
Desktop installations perform better than virtual machines. VMs need proper GPU passthrough setup.
|
||||
</Aside>
|
||||
|
||||
**Bare Metal Installations**
|
||||
### "Failed to Fetch" or "Something's Amiss" Errors
|
||||
|
||||
Windows 11 Pro (64-bit)
|
||||
| Component | Version/Model |
|
||||
|-----------|--------------|
|
||||
| GPU | NVIDIA GeForce RTX 4070Ti |
|
||||
| CUDA | 12.2 |
|
||||
| NVIDIA Driver | 531.18 |
|
||||
| OS | Windows 11 Pro 64-bit |
|
||||
| RAM | 32GB |
|
||||
When models won't respond or show these errors:
|
||||
|
||||
Ubuntu 22.04 LTS
|
||||
| Component | Version/Model |
|
||||
|-----------|--------------|
|
||||
| GPU | NVIDIA GeForce RTX 4070Ti |
|
||||
| CUDA | 12.2 |
|
||||
| NVIDIA Driver | 545 |
|
||||
| OS | Ubuntu 22.04 LTS |
|
||||
**1. Check System Requirements**
|
||||
- **RAM:** Use models under 80% of available memory
|
||||
- 8GB system: Use models under 6GB
|
||||
- 16GB system: Use models under 13GB
|
||||
- **Hardware:** Verify your system meets [minimum requirements](/docs/desktop/)
|
||||
|
||||
**Virtual Machine Setups**
|
||||
**2. Adjust Model Settings**
|
||||
- Open model settings in the chat sidebar
|
||||
- Lower the **GPU Layers (ngl)** setting
|
||||
- Start low and increase gradually
|
||||
|
||||
Ubuntu on Proxmox VM
|
||||
| Component | Version/Model |
|
||||
|-----------|--------------|
|
||||
| GPU | NVIDIA GeForce GTX 1660Ti |
|
||||
| CUDA | 12.1 |
|
||||
| NVIDIA Driver | 535 |
|
||||
| OS | Ubuntu 20.04/18.04 LTS |
|
||||
| VM Type | Proxmox |
|
||||
|
||||
**Performance Notes**
|
||||
- Bare metal installations provide better performance
|
||||
- VM setups require proper GPU passthrough configuration
|
||||
- Some laptop GPUs may have reduced performance
|
||||
- Hybrid graphics (Optimus) may need additional configuration
|
||||
|
||||
## Permission Denied
|
||||
|
||||
When running Jan, you might encounter the following error message:
|
||||
|
||||
```
|
||||
Uncaught (in promise) Error: Error invoking layout-480796bff433a3a3.js:538 remote method 'installExtension':
|
||||
Error Package /Applications/Jan.app/Contents/Resources/app.asar.unpacked/pre-install/janhq-assistant-extension-1.0.0.tgz does not contain a valid manifest:
|
||||
Error EACCES: permission denied, mkdtemp '/Users/username/.npm/_cacache/tmp/ueCMn4'
|
||||
```
|
||||
|
||||
This error is mainly caused by permission problems during installation. To resolve it, follow these steps:
|
||||
|
||||
1. Open your **Terminal**
|
||||
|
||||
2. Execute the following command to change ownership of the `~/.npm` directory to the current user:
|
||||
**3. Check Port Conflicts**
|
||||
If logs show "Bind address failed":
|
||||
|
||||
```bash
|
||||
sudo chown -R $(whoami) ~/.npm
|
||||
```
|
||||
|
||||
This command ensures that the necessary permissions are granted for Jan's installation.
|
||||
|
||||
|
||||
## "Failed to fetch" or "Something's Amiss" errors
|
||||
|
||||
When you start a chat with a model and encounter a **Failed to Fetch** or **Something's Amiss** error, here are some possible solutions to resolve it:
|
||||
|
||||
**1. Check System & Hardware Requirements**
|
||||
- Hardware dependencies: Ensure your device meets all [hardware requirements](docs/troubleshooting#step-1-verify-hardware-and-system-requirements)
|
||||
- OS: Ensure your operating system meets the minimum requirements ([Mac](/docs/desktop/mac#minimum-requirements), [Windows](/docs/desktop/windows#compatibility), [Linux](docs/desktop/linux#compatibility))
|
||||
- RAM: Choose models that use less than 80% of your available RAM
|
||||
- For 8GB systems: Use models under 6GB
|
||||
- For 16GB systems: Use models under 13GB
|
||||
|
||||
**2. Check Model Parameters**
|
||||
- In **Engine Settings** in the right sidebar, check your `ngl` ([number of GPU layers](/docs/models/model-parameters#engine-parameters)) setting to see if it's too high
|
||||
- Start with a lower NGL value and increase gradually based on your GPU memory
|
||||
|
||||
**3. Port Conflicts**
|
||||
|
||||
If you check your [app logs](/docs/troubleshooting#how-to-get-error-logs) & see "Bind address failed at 127.0.0.1:39291", check port availability:
|
||||
```
|
||||
# Mac
|
||||
netstat -an | grep 39291
|
||||
# Check if ports are in use
|
||||
# macOS/Linux
|
||||
netstat -an | grep 1337
|
||||
|
||||
# Windows
|
||||
netstat -ano | find "39291"
|
||||
tasklist /fi "PID eq 39291"
|
||||
|
||||
# Linux
|
||||
netstat -anpe | grep "39291"
|
||||
netstat -ano | find "1337"
|
||||
```
|
||||
<Aside type="info">
|
||||
`Netstat` displays the contents of various network-related data structures for active connections.
|
||||
|
||||
**Default Jan ports:**
|
||||
- API Server: `1337`
|
||||
- Documentation: `3001`
|
||||
|
||||
**4. Try Factory Reset**
|
||||
1. **Settings** > **Advanced Settings**
|
||||
2. Click **Reset** under "Reset To Factory Settings"
|
||||
|
||||
<Aside type="caution">
|
||||
This deletes all chat history, models, and settings.
|
||||
</Aside>
|
||||
|
||||
Default Jan ports:
|
||||
- Jan and Cortex API Server: `1337`
|
||||
- Jan Documentation: `3001`
|
||||
**5. Clean Reinstall**
|
||||
If problems persist, do a complete clean installation (see "Jan Won't Start" section above).
|
||||
|
||||
**4. Factory Reset**
|
||||
### Permission Denied Errors
|
||||
|
||||
A factory reset can resolve persistent issues by returning Jan to its original state. This will remove all custom settings, downloaded models, and chat history.
|
||||
1. Go to **Settings** > **Advanced Settings**
|
||||
2. At **Reset To Factory Settings**, click **Reset**
|
||||
If you see permission errors during installation:
|
||||
|
||||
<Aside type="warning">
|
||||
This will delete all chat history, models, and settings.
|
||||
</Aside>
|
||||
|
||||
**5. Try a clean installation**
|
||||
- Uninstall Jan & clean Jan data folders ([Mac](/docs/desktop/mac#uninstall-jan), [Windows](/docs/desktop/windows#uninstall-jan), [Linux](docs/desktop/linux#uninstall-jan))
|
||||
- Install the latest [stable release](/download)
|
||||
|
||||
<Aside type="warning">
|
||||
This will delete all your Jan data.
|
||||
</Aside>
|
||||
|
||||
## OpenAI Unexpected Token Issue
|
||||
The "Unexpected token" error usually relates to OpenAI API authentication or regional restrictions.
|
||||
|
||||
**Step 1: API Key Setup**
|
||||
1. Get a valid API key from [OpenAI's developer platform](https://platform.openai.com/)
|
||||
2. Ensure the key has sufficient credits & appropriate permissions
|
||||
|
||||
**Step 2: Regional Access**
|
||||
1. If you're in a region with restricted access, use a VPN service from a supported region
|
||||
2. Verify your network can reach OpenAI's API endpoints
|
||||
|
||||
|
||||
## Need Further Support?
|
||||
If you can't find what you need in our troubleshooting guide, feel free to reach out to us for extra help:
|
||||
- **Copy** your [app logs](/docs/troubleshooting#how-to-get-error-logs)
|
||||
- Go to our [Discord](https://discord.com/invite/FTk2MvZwJH) & send it to the **#🆘|jan-help** channel for further support.
|
||||
|
||||
|
||||
<Aside type="info">
|
||||
Check the logs to ensure the information is what you intend to send. We retain your logs for only **24 hours**, so report any issues promptly.
|
||||
```bash
|
||||
# Fix npm permissions (macOS/Linux)
|
||||
sudo chown -R $(whoami) ~/.npm
|
||||
|
||||
# Windows - run as administrator
|
||||
```
|
||||
|
||||
### OpenAI API Issues ("Unexpected Token")
|
||||
|
||||
For OpenAI connection problems:
|
||||
|
||||
**1. Verify API Key**
|
||||
- Get valid key from [OpenAI Platform](https://platform.openai.com/)
|
||||
- Ensure sufficient credits and permissions
|
||||
|
||||
**2. Check Regional Access**
|
||||
- Some regions have API restrictions
|
||||
- Try using a VPN from a supported region
|
||||
- Test network connectivity to OpenAI endpoints
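A quick way to check both at once is to request OpenAI's model list and look only at the HTTP status code: `200` means the key and network are fine, `401` points to the key, and a timeout points to the network. A sketch, assuming `curl` is available:

```bash
# Print only the HTTP status code returned by OpenAI's models endpoint
curl -s -o /dev/null -w "%{http_code}\n" https://api.openai.com/v1/models \
  -H "Authorization: Bearer $OPENAI_API_KEY"
```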
|
||||
|
||||
### Performance Issues
|
||||
|
||||
**Models Running Slowly:**
|
||||
- Enable GPU acceleration (see NVIDIA section)
|
||||
- Use appropriate model size for your hardware
|
||||
- Close other memory-intensive applications
|
||||
- Check Task Manager/Activity Monitor for resource usage
|
||||
|
||||
**High Memory Usage:**
|
||||
- Switch to smaller model variants
|
||||
- Reduce context length in model settings
|
||||
- Enable model offloading in engine settings
|
||||
|
||||
**Frequent Crashes:**
|
||||
- Update graphics drivers
|
||||
- Check system temperature
|
||||
- Reduce GPU layers if using GPU acceleration
|
||||
- Verify adequate power supply (desktop systems)
|
||||
|
||||
## Need More Help?
|
||||
|
||||
If these solutions don't work:
|
||||
|
||||
**1. Gather Information:**
|
||||
- Copy your error logs (see top of this page)
|
||||
- Note your system specifications
|
||||
- Describe what you were trying to do when the problem occurred
|
||||
|
||||
**2. Get Community Support:**
|
||||
- Join our [Discord](https://discord.com/invite/FTk2MvZwJH)
|
||||
- Post in the **#🆘|jan-help** channel
|
||||
- Include your logs and system info
|
||||
|
||||
**3. Check Resources:**
|
||||
- [System requirements](./installation)
|
||||
- [Model compatibility guides](./manage-models)
|
||||
- [Hardware setup guides](./installation)
|
||||
|
||||
<Aside type="note">
|
||||
When sharing logs, remove personal information first. We only keep logs for 24 hours, so report issues promptly.
|
||||
</Aside>
|
||||
|
||||
@ -1,16 +0,0 @@
|
||||
---
|
||||
title: Creative Writing
|
||||
description: Download models and manage your conversations with AI models locally.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
local AI,
|
||||
LLM,
|
||||
chat,
|
||||
threads,
|
||||
models,
|
||||
download,
|
||||
installation,
|
||||
conversations,
|
||||
]
|
||||
---
|
||||
@ -1,17 +0,0 @@
|
||||
---
|
||||
title: Translation
|
||||
description: Download models and manage your conversations with AI models locally.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
local AI,
|
||||
LLM,
|
||||
chat,
|
||||
threads,
|
||||
models,
|
||||
translation,
|
||||
download,
|
||||
installation,
|
||||
conversations,
|
||||
]
|
||||
---
|
||||
@ -15,13 +15,10 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
import { Aside, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
# Jan Data Folder
|
||||
|
||||
Jan stores all your data locally on your computer. No cloud storage, no external servers - everything stays on your machine.
|
||||
Jan stores all your data locally on your computer. No cloud storage, no external servers -
|
||||
everything stays on your machine.
|
||||
|
||||
## Quick Access
|
||||
|
||||
@ -45,6 +42,7 @@ Jan stores all your data locally on your computer. No cloud storage, no external
|
||||
```
|
||||
</TabItem>
|
||||
<TabItem label="Linux">
|
||||
|
||||
```bash
|
||||
# Default installation
|
||||
~/.config/Jan/data
|
||||
@ -52,7 +50,9 @@ Jan stores all your data locally on your computer. No cloud storage, no external
|
||||
# Custom installation
|
||||
$XDG_CONFIG_HOME/Jan/data
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
|
||||
</Tabs>
|
||||
|
||||
## Monitoring Server Logs
|
||||
@ -141,7 +141,7 @@ Configuration files that define how your AI assistants behave, including their i
|
||||
- Standard user-level permissions
|
||||
- No elevated access required
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
When using cloud AI services through Jan, those conversations follow the cloud provider's data policies. Local model conversations never leave your computer.
|
||||
</Aside>
|
||||
|
||||
@ -176,4 +176,4 @@ If you need to completely remove Jan and all data:
|
||||
Detailed uninstall guides:
|
||||
- [macOS](/docs/desktop/mac#step-2-clean-up-data-optional)
|
||||
- [Windows](/docs/desktop/windows#step-2-handle-jan-data)
|
||||
- [Linux](/docs/desktop/linux#uninstall-jan)
|
||||
|
||||
|
||||
@ -18,7 +18,6 @@ keywords:
|
||||
---
|
||||
|
||||
import { Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
## Integrate with Continue VS Code
|
||||
|
||||
@ -26,29 +25,17 @@ import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
To integrate Jan with a local AI language model, follow the steps below:
|
||||
|
||||
<ol>
|
||||
1. **Installing Continue on Visual Studio Code**
|
||||
|
||||
Follow this [guide](https://continue.dev/docs/quickstart) to install the Continue extension on Visual Studio Code.
|
||||
|
||||
- Follow this [guide](https://continue.dev/docs/quickstart) to install the Continue extension on Visual Studio Code.
|
||||
2. **Enable the Jan API Server**
|
||||
|
||||
To set up Continue for use with Jan's Local Server, you must activate the Jan API Server with your chosen model.
|
||||
|
||||
1. Press the `⚙️ Settings` button.
|
||||
|
||||
2. Locate `Local API Server`.
|
||||
|
||||
3. Setup the server, which includes the **IP Port**, **Cross-Origin-Resource-Sharing (CORS)** and **Verbose Server Logs**.
|
||||
|
||||
4. Include your user-defined API Key.
|
||||
|
||||
5. Press the **Start Server** button
|
||||
|
||||
3. **Configure Continue to Use Jan's Local Server**
|
||||
|
||||
1. Go to the `~/.continue` directory.
|
||||
|
||||
<Tabs>
|
||||
<TabItem label="Mac">
|
||||
```bash
|
||||
@ -86,20 +73,16 @@ To integrate Jan with a local AI language model, follow the steps below:
|
||||
- provider: folder
|
||||
- provider: codebase
|
||||
```
|
||||
|
||||
2. Ensure the file has the following configurations:
|
||||
- Ensure `openai` is selected as the `provider`.
|
||||
- Match the `model` with the one enabled in the Jan API Server.
|
||||
- Set `apiBase` to `http://localhost:1337/v1`.
|
||||
|
||||
4. **Ensure the Using Model Is Activated in Jan**
|
||||
|
||||
1. Navigate to `Settings` > `Model Providers`.
|
||||
2. Under Llama.cpp, find the model you want to use.
|
||||
3. Select the **Start Model** button to activate the model.
|
||||
</ol>
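Before using the extension, you can sanity-check that Jan's Local API Server is reachable at the `apiBase` you configured. A minimal sketch, assuming the default port `1337`; replace the model ID and API key with the ones you set in Jan:

```bash
# Ask Jan's OpenAI-compatible server for a short completion
curl http://localhost:1337/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer your-api-key" \
  -d '{
    "model": "your-model-id",
    "messages": [{"role": "user", "content": "Hello from the Continue setup"}]
  }'
```

If this returns a completion, Continue should be able to use the same endpoint.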
|
||||
|
||||
## How to Use Jan Integration with Continue in Visual Studio Code
|
||||
## Use Jan with Continue in Visual Studio Code
|
||||
|
||||
### 1. Exploring Code with Jan
|
||||
|
||||
|
||||
@ -18,10 +18,7 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
# llmcord (Discord)
|
||||
import { Aside, Steps } from '@astrojs/starlight/components';
|
||||
|
||||
## Integrate llmcord.py with Jan
|
||||
|
||||
@ -29,35 +26,26 @@ import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
To integrate Jan with llmcord.py, follow the steps below:
|
||||
|
||||
<ol>
|
||||
<Steps>
|
||||
|
||||
1. **Clone the Repository**
|
||||
|
||||
Clone the discord bot's [repository](https://github.com/jakobdylanc/discord-llm-chatbot) by using the following command:
|
||||
```bash
|
||||
git clone https://github.com/jakobdylanc/discord-llm-chatbot.git
|
||||
```
|
||||
|
||||
2. **Install the Required Libraries**
|
||||
|
||||
After cloning the repository, run the following command:
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
<Aside type="info">
|
||||
A valid Python installation is required.
|
||||
</Aside>
|
||||
|
||||
3. **Set the Environment**
|
||||
|
||||
1. Create a copy of `.env.example`.
|
||||
2. Change the name to `.env`.
|
||||
3. Set the environment with the following options:
|
||||
|
||||
| Setting | Instructions |
|
||||
| ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| :----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `DISCORD_BOT_TOKEN` | Create a new Discord bot at [discord.com/developers/applications](https://discord.com/developers/applications), obtain a token from the Bot tab, and enable MESSAGE CONTENT INTENT. |
|
||||
| `DISCORD_CLIENT_ID` | Found under the OAuth2 tab of the Discord bot you just made. |
|
||||
| `LLM` | For Jan, set to `local/openai/(MODEL_NAME)`, where `(MODEL_NAME)` is your loaded model's name. |
|
||||
@ -65,15 +53,11 @@ To integrate Jan with llmcord.py, follow the steps below:
|
||||
| `LOCAL_SERVER_URL` | URL of your local API server. For Jan, set it to `http://localhost:1337/v1`. |
|
||||
|
||||
For more configuration options, refer to llmcord.py's [README](https://github.com/jakobdylanc/discord-llm-chatbot/tree/main?tab=readme-ov-file#instructions).
|
||||
|
||||
4. **Run the Bot**
|
||||
|
||||
Run the bot with the following command in your command prompt:
|
||||
|
||||
```bash
|
||||
python llmcord.py
|
||||
```
|
||||
|
||||
The bot's invite URL will be printed in the console. Use it to add the bot to your server.
|
||||
|
||||
</ol>
|
||||
</Steps>
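For reference, a `.env` built from the table in step 3 might look like the sketch below. The Discord values and the model ID are placeholders; use your own bot token, client ID, and the ID of the model loaded in Jan:

```bash
# Hypothetical .env for llmcord + Jan (every value is a placeholder)
DISCORD_BOT_TOKEN=your-discord-bot-token
DISCORD_CLIENT_ID=your-discord-client-id
LLM=local/openai/your-model-id
LOCAL_SERVER_URL=http://localhost:1337/v1
```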
|
||||
|
||||
@ -19,34 +19,26 @@ description: A step-by-step guide on integrating Jan with n8n.
|
||||
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
# n8n
|
||||
|
||||
## Integrate n8n with Jan
|
||||
|
||||
[n8n](https://n8n.io/) is an open-source workflow automation tool that allows you to connect to more than 400+ integrations and services to automate repetitive tasks. With its visual interface, you can create complex workflows conveniently. To integrate n8n with Jan, follow the steps below:
|
||||
|
||||
<ol>
|
||||
<Steps>
|
||||
1. **Run your preferred model with Jan server**
|
||||
|
||||
1. Open the Jan app.
|
||||
2. Go to the **Hub** and download your preferred model
|
||||
3. Run the Jan server
|
||||
|
||||
|
||||
2. **Start n8n service**
|
||||
Start n8n immediately using npx:
|
||||
|
||||
```
|
||||
```sh
|
||||
npx n8n
|
||||
```
|
||||
|
||||
Or deploy with Docker:
|
||||
|
||||
```
|
||||
```sh
|
||||
docker run -it --rm --name n8n -p 5678:5678 docker.n8n.io/n8nio/n8n
|
||||
```
|
||||
|
||||
3. **Integrate Jan with n8n service using HTTP Request**
|
||||
|
||||
Integrate Jan by selecting the HTTP Request node in n8n and importing the following cURL command:
|
||||
|
||||
```bash
|
||||
@ -77,4 +69,4 @@ import { Steps } from '@astrojs/starlight/components';
|
||||
"top_p": 0.95
|
||||
}'
|
||||
```
|
||||
</ol>
|
||||
</Steps>
|
||||
|
||||
@ -18,11 +18,8 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
# Tabby
|
||||
|
||||
## Integrate Jan with Tabby and Your Favorite IDEs
|
||||
|
||||
[Tabby](https://www.tabbyml.com/) is an open-source, self-hosted AI coding assistant.
|
||||
@ -33,56 +30,46 @@ and it can be used with various LLM services, including Jan.
|
||||
|
||||
To integrate Jan with Tabby, follow these steps:
|
||||
|
||||
<ol>
|
||||
<Steps>
|
||||
|
||||
1. **Enable the Jan API Server**
|
||||
|
||||
To set up Tabby with Jan's Local Server, you must activate the Jan API Server with your chosen model.
|
||||
|
||||
1. Click the `Local API Server` (`<>`) button above the Settings. Jan will direct you to the **Local API Server** section.
|
||||
2. Configure the server, including the **IP Port**, **Cross-Origin Resource Sharing (CORS)**, and **Verbose Server Logs**.
|
||||
3. Press the **Start Server** button.
|
||||
|
||||
2. **Find the Model ID and Ensure the Model is Activated**
|
||||
|
||||
1. Go to `Settings` > `My Models`.
|
||||
2. Models are listed with their **Model ID** beneath their names.
|
||||
3. Click the **three dots (⋮)** button next to the model.
|
||||
4. Select **Start Model** to activate the model.
|
||||
|
||||
3. **Installing Tabby Server**
|
||||
|
||||
Use the following documentation to install the Tabby server:
|
||||
- [Docker](https://tabby.tabbyml.com/docs/quick-start/installation/docker/)
|
||||
- [Apple Silicon](https://tabby.tabbyml.com/docs/quick-start/installation/apple/)
|
||||
- [Linux](https://tabby.tabbyml.com/docs/quick-start/installation/linux/)
|
||||
- [Windows](https://tabby.tabbyml.com/docs/quick-start/installation/windows/)
|
||||
|
||||
Then, follow the steps to connect Jan with the Tabby server:
|
||||
[Connect Jan with Tabby](https://tabby.tabbyml.com/docs/references/models-http-api/jan.ai/).
|
||||
|
||||
For example, to connect Jan with Tabby, save the following configuration under `~/.tabby/config.toml`:
|
||||
|
||||
```toml title="~/.tabby/config.toml"
|
||||
# Chat model
|
||||
```toml
|
||||
# ~/.tabby/config.toml
|
||||
[model.chat.http]
|
||||
kind = "openai/chat"
|
||||
model_name = "model_id"
|
||||
api_endpoint = "http://localhost:1337/v1"
|
||||
api_key = ""
|
||||
```
|
||||
|
||||
Currently, the Jan completion and embedding API is under construction.
|
||||
Once completed, you can also connect Jan with Tabby for completion and embedding tasks.
|
||||
|
||||
4. **Installing Tabby on Your Favorite IDEs**
|
||||
|
||||
Refer to the following documentation to install the Tabby extension on your favorite IDEs:
|
||||
- [Visual Studio Code](https://tabby.tabbyml.com/docs/extensions/installation/vscode/)
|
||||
- [JetBrains IntelliJ Platform](https://tabby.tabbyml.com/docs/extensions/installation/intellij/)
|
||||
- [VIM / NeoVIM](https://tabby.tabbyml.com/docs/extensions/installation/vim/)
|
||||
|
||||
</ol>
|
||||
</Steps>
|
||||
|
||||
## How to Use Tabby with Jan Integration
|
||||
|
||||
@ -98,4 +85,4 @@ Simply open the Tabby homepage at [localhost:8080](http://localhost:8080) and as
|
||||
After installing the Tabby extension on your preferred IDEs, you can engage in a conversation with Jan to:
|
||||
|
||||
1. Discuss your code, receive suggestions, and seek assistance.
|
||||
2. Request Jan to inline edit your code, and then review and accept the proposed changes.
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
---
|
||||
title: llama.cpp Engine
|
||||
title: llama.cpp Server
|
||||
description: Configure Jan's local AI engine for optimal performance.
|
||||
keywords:
|
||||
[
|
||||
@ -15,13 +15,10 @@ keywords:
|
||||
]
|
||||
---
|
||||
|
||||
import { Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
import { Aside, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
# Local AI Engine (llama.cpp)
|
||||
|
||||
llama.cpp is the engine that runs AI models locally on your computer. It's what makes Jan work without needing internet or cloud services.
|
||||
llama.cpp is the engine that runs AI models locally on your computer. It's what makes Jan work without
|
||||
needing internet or cloud services.
|
||||
|
||||
## Accessing Engine Settings
|
||||
|
||||
@ -29,7 +26,7 @@ Find llama.cpp settings at **Settings** (⚙️) > **Local Engine** > **llama.cp
|
||||
|
||||

|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Most users don't need to change these settings. Jan picks good defaults for your hardware automatically.
|
||||
</Aside>
|
||||
|
||||
@ -94,7 +91,7 @@ Different backends are optimized for different hardware. Pick the one that match
|
||||
### Intel Macs
|
||||
- `llama.cpp-mac-amd64`
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
Apple Silicon automatically uses GPU acceleration through Metal.
|
||||
</Aside>
|
||||
|
||||
@ -162,6 +159,6 @@ Apple Silicon automatically uses GPU acceleration through Metal.
|
||||
2. Use appropriate backend for hardware
|
||||
3. Monitor memory usage
|
||||
|
||||
<Aside type="info">
|
||||
<Aside type="note">
|
||||
The default settings work well for most hardware. Only adjust these if you're experiencing specific issues or want to optimize for your particular setup.
|
||||
</Aside>
|
||||
</Aside>
|
||||
|
||||
@ -1,328 +0,0 @@
|
||||
---
|
||||
title: Troubleshooting
|
||||
description: Fix common issues and optimize Jan's performance with this comprehensive guide.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
troubleshooting,
|
||||
error fixes,
|
||||
performance issues,
|
||||
GPU problems,
|
||||
installation issues,
|
||||
common errors,
|
||||
local AI,
|
||||
technical support,
|
||||
]
|
||||
---
|
||||
|
||||
import { Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
import { Aside } from '@astrojs/starlight/components';
|
||||
import { Steps } from '@astrojs/starlight/components';
|
||||
|
||||
# Troubleshooting
|
||||
|
||||
## Getting Help: Error Logs
|
||||
|
||||
When Jan isn't working properly, error logs help identify the problem. Here's how to get them:
|
||||
|
||||
### Quick Access to Logs
|
||||
|
||||
**In Jan Interface:**
|
||||
1. Look for **System Monitor** in the footer
|
||||
2. Click **App Log**
|
||||
|
||||

|
||||
|
||||
**Via Terminal:**
|
||||
```bash
|
||||
# macOS/Linux
|
||||
tail -n 50 ~/Library/Application\ Support/Jan/data/logs/app.log
|
||||
|
||||
# Windows
|
||||
type %APPDATA%\Jan\data\logs\app.log
|
||||
```
|
||||
|
||||
<Aside type="caution">
|
||||
Remove any personal information before sharing logs. We only keep logs for 24 hours.
|
||||
</Aside>
|
||||
|
||||
## Common Issues & Solutions
|
||||
|
||||
### Jan Won't Start (Broken Installation)
|
||||
|
||||
If Jan gets stuck after installation or won't start properly:
|
||||
|
||||
<Tabs>
|
||||
<TabItem label="macOS">
|
||||
|
||||
**Clean Reinstall Steps:**
|
||||
|
||||
1. **Uninstall Jan** from Applications folder
|
||||
|
||||
2. **Delete all Jan data:**
|
||||
```bash
|
||||
rm -rf ~/Library/Application\ Support/Jan
|
||||
```
|
||||
|
||||
3. **Kill any background processes** (for versions before 0.4.2):
|
||||
```bash
|
||||
ps aux | grep nitro
|
||||
# Find process IDs and kill them:
|
||||
kill -9 <PID>
|
||||
```
|
||||
|
||||
4. **Download fresh copy** from [jan.ai](/download)
|
||||
|
||||
</TabItem>
|
||||
<TabItem label="Windows">
|
||||
|
||||
**Clean Reinstall Steps:**
|
||||
|
||||
1. **Uninstall Jan** via Control Panel
|
||||
|
||||
2. **Delete application data:**
|
||||
```cmd
|
||||
cd C:\Users\%USERNAME%\AppData\Roaming
|
||||
rmdir /S Jan
|
||||
```
|
||||
|
||||
3. **Kill background processes** (for versions before 0.4.2):
|
||||
```cmd
|
||||
# Find nitro processes
|
||||
tasklist | findstr "nitro"
|
||||
# Kill them by PID
|
||||
taskkill /F /PID <PID>
|
||||
```
|
||||
|
||||
4. **Download fresh copy** from [jan.ai](/download)
|
||||
|
||||
</TabItem>
|
||||
<TabItem label="Linux">
|
||||
|
||||
**Clean Reinstall Steps:**
|
||||
|
||||
1. **Uninstall Jan:**
|
||||
```bash
|
||||
# For Debian/Ubuntu
|
||||
sudo apt-get remove jan
|
||||
|
||||
# For AppImage - just delete the file
|
||||
```
|
||||
|
||||
2. **Delete application data:**
|
||||
```bash
|
||||
# Default location
|
||||
rm -rf ~/.config/Jan
|
||||
|
||||
# Or custom location
|
||||
rm -rf $XDG_CONFIG_HOME/Jan
|
||||
```
|
||||
|
||||
3. **Kill background processes** (for versions before 0.4.2):
|
||||
```bash
|
||||
ps aux | grep nitro
|
||||
kill -9 <PID>
|
||||
```
|
||||
|
||||
4. **Download fresh copy** from [jan.ai](/download)
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
<Aside type="info">
|
||||
Make sure Jan is completely removed from all user accounts before reinstalling.
|
||||
</Aside>
|
||||
|
||||
### NVIDIA GPU Not Working
|
||||
|
||||
If Jan isn't using your NVIDIA graphics card for acceleration:
|
||||
|
||||
<ol>
|
||||
|
||||
1. **Check Your Hardware Setup**
|
||||
|
||||
**Verify GPU Detection:**
|
||||
|
||||
*Windows:* Right-click desktop → NVIDIA Control Panel, or check Device Manager → Display Adapters
|
||||
|
||||
*Linux:* Run `lspci | grep -i nvidia`
|
||||
|
||||
**Install Required Software:**
|
||||
|
||||
**NVIDIA Driver (470.63.01 or newer):**
|
||||
1. Download from [nvidia.com/drivers](https://www.nvidia.com/drivers/)
|
||||
2. Test: Run `nvidia-smi` in terminal
|
||||
|
||||
**CUDA Toolkit (11.7 or newer):**
|
||||
1. Download from [CUDA Downloads](https://developer.nvidia.com/cuda-downloads)
|
||||
2. Test: Run `nvcc --version`
|
||||
|
||||
**Linux Additional Requirements:**
|
||||
```bash
|
||||
# Install required packages
|
||||
sudo apt update && sudo apt install gcc-11 g++-11 cpp-11
|
||||
|
||||
# Set CUDA environment
|
||||
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/lib64
|
||||
```
|
||||
|
||||
2. **Enable GPU Acceleration in Jan**
|
||||
|
||||
1. Open **Settings** > **Hardware**
|
||||
2. Turn on **GPU Acceleration**
|
||||
3. Check **System Monitor** (footer) to verify GPU is detected
|
||||
|
||||

|
||||
|
||||
3. **Verify Configuration**
|
||||
|
||||
1. Go to **Settings** > **Advanced Settings** > **Data Folder**
|
||||
2. Open `settings.json` file
|
||||
3. Check these settings:
|
||||
|
||||
```json
|
||||
{
|
||||
"run_mode": "gpu", // Should be "gpu"
|
||||
"nvidia_driver": {
|
||||
"exist": true, // Should be true
|
||||
"version": "531.18"
|
||||
},
|
||||
"cuda": {
|
||||
"exist": true, // Should be true
|
||||
"version": "12"
|
||||
},
|
||||
"gpus": [
|
||||
{
|
||||
"id": "0",
|
||||
"vram": "12282" // Your GPU memory in MB
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
4. **Restart Jan**
|
||||
|
||||
Close and restart Jan to apply changes.
|
||||
|
||||
</ol>
|
||||
|
||||
#### Tested Working Configurations
|
||||
|
||||
**Desktop Systems:**
|
||||
- Windows 11 + RTX 4070Ti + CUDA 12.2 + Driver 531.18
|
||||
- Ubuntu 22.04 + RTX 4070Ti + CUDA 12.2 + Driver 545
|
||||
|
||||
**Virtual Machines:**
|
||||
- Ubuntu on Proxmox + GTX 1660Ti + CUDA 12.1 + Driver 535
|
||||
|
||||
<Aside type="info">
|
||||
Desktop installations perform better than virtual machines. VMs need proper GPU passthrough setup.
|
||||
</Aside>
|
||||
|
||||
### "Failed to Fetch" or "Something's Amiss" Errors
|
||||
|
||||
When models won't respond or show these errors:
|
||||
|
||||
**1. Check System Requirements**
|
||||
- **RAM:** Use models under 80% of available memory
|
||||
- 8GB system: Use models under 6GB
|
||||
- 16GB system: Use models under 13GB
|
||||
- **Hardware:** Verify your system meets [minimum requirements](/docs/troubleshooting#step-1-verify-hardware-and-system-requirements)
|
||||
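As a quick illustration, the 80% rule of thumb above works out roughly as follows (a minimal sketch, not an exact measurement of Jan's memory use):

```python
def max_model_size_gb(system_ram_gb: float) -> float:
    """Rule of thumb: keep the model under ~80% of system RAM."""
    return round(system_ram_gb * 0.8, 1)

print(max_model_size_gb(8))   # ~6.4 -> pick models under ~6 GB
print(max_model_size_gb(16))  # ~12.8 -> pick models under ~13 GB
```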
|
||||
**2. Adjust Model Settings**
|
||||
- Open model settings in the chat sidebar
|
||||
- Lower the **GPU Layers (ngl)** setting
|
||||
- Start low and increase gradually
|
||||
|
||||
**3. Check Port Conflicts**
|
||||
If logs show "Bind address failed":
|
||||
|
||||
```bash
|
||||
# Check if ports are in use
|
||||
# macOS/Linux
|
||||
netstat -an | grep 1337
|
||||
|
||||
# Windows
|
||||
netstat -ano | find "1337"
|
||||
```
|
||||
|
||||
**Default Jan ports:**
|
||||
- API Server: `1337`
|
||||
- Documentation: `3001`
|
||||
|
||||
**4. Try Factory Reset**
|
||||
1. **Settings** > **Advanced Settings**
|
||||
2. Click **Reset** under "Reset To Factory Settings"
|
||||
|
||||
<Aside type="caution">
|
||||
This deletes all chat history, models, and settings.
|
||||
</Aside>
|
||||
|
||||
**5. Clean Reinstall**
|
||||
If problems persist, do a complete clean installation (see "Jan Won't Start" section above).
|
||||
|
||||
### Permission Denied Errors
|
||||
|
||||
If you see permission errors during installation:
|
||||
|
||||
```bash
|
||||
# Fix npm permissions (macOS/Linux)
|
||||
sudo chown -R $(whoami) ~/.npm
|
||||
|
||||
# Windows - run as administrator
|
||||
```
|
||||
|
||||
### OpenAI API Issues ("Unexpected Token")
|
||||
|
||||
For OpenAI connection problems:
|
||||
|
||||
**1. Verify API Key**
|
||||
- Get valid key from [OpenAI Platform](https://platform.openai.com/)
|
||||
- Ensure sufficient credits and permissions
|
||||
|
||||
**2. Check Regional Access**
|
||||
- Some regions have API restrictions
|
||||
- Try using a VPN from a supported region
|
||||
- Test network connectivity to OpenAI endpoints
|
||||
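To rule out the key itself, you can test it outside Jan. This is a minimal sketch using the official `openai` Python package; the placeholder key is hypothetical:

```python
from openai import OpenAI

client = OpenAI(api_key="sk-...")  # the same key you entered in Jan (placeholder)

# Listing models fails fast with a clear error if the key is invalid,
# out of credits, or blocked for your region.
models = client.models.list()
print(models.data[0].id)
```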
|
||||
### Performance Issues
|
||||
|
||||
**Models Running Slowly:**
|
||||
- Enable GPU acceleration (see NVIDIA section)
|
||||
- Use appropriate model size for your hardware
|
||||
- Close other memory-intensive applications
|
||||
- Check Task Manager/Activity Monitor for resource usage
|
||||
|
||||
**High Memory Usage:**
|
||||
- Switch to smaller model variants
|
||||
- Reduce context length in model settings
|
||||
- Enable model offloading in engine settings
|
||||
|
||||
**Frequent Crashes:**
|
||||
- Update graphics drivers
|
||||
- Check system temperature
|
||||
- Reduce GPU layers if using GPU acceleration
|
||||
- Verify adequate power supply (desktop systems)
|
||||
|
||||
## Need More Help?
|
||||
|
||||
If these solutions don't work:
|
||||
|
||||
**1. Gather Information:**
|
||||
- Copy your error logs (see top of this page)
|
||||
- Note your system specifications
|
||||
- Describe what you were trying to do when the problem occurred
|
||||
|
||||
**2. Get Community Support:**
|
||||
- Join our [Discord](https://discord.com/invite/FTk2MvZwJH)
|
||||
- Post in the **#🆘|jan-help** channel
|
||||
- Include your logs and system info
|
||||
|
||||
**3. Check Resources:**
|
||||
- [System requirements](/docs/troubleshooting#step-1-verify-hardware-and-system-requirements)
|
||||
- [Model compatibility guides](/docs/manage-models)
|
||||
- [Hardware setup guides](/docs/desktop/)
|
||||
|
||||
<Aside type="info">
|
||||
When sharing logs, remove personal information first. We only keep logs for 24 hours, so report issues promptly.
|
||||
</Aside>
|
||||
35
website/src/content/docs/mobile/index.mdx
Normal file
@ -0,0 +1,35 @@
|
||||
---
|
||||
title: Jan Mobile
|
||||
description: Your AI assistant, on the go. Get ready for a seamless mobile experience with local and cloud capabilities.
|
||||
keywords:
|
||||
[
|
||||
Jan Mobile,
|
||||
Jan AI,
|
||||
mobile AI,
|
||||
local AI on phone,
|
||||
private AI app,
|
||||
iOS,
|
||||
Android,
|
||||
offline AI,
|
||||
ChatGPT alternative mobile
|
||||
]
|
||||
banner:
|
||||
content: 'Coming Q4 2025: Jan Mobile is currently in development.'
|
||||
---
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
## Your AI, Everywhere
|
||||
|
||||
Jan Mobile brings the full power of a private, local-first AI to your iOS and Android devices. Connect to your home desktop, your company's server, or run models directly on your phone for complete offline privacy.
|
||||
|
||||
<Aside type="note">
|
||||
**Jan Mobile is not yet available.** We are working hard to bring you a native, privacy-focused AI assistant for your phone.
|
||||
</Aside>
|
||||
|
||||
The goal is a seamless experience that adapts to your environment without requiring you to change settings.
|
||||
|
||||
### Core Features Planned:
|
||||
- **Three Connection Modes**: Seamlessly switch between Local, Desktop, and Server modes.
|
||||
- **Offline Capability**: Run `Jan Nano` or other small models directly on your device.
|
||||
- **Voice-First Interface**: Interact with your AI naturally through voice commands.
|
||||
- **Privacy by Design**: End-to-end encryption and full control over your data.
|
||||
@ -1,121 +0,0 @@
|
||||
---
|
||||
title: Jan's Product Vision
|
||||
description: AI that runs where you need it, how you need it
|
||||
sidebar:
|
||||
order: 0
|
||||
banner:
|
||||
content: 👋Jan is OPEN SUPERINTELLIGENCE that you can Self Host! You can connect to the 🌍 while staying local. 👀
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
|
||||
Jan is evolving from a local AI application to a complete full-stack AI solution that you can
|
||||
self-host. This includes models, applications, and tools to solve real problems.
|
||||
|
||||
## What We're Building
|
||||
|
||||
**Jan Agent** = Jan Models + Jan Application + Jan Tools
|
||||
|
||||
Unlike other AI assistants, which either handle specific tasks with a single model or scatter many models across a myriad of separate solutions, Jan provides:
|
||||
- Our own models, optimized for local and private use. As of now, we have Jan Nano in two flavors and Lucy, with more coming soon.
|
||||
- Applications that work across all your devices
|
||||
- Tools that actually get things done
|
||||
|
||||
## Two Modes, One Experience
|
||||
|
||||
### Local (Incognito) Mode
|
||||
This mode allows you to run AI models entirely on your device, giving you complete
|
||||
privacy with no internet required.
|
||||
|
||||
### Cloud Mode
|
||||
Connect to more powerful models when needed - either self-hosted or via jan.ai.
|
||||
|
||||
<Aside type="tip" title="We understand that...">
|
||||
Users shouldn't need to understand models, APIs, or technical details. Just choose Local for privacy or Cloud for power.
|
||||
</Aside>
|
||||
|
||||
## Available on Every Device
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Desktop/Laptop" icon="laptop">
|
||||
**Available Now**
|
||||
- Runs models locally
|
||||
- Optional cloud connection
|
||||
- Powers other devices
|
||||
</Card>
|
||||
|
||||
<Card title="Server" icon="bars">
|
||||
**Coming Soon**
|
||||
- Self-hosted for teams
|
||||
- 5-10 concurrent users
|
||||
- Your own private cloud
|
||||
</Card>
|
||||
|
||||
<Card title="Mobile" icon="phone">
|
||||
**In Development**
|
||||
- Connect to Desktop/Server
|
||||
- Local mode with Jan Nano
|
||||
- Same experience everywhere
|
||||
</Card>
|
||||
|
||||
<Card title="jan.ai Web" icon="up-arrow">
|
||||
**Beta Launch Soon**
|
||||
- SaaS version of Jan Server
|
||||
- Default for mobile/desktop cloud mode
|
||||
- No setup required
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Our Product Principles
|
||||
|
||||
### It Just Works
|
||||
1. Open Jan, start chatting.
|
||||
2. Onboarding is fully available but optional.
|
||||
3. Setting up an API key is optional.
|
||||
4. Selecting a local model is optional.
|
||||
|
||||
> We handle the complexity.
|
||||
|
||||
### Privacy First, Cloud When Needed
|
||||
Start with complete privacy by default. Add cloud capabilities only when you choose to.
|
||||
|
||||
### Solve Problems, Not Settings
|
||||
Users want answers, not configuration options. Power users can dig deeper, but it's never required.
|
||||
|
||||
## What Makes Jan Different
|
||||
|
||||
| Feature | Other AI Assistants | Jan |
|
||||
|---------|---------------------|-----|
|
||||
| Models | Wrapper around Claude/GPT | Our own models + others |
|
||||
| Dual mode | Your data on their servers | Your data stays yours |
|
||||
| Deployment | Cloud only | Local, self-hosted, or cloud |
|
||||
| Cost | Subscription forever | Free locally, pay for cloud |
|
||||
|
||||
## The Roadmap Simplified
|
||||
|
||||
### Today
|
||||
- Desktop app with local models
|
||||
- Basic cloud connections
|
||||
- Developer-friendly API
|
||||
|
||||
### Next 6 Months
|
||||
- Simplified Local/Cloud modes
|
||||
- jan.ai cloud service
|
||||
- Mobile apps
|
||||
- Self-hosted server
|
||||
|
||||
### Future Vision
|
||||
- Complete AI Agent platform
|
||||
- Compete directly with Claude/ChatGPT
|
||||
- Open superintelligence
|
||||
|
||||
<Aside type="tip">
|
||||
We're building AI that respects your choices. Run it locally for privacy, connect to cloud for power, or self-host for both.
|
||||
</Aside>
|
||||
|
||||
---
|
||||
|
||||
[Download Jan Desktop](/download) | [Try jan.ai (Beta)](/beta) | [Documentation](/docs)
|
||||
@ -1,194 +0,0 @@
|
||||
---
|
||||
title: Jan V1
|
||||
description: Our own family of AI models, not another wrapper
|
||||
sidebar:
|
||||
order: 1
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
Jan V1 is our own model family designed to compete directly with Claude and GPT-4. We're not just fine-tuning someone else's work - we're building models that solve real problems.
|
||||
|
||||
## Why Jan V1 Matters
|
||||
|
||||
Most AI applications are just wrappers around Claude or OpenAI. We're different. Jan V1 models are:
|
||||
- Trained by us for real-world use cases
|
||||
- Optimized to run locally or in the cloud
|
||||
- Designed for both power and privacy
|
||||
|
||||
<Aside type="note">
|
||||
Jan V1 models power everything in the Jan ecosystem. When you use Jan, you're using our models.
|
||||
</Aside>
|
||||
|
||||
## Model Lineup
|
||||
|
||||
| Model | Size | Best For | Availability |
|
||||
|-------|------|----------|--------------|
|
||||
| Jan V1-7B | 4-8GB | Quick tasks, older hardware | Now |
|
||||
| Jan V1-13B | 8-16GB | Daily use, good balance | Now |
|
||||
| Jan V1-70B | 40-64GB | Professional work | Now |
|
||||
| Jan V1-180B | 100GB+ | Research, complex tasks | Coming 2026 |
|
||||
|
||||
## Three Ways to Run
|
||||
|
||||
<Tabs syncKey="deployment-mode">
|
||||
<TabItem label="Local">
|
||||
Run on your own hardware for complete privacy.
|
||||
|
||||
```bash
|
||||
# In Jan Desktop
|
||||
# Models download automatically when needed
|
||||
```
|
||||
|
||||
**Requirements:**
|
||||
- 7B: Any modern computer
|
||||
- 13B: 16GB RAM
|
||||
- 70B: 64GB RAM + GPU
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="jan.ai Cloud">
|
||||
Access via our API for maximum convenience.
|
||||
|
||||
```python
|
||||
# Same API, cloud power
|
||||
response = client.chat.completions.create(
|
||||
model="jan-v1-70b",
|
||||
messages=[...]
|
||||
)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- No hardware requirements
|
||||
- Always latest version
|
||||
- Scale as needed
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Self-Hosted">
|
||||
Deploy on your infrastructure for teams.
|
||||
|
||||
```yaml
|
||||
# Your server, your rules
|
||||
jan-server:
|
||||
model: jan-v1-70b
|
||||
users: 50
|
||||
gpu: A100
|
||||
```
|
||||
|
||||
**Perfect for:**
|
||||
- Compliance requirements
|
||||
- Team collaboration
|
||||
- Custom deployments
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
## What Makes V1 Different
|
||||
|
||||
### Actually Understands Context
|
||||
Jan V1 maintains conversation context better than most open models. No more repeating yourself every few messages.
|
||||
|
||||
### Trained for Real Work
|
||||
- Writing that sounds human
|
||||
- Code that actually runs
|
||||
- Analysis that makes sense
|
||||
- Answers that help
|
||||
|
||||
### Optimized for Deployment
|
||||
- Quantized versions (Q4, Q5, Q8) for different needs
|
||||
- Hardware acceleration support
|
||||
- Efficient memory usage
|
||||
- Fast inference
|
||||
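As a rough guide to the quantized variants above, model size scales with parameter count times bits per weight. The sketch below is a back-of-the-envelope estimate only, assuming roughly 4-5 bits per parameter for a Q4-style quantization and excluding runtime overhead such as the KV cache; real file sizes vary by format.

```python
def approx_quantized_size_gb(params_billion: float, bits_per_param: float = 4.5) -> float:
    """Very rough size estimate: parameters x bits per weight, converted to GB."""
    return round(params_billion * bits_per_param / 8, 1)

print(approx_quantized_size_gb(13))  # ~7.3 GB for a Q4-ish 13B model
print(approx_quantized_size_gb(70))  # ~39.4 GB for a Q4-ish 70B model
```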
|
||||
<Aside type="tip">
|
||||
Most users should start with V1-13B. It's the sweet spot of performance and hardware requirements.
|
||||
</Aside>
|
||||
|
||||
## Performance Reality
|
||||
|
||||
| Task | V1-7B | V1-13B | V1-70B | GPT-3.5 | GPT-4 |
|
||||
|------|-------|--------|--------|---------|-------|
|
||||
| General Chat | Good | Great | Excellent | Great | Excellent |
|
||||
| Coding | Basic | Good | Great | Good | Excellent |
|
||||
| Analysis | Basic | Good | Excellent | Good | Excellent |
|
||||
| Speed (local) | Very Fast | Fast | Slower | N/A | N/A |
|
||||
| Privacy | Complete | Complete | Complete | None | None |
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Professional Writing" icon="document">
|
||||
V1-13B and above handle emails, reports, and documentation with natural language.
|
||||
</Card>
|
||||
|
||||
<Card title="Code Assistant" icon="code">
|
||||
All V1 models understand code. Larger models can handle complex refactoring.
|
||||
</Card>
|
||||
|
||||
<Card title="Research & Analysis" icon="chart">
|
||||
V1-70B excels at synthesizing information and drawing insights.
|
||||
</Card>
|
||||
|
||||
<Card title="Customer Support" icon="support">
|
||||
V1 models can be fine-tuned for your specific domain and terminology.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## For Developers
|
||||
|
||||
### Local Inference
|
||||
```python
|
||||
# Runs on your machine
|
||||
from jan import Client
|
||||
client = Client(base_url="http://localhost:1337")
|
||||
|
||||
response = client.chat.completions.create(
|
||||
model="jan-v1-13b",
|
||||
messages=[{"role": "user", "content": "Explain async/await"}]
|
||||
)
|
||||
```
|
||||
|
||||
### Fine-Tuning
|
||||
```python
|
||||
# Make it yours
|
||||
jan.finetune(
|
||||
base_model="jan-v1-13b",
|
||||
dataset="your-data.jsonl",
|
||||
output="custom-model"
|
||||
)
|
||||
```
|
||||
|
||||
### Model Switching
|
||||
```python
|
||||
# Use the right tool for the job
MODEL_FOR_TASK = {
    "simple_query": "jan-v1-7b",   # Fast
    "normal_work":  "jan-v1-13b",  # Balanced
    "complex_task": "jan-v1-70b",  # Powerful
}
|
||||
```
|
||||
|
||||
## The Future
|
||||
|
||||
### V1 Series Roadmap
|
||||
- **Now**: 7B, 13B, 70B models
|
||||
- **2025**: Improved versions with better tool use
|
||||
- **2026**: 180B+ models competing with GPT-4
|
||||
- **Beyond**: Agentic capabilities built-in
|
||||
|
||||
### Our Commitment
|
||||
We're building models that:
|
||||
- Respect user privacy
|
||||
- Run where you need them
|
||||
- Solve real problems
|
||||
- Keep improving
|
||||
|
||||
<Aside type="caution">
|
||||
Jan V1 models are not toys. They're production-ready AI that happens to respect your privacy.
|
||||
</Aside>
|
||||
|
||||
## Why We Built This
|
||||
|
||||
Every major AI lab keeps their best models locked in the cloud. We believe you should be able to run competitive AI on your own terms. Jan V1 is our answer to that belief.
|
||||
|
||||
We're not trying to win benchmarks. We're trying to build AI that actually helps.
|
||||
|
||||
---
|
||||
|
||||
[Try Jan V1](https://jan.ai/download) | [Model Benchmarks](https://jan.ai/benchmarks) | [API Documentation](https://jan.ai/docs/api)
|
||||
@ -1,162 +0,0 @@
|
||||
---
|
||||
title: jan.ai
|
||||
description: Cloud AI that respects your privacy
|
||||
sidebar:
|
||||
order: 1
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
jan.ai is our cloud service that brings powerful AI to your browser and powers Cloud Mode in Jan Desktop and Mobile.
|
||||
|
||||
## What is jan.ai?
|
||||
|
||||
jan.ai is the hosted version of Jan Server. Same models, same experience, but running in the cloud when you need more power than your device can provide.
|
||||
|
||||
<Aside type="note">
|
||||
Currently in beta. Join the waitlist at [jan.ai/beta](https://jan.ai/beta).
|
||||
</Aside>
|
||||
|
||||
## How It Works
|
||||
|
||||
<CardGrid>
|
||||
<Card title="For Desktop Users" icon="laptop">
|
||||
When you switch to Cloud Mode in Jan Desktop, it connects to jan.ai automatically. No configuration needed.
|
||||
</Card>
|
||||
|
||||
<Card title="For Mobile Users" icon="mobile">
|
||||
jan.ai is the default cloud provider for Jan Mobile when not connected to your desktop.
|
||||
</Card>
|
||||
|
||||
<Card title="For Web Users" icon="globe">
|
||||
Visit jan.ai directly in your browser for instant access to AI without downloading anything.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Key Features
|
||||
|
||||
### No Setup Required
|
||||
Open jan.ai, start chatting. No API keys, no credit cards, no account required for basic use.
|
||||
|
||||
### Privacy-Respecting Cloud
|
||||
- Anonymous usage by default
|
||||
- No training on your data
|
||||
- Encrypted connections
|
||||
- Auto-delete options
|
||||
|
||||
### Powerful Models
|
||||
Access our best models without needing expensive hardware:
|
||||
- **Jan V1-70B**: Our most capable model
|
||||
- **Jan V1-13B**: Fast and efficient
|
||||
- **Jan Nano**: Quick responses for simple tasks
|
||||
|
||||
### Seamless Integration
|
||||
Works perfectly with:
|
||||
- Jan Desktop (Cloud Mode)
|
||||
- Jan Mobile
|
||||
- API access for developers
|
||||
- Browser-based chat
|
||||
|
||||
## Pricing
|
||||
|
||||
| Tier | Features | Price |
|
||||
|------|----------|--------|
|
||||
| Free | 50 queries/day, Jan Nano & V1-13B | $0 |
|
||||
| Pro | Unlimited queries, all models, priority | Coming Soon |
|
||||
| Team | Multiple users, admin controls | Coming Soon |
|
||||
|
||||
<Aside type="tip">
|
||||
Free tier is actually free. No trial period, no credit card required.
|
||||
</Aside>
|
||||
|
||||
## Why Use jan.ai?
|
||||
|
||||
### When Local Isn't Enough
|
||||
- Need more powerful models than your device can run
|
||||
- Want to access your AI from any device
|
||||
- Require faster responses for complex tasks
|
||||
|
||||
### But Still Want Privacy
|
||||
- No account required for basic use
|
||||
- Anonymous by default
|
||||
- Your data isn't used for training
|
||||
- Clear data deletion policies
|
||||
|
||||
### Without the Hassle
|
||||
- No API keys to manage
|
||||
- No complex pricing calculators
|
||||
- No surprise bills
|
||||
- Just AI that works
|
||||
|
||||
## For Developers
|
||||
|
||||
### API Access
|
||||
```javascript
|
||||
// Same API as local Jan
|
||||
const response = await fetch('https://api.jan.ai/v1/chat/completions', {
  method: 'POST',
  headers: {
    'Authorization': 'Bearer YOUR_KEY',
    'Content-Type': 'application/json'
  },
  body: JSON.stringify({
    model: 'jan-v1-70b',
    messages: [{ role: 'user', content: 'Hello' }]
  })
});
|
||||
```
|
||||
|
||||
### OpenAI Compatible
|
||||
Drop-in replacement for OpenAI API:
|
||||
```python
|
||||
# Just change the base URL
from openai import OpenAI

client = OpenAI(
    base_url="https://api.jan.ai/v1",
    api_key="your-jan-key"
)
|
||||
```
|
||||
|
||||
## Common Questions
|
||||
|
||||
### How is this different from ChatGPT?
|
||||
- We don't train on your data
|
||||
- Anonymous usage available
|
||||
- Same experience as local Jan
|
||||
- You own your conversations
|
||||
|
||||
### Can I use my own models?
|
||||
Not on jan.ai. For custom models, use Jan Desktop or self-host Jan Server.
|
||||
|
||||
### Is it really private?
|
||||
More private than most cloud AI:
|
||||
- Optional accounts
|
||||
- No behavioral tracking
|
||||
- Encrypted everything
|
||||
- Regular data purges
|
||||
|
||||
### When will it fully launch?
|
||||
Beta starts Q3 2025. Full launch Q4 2025.
|
||||
|
||||
## Coming Soon
|
||||
|
||||
### Beta Features
|
||||
- Basic chat interface
|
||||
- API access
|
||||
- Jan Desktop/Mobile integration
|
||||
|
||||
### Full Launch
|
||||
- Team accounts
|
||||
- Advanced tools (search, browser use)
|
||||
- Model customization
|
||||
- Usage analytics (your own data)
|
||||
|
||||
<Aside type="note">
|
||||
jan.ai is designed to feel exactly like local Jan, just faster. If you notice the difference (besides speed), we've failed.
|
||||
</Aside>
|
||||
|
||||
## The Philosophy
|
||||
|
||||
Cloud AI doesn't have to mean giving up control. jan.ai proves you can have:
|
||||
- Convenience without surveillance
|
||||
- Power without privacy invasion
|
||||
- Simplicity without lock-in
|
||||
|
||||
---
|
||||
|
||||
[Join Beta Waitlist](https://jan.ai/beta) | [Pricing Details](https://jan.ai/pricing) | [API Documentation](https://jan.ai/docs/api)
|
||||
@ -1,160 +0,0 @@
|
||||
---
|
||||
title: Jan Mobile
|
||||
description: Your AI assistant on the go
|
||||
sidebar:
|
||||
order: 3
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
Jan Mobile brings the same AI experience to your phone. Connect to your desktop, your server, or run models locally.
|
||||
|
||||
|
||||

|
||||
|
||||
|
||||
<Aside type="note">
|
||||
Coming Q4 2025. Sign up for the [newsletter](https://jan.ai/mobile) to be notified when it's available.
|
||||
</Aside>
|
||||
|
||||
## How It Works
|
||||
|
||||
Jan Mobile adapts to your situation:
|
||||
|
||||
At Home, you can connect to your Jan Desktop over WiFi
|
||||
|
||||

|
||||
|
||||
At Work, you can connect to your company Jan Server
|
||||
|
||||

|
||||
|
||||
On the Go, you can run Jan Nano on your phone or talk to your favorite cloud-based model
|
||||
|
||||

|
||||
|
||||
**No configuration needed. It just works.**
|
||||
|
||||
## Three Modes, One Experience
|
||||
|
||||
### Desktop Mode
|
||||
When you're near your computer at home, you can switch to Desktop Mode so your phone uses the desktop's models and processing power.
|
||||
|
||||
```
|
||||
Your Phone → WiFi → Your Desktop → Response
|
||||
(automatic) (powerful models)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Access to larger models
|
||||
- Faster processing
|
||||
- Shared conversations
|
||||
- No phone battery drain
|
||||
|
||||
### Server Mode
|
||||
Connect to your organization's Jan Server for team collaboration.
|
||||
|
||||
```
|
||||
Your Phone → Internet → Company Server → Response
|
||||
(secure) (shared models)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Team knowledge base
|
||||
- Consistent models
|
||||
- Central management
|
||||
- Work anywhere
|
||||
|
||||
### Local Mode
|
||||
No connection? No problem. Jan Nano runs directly on your phone.
|
||||
|
||||
```
|
||||
Your Phone → Jan Nano (6GB) → Response
|
||||
(private & offline)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Complete privacy
|
||||
- Works offline
|
||||
- No data usage
|
||||
- Always available
|
||||
|
||||
<Aside type="tip">
|
||||
The app automatically switches between modes based on what's available. You don't need to do anything.
|
||||
</Aside>
|
||||
|
||||
## Key Features
|
||||
|
||||
<Card title="Seamless Switching" icon="random">
|
||||
Move from home to office to airplane. One click, and Jan adapts immediately.
|
||||
|
||||

|
||||
</Card>
|
||||
|
||||
<Card title="Voice First" icon="phone">
|
||||
Talk to Jan naturally. Responses can be spoken too.
|
||||
|
||||

|
||||
</Card>
|
||||
|
||||
<Card title="Sync Everything" icon="up-arrow">
|
||||
Conversations, settings, and preferences follow you across devices.
|
||||

|
||||
</Card>
|
||||
|
||||
## Privacy & Security
|
||||
|
||||
### Your Data, Your Control
|
||||
- Local Mode: Everything stays on your phone
|
||||
- Desktop Mode: Direct encrypted connection
|
||||
- Server Mode: Your organization's policies apply
|
||||
|
||||
### No Compromises
|
||||
- Biometric app lock
|
||||
- Encrypted storage
|
||||
- No cloud backups without permission
|
||||
- Clear data anytime
|
||||
|
||||
## Why Mobile Matters
|
||||
|
||||
Your phone is with you always. Your AI assistant should be too. But that doesn't mean sacrificing
|
||||
privacy or control.
|
||||
|
||||
Jan Mobile proves you can have:
|
||||
- Powerful AI anywhere
|
||||
- Complete privacy when needed
|
||||
- Simple experience always
|
||||
- No subscriptions or tracking
|
||||
|
||||
## Coming Features
|
||||
|
||||
### Launch (Q4 2025)
|
||||
- iOS and Android apps
|
||||
- Basic chat interface
|
||||
- Three connection modes
|
||||
|
||||
### Post-Launch
|
||||
- Voice input
|
||||
- Background sync
|
||||
|
||||
<Aside type="note">
|
||||
Jan Mobile uses Tauri for native performance with a small app size (~50MB).
|
||||
</Aside>
|
||||
|
||||
## The Vision
|
||||
|
||||
Most mobile AI apps are just cloud wrappers. Jan Mobile is different:
|
||||
|
||||
| Feature | Other AI Apps | Jan Mobile |
|
||||
|---------|---------------|------------|
|
||||
| Offline Mode | ❌ | ✅ Jan Nano |
|
||||
| Desktop Connection | ❌ | ✅ Your models |
|
||||
| Privacy | Your data in cloud | Your data stays yours |
|
||||
| Cost | Monthly subscription | Free with your hardware |
|
||||
|
||||
## While you wait for Jan Mobile:
|
||||
|
||||
1. **Set up Jan Desktop** - It will power your mobile experience
|
||||
2. **Try jan.ai** - Get familiar with the interface
|
||||
3. **Join our newsletter** - Be first to know when it launches
|
||||
4. **Tell us what you need** - Shape the mobile experience
|
||||
@ -1,235 +0,0 @@
|
||||
---
|
||||
title: Jan Server
|
||||
description: Your own private AI cloud
|
||||
sidebar:
|
||||
order: 4
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
Jan Server is Jan Desktop with multi-user support. Deploy it on your hardware to create your own private AI cloud for your team or organization.
|
||||
|
||||
<Aside type="note">
|
||||
Coming soon. Join the early access list at [jan.ai/server](https://jan.ai/server).
|
||||
</Aside>
|
||||
|
||||
## What is Jan Server?
|
||||
|
||||
```
|
||||
Jan Server = Jan Desktop + Multi-user support + Real hardware
|
||||
```
|
||||
|
||||
It's the same engine that powers Jan Desktop, scaled up for teams. Your data stays on your servers, your models run on your GPUs, your AI remains yours.
|
||||
|
||||
## Why Organizations Need This
|
||||
|
||||
### The Problem
|
||||
Every API call to ChatGPT or Claude is:
|
||||
- Your intellectual property leaving your network
|
||||
- Potential training data for someone else's model
|
||||
- A compliance nightmare waiting to happen
|
||||
- A monthly bill that never ends
|
||||
|
||||
### The Solution
|
||||
Jan Server gives you:
|
||||
- **Complete control**: Your hardware, your rules
|
||||
- **Total privacy**: Nothing leaves your network
|
||||
- **Predictable costs**: One-time hardware investment
|
||||
- **Compliance ready**: GDPR, HIPAA, SOC2 friendly
|
||||
|
||||
## Deployment Options
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Small Team (5-10 users)" icon="users">
|
||||
**Hardware**: Single RTX 6000 Ada (48GB)
|
||||
**RAM**: 128GB
|
||||
**Models**: Up to 70B parameters
|
||||
**Cost**: ~$15k one-time
|
||||
</Card>
|
||||
|
||||
<Card title="Department (10-50 users)" icon="building">
|
||||
**Hardware**: 2-4 GPU nodes
|
||||
**RAM**: 256GB per node
|
||||
**Models**: Multiple concurrent
|
||||
**Cost**: ~$50-100k one-time
|
||||
</Card>
|
||||
|
||||
<Card title="Enterprise (50+ users)" icon="corporate">
|
||||
**Hardware**: DGX cluster
|
||||
**RAM**: As needed
|
||||
**Models**: Full range
|
||||
**Cost**: Custom quote
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Simple Deployment
|
||||
|
||||
### Docker (Recommended)
|
||||
```yaml
|
||||
version: '3'
|
||||
services:
|
||||
jan-server:
|
||||
image: jan.ai/server:latest
|
||||
ports:
|
||||
- "80:80"
|
||||
- "1337:1337"
|
||||
volumes:
|
||||
- ./models:/models
|
||||
- ./data:/data
|
||||
deploy:
|
||||
resources:
|
||||
reservations:
|
||||
devices:
|
||||
- capabilities: [gpu]
|
||||
```
|
||||
|
||||
### Kubernetes
|
||||
For larger deployments with auto-scaling and high availability.
|
||||
|
||||
### Bare Metal
|
||||
For maximum performance and custom configurations.
|
||||
|
||||
## Key Features
|
||||
|
||||
### Multi-User Management
|
||||
- Individual accounts and API keys
|
||||
- Usage tracking and quotas
|
||||
- Model access controls
|
||||
- Team collaboration
|
||||
|
||||
### Same API as Desktop
|
||||
```python
|
||||
# Your code doesn't change
from openai import OpenAI

client = OpenAI(
    base_url="https://jan.company.internal/v1",
    api_key="user-specific-key"
)
|
||||
```
|
||||
|
||||
### Model Governance
|
||||
- Control which models are available
|
||||
- Set user permissions
|
||||
- Monitor usage
|
||||
- Ensure compliance
|
||||
|
||||
<Aside type="tip">
|
||||
Jan Server uses the same models as Desktop. No special "enterprise" versions with inflated prices.
|
||||
</Aside>
|
||||
|
||||
## Real Deployments
|
||||
|
||||
| Use Case | Setup | Result |
|
||||
|----------|-------|---------|
|
||||
| Law Firm | 2x RTX 6000, 200 users | Client data never leaves network |
|
||||
| Hospital | DGX node, 500 users | HIPAA compliant AI assistant |
|
||||
| Tech Startup | 4x RTX 4090, 50 users | 90% cost reduction vs. OpenAI |
|
||||
| University | Multi-node cluster | Unrestricted research |
|
||||
|
||||
## Hardware Guide
|
||||
|
||||
### Minimum Requirements
|
||||
- **GPU**: RTX 3090 or better (24GB VRAM)
|
||||
- **CPU**: 16+ cores
|
||||
- **RAM**: 64GB minimum
|
||||
- **Storage**: 1TB NVMe SSD
|
||||
|
||||
### Recommended Setup
|
||||
- **GPU**: RTX 6000 Ada or A100
|
||||
- **CPU**: Dual socket Xeon/EPYC
|
||||
- **RAM**: 128-256GB
|
||||
- **Storage**: RAID NVMe array
|
||||
|
||||
### Scaling Considerations
|
||||
- 1 GPU can serve ~5-10 concurrent users
|
||||
- 70B models need 40-80GB VRAM
|
||||
- CPU inference possible for smaller models
|
||||
- Network: 10Gbps recommended
|
||||
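To turn the rules of thumb above into a first sizing estimate, a back-of-the-envelope sketch follows; the constant is an assumption taken from the bullets above, not a measured figure.

```python
import math

USERS_PER_GPU = 7  # assumption: roughly 5-10 concurrent users per GPU

def gpus_for_users(concurrent_users: int) -> int:
    """Minimum GPU count for a given number of concurrent users."""
    return math.ceil(concurrent_users / USERS_PER_GPU)

print(gpus_for_users(10))  # small team -> ~2 GPUs
print(gpus_for_users(50))  # department -> ~8 GPUs
```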
|
||||
## Why Self-Host?
|
||||
|
||||
### For IT Teams
|
||||
- No data leaves your network
|
||||
- Complete audit trails
|
||||
- Integrate with existing auth (LDAP/AD)
|
||||
- Predictable resource usage
|
||||
|
||||
### For Security Teams
|
||||
- Air-gapped deployment options
|
||||
- End-to-end encryption
|
||||
- No third-party access
|
||||
- Full compliance control
|
||||
|
||||
### For Finance Teams
|
||||
- One-time hardware cost
|
||||
- No per-token pricing
|
||||
- Predictable TCO
|
||||
- Use existing infrastructure
|
||||
|
||||
## Coming Features
|
||||
|
||||
### Phase 1 (Launch)
|
||||
- Basic multi-user support
|
||||
- Web interface
|
||||
- API compatibility
|
||||
- Usage monitoring
|
||||
|
||||
### Phase 2 (Post-Launch)
|
||||
- Advanced governance
|
||||
- Fine-tuning interface
|
||||
- Automated scaling
|
||||
- Backup/restore
|
||||
|
||||
### Phase 3 (Future)
|
||||
- Federated deployments
|
||||
- Cross-region sync
|
||||
- Advanced analytics
|
||||
- Custom model training
|
||||
|
||||
<Aside type="caution">
|
||||
Jan Server requires proper cooling and power for GPU hardware. Plan your infrastructure accordingly.
|
||||
</Aside>
|
||||
|
||||
## Migration Path
|
||||
|
||||
### From Cloud AI
|
||||
1. Deploy Jan Server
|
||||
2. Import your workflows
|
||||
3. Update API endpoints
|
||||
4. Migrate users gradually
|
||||
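For step 3 above, switching an existing OpenAI-based integration usually comes down to changing the base URL, as in the earlier "Same API as Desktop" example. A minimal sketch, with an illustrative internal hostname:

```python
from openai import OpenAI

# Before: traffic goes to a third-party cloud
# client = OpenAI(api_key="sk-...")

# After: the same client code targets your self-hosted Jan Server
client = OpenAI(
    base_url="https://jan.company.internal/v1",  # illustrative internal hostname
    api_key="user-specific-key",
)
```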
|
||||
### From Jan Desktop
|
||||
1. Same models work instantly
|
||||
2. Add user management
|
||||
3. Scale as needed
|
||||
|
||||
## The Philosophy
|
||||
|
||||
We believe organizations should own their AI infrastructure just like they own their data. Jan Server makes this possible without compromising on capabilities.
|
||||
|
||||
This isn't about avoiding the cloud - it's about having a choice. Run your AI where it makes sense for your organization.
|
||||
|
||||
## Support Options
|
||||
|
||||
### Community Edition
|
||||
- Full features
|
||||
- Community support
|
||||
- Perfect for small teams
|
||||
|
||||
### Enterprise Edition
|
||||
- Priority support
|
||||
- Custom deployment help
|
||||
- SLA guarantees
|
||||
- Training included
|
||||
|
||||
## Get Started
|
||||
|
||||
Jan Server is coming soon. While you wait:
|
||||
|
||||
1. **Plan your hardware**: Check our requirements above
|
||||
2. **Join early access**: Get notified when available
|
||||
3. **Test with Desktop**: Same models, same experience
|
||||
4. **Prepare your team**: AI that respects your infrastructure
|
||||
|
||||
---
|
||||
|
||||
[Join Early Access](https://jan.ai/server) | [Hardware Guide](https://jan.ai/docs/server/hardware) | [Enterprise Contact](https://jan.ai/enterprise)
|
||||
@ -1,276 +0,0 @@
|
||||
---
|
||||
title: Browser Use
|
||||
description: Web automation capabilities for AI assistants
|
||||
sidebar:
|
||||
order: 2
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
|
||||
Browser Use enables Jan to interact with web pages programmatically. It can fill forms, extract data, and automate multi-step workflows while respecting site policies.
|
||||
|
||||
## Overview
|
||||
|
||||
Browser Use operates in two modes:
|
||||
|
||||
| Mode | Execution | Use Case | Requirements |
|
||||
|------|-----------|----------|--------------|
|
||||
| Local | Your device | Personal automation | Browser installed |
|
||||
| Cloud | Jan servers | Scheduled tasks | Internet connection |
|
||||
|
||||
## Capabilities
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Form Automation" icon="document">
|
||||
- Fill application forms
|
||||
- Extract structured data
|
||||
- Handle multi-page flows
|
||||
- Screenshot verification
|
||||
</Card>
|
||||
|
||||
<Card title="Data Collection" icon="chart">
|
||||
- Price monitoring
|
||||
- Content aggregation
|
||||
- Competitor analysis
|
||||
- Research automation
|
||||
</Card>
|
||||
|
||||
<Card title="Task Automation" icon="rocket">
|
||||
- Appointment booking
|
||||
- Status checking
|
||||
- Report generation
|
||||
- Workflow execution
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Safety Boundaries
|
||||
|
||||
### Allowed Actions
|
||||
- Read public content
|
||||
- Fill non-financial forms
|
||||
- Click buttons and links
|
||||
- Take screenshots
|
||||
- Extract text and data
|
||||
- Handle JavaScript sites
|
||||
|
||||
### Restricted Actions
|
||||
- Payment processing
|
||||
- Financial transactions
|
||||
- CAPTCHA bypassing
|
||||
- Terms of Service violations
|
||||
- Credential harvesting
|
||||
- Malicious automation
|
||||
|
||||
<Aside type="caution">
|
||||
High-risk actions require explicit user confirmation. Browser Use will never complete financial transactions automatically.
|
||||
</Aside>
|
||||
|
||||
## Implementation
|
||||
|
||||
<Tabs syncKey="execution-mode">
|
||||
<TabItem label="Local Execution">
|
||||
Runs on your device using Playwright.
|
||||
|
||||
```javascript
|
||||
await jan.browser.use({
|
||||
task: "Fill job application",
|
||||
url: "https://careers.example.com",
|
||||
mode: "local",
|
||||
headless: false // Watch it work
|
||||
});
|
||||
```
|
||||
|
||||
**Advantages:**
|
||||
- Uses your sessions
|
||||
- Access internal tools
|
||||
- Complete privacy
|
||||
- Visual feedback
|
||||
|
||||
**Limitations:**
|
||||
- Uses local resources
|
||||
- Requires browser
|
||||
- Limited to one task
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Cloud Execution">
|
||||
Runs on Jan's infrastructure.
|
||||
|
||||
```javascript
|
||||
await jan.browser.use({
|
||||
task: "Monitor prices daily",
|
||||
urls: ["site1.com", "site2.com"],
|
||||
mode: "cloud",
|
||||
schedule: "0 9 * * *"
|
||||
});
|
||||
```
|
||||
|
||||
**Advantages:**
|
||||
- Runs 24/7
|
||||
- Parallel execution
|
||||
- No local resources
|
||||
- Scheduled tasks
|
||||
|
||||
**Limitations:**
|
||||
- No personal accounts
|
||||
- Public sites only
|
||||
- Internet required
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
## Common Use Cases
|
||||
|
||||
### Research Automation
|
||||
```javascript
|
||||
// Gather pricing from multiple vendors
|
||||
const prices = await jan.browser.use({
|
||||
task: "Extract pricing tables",
|
||||
urls: competitorUrls,
|
||||
extract: ["price", "features", "limits"]
|
||||
});
|
||||
```
|
||||
|
||||
### Form Processing
|
||||
```javascript
|
||||
// Fill repetitive applications
|
||||
await jan.browser.use({
|
||||
task: "Complete application",
|
||||
data: applicationData,
|
||||
confirmation: "review" // Stop before submit
|
||||
});
|
||||
```
|
||||
|
||||
### Monitoring
|
||||
```javascript
|
||||
// Track availability
|
||||
await jan.browser.use({
|
||||
task: "Check stock status",
|
||||
url: "https://store.example.com/product",
|
||||
notify: "when_available"
|
||||
});
|
||||
```
|
||||
|
||||
## Integration with Other Tools
|
||||
|
||||
### Search + Browser Use
|
||||
```
|
||||
User: "Book a restaurant for Saturday"
|
||||
Jan: [searches restaurants] → [checks availability] →
|
||||
[fills reservation form] → [confirms details]
|
||||
```
|
||||
|
||||
### Deep Research + Browser Use
|
||||
```
|
||||
User: "Compare SaaS pricing across competitors"
|
||||
Jan: [identifies competitors] → [visits pricing pages] →
|
||||
[extracts data] → [creates comparison]
|
||||
```
|
||||
|
||||
## Technical Architecture
|
||||
|
||||
### Execution Pipeline
|
||||
1. **Task Analysis**: Understand user intent
|
||||
2. **Site Navigation**: Load and interact with pages
|
||||
3. **Action Execution**: Click, type, select
|
||||
4. **Data Extraction**: Capture results
|
||||
5. **Verification**: Screenshot confirmation
|
||||
|
||||
### Performance Metrics
|
||||
| Operation | Local Time | Cloud Time | Success Rate |
|
||||
|-----------|------------|------------|--------------|
|
||||
| Page load | 1-3s | 2-4s | 95% |
|
||||
| Form fill | 5-10s | 8-15s | 90% |
|
||||
| Multi-page | 30-60s | 45-90s | 85% |
|
||||
| Data extract | 10-20s | 15-30s | 92% |
|
||||
|
||||
<Aside type="note">
|
||||
Success rates depend on site complexity and anti-automation measures. Simple sites have higher success rates.
|
||||
</Aside>
|
||||
|
||||
## Configuration
|
||||
|
||||
### Basic Settings
|
||||
```json
|
||||
{
|
||||
"browserUse": {
|
||||
"defaultMode": "local",
|
||||
"timeout": 60000,
|
||||
"retries": 3,
|
||||
"screenshotOnError": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Options
|
||||
```json
|
||||
{
|
||||
"browserUse": {
|
||||
"userAgent": "Mozilla/5.0...",
|
||||
"viewport": { "width": 1920, "height": 1080 },
|
||||
"cloudWorkers": 5,
|
||||
"rateLimit": {
|
||||
"requests": 100,
|
||||
"window": "1h"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
| Error Type | Cause | Resolution |
|
||||
|------------|-------|------------|
|
||||
| Navigation failed | Site down/blocked | Retry or alternative site |
|
||||
| Element not found | Page structure changed | Update selectors |
|
||||
| Timeout | Slow site/connection | Increase timeout |
|
||||
| Blocked by site | Anti-bot measures | Use local mode |
|
||||
|
||||
## Privacy Considerations
|
||||
|
||||
### Local Mode
|
||||
- All data stays on device
|
||||
- Uses your browser profile
|
||||
- No external connections
|
||||
- Complete user control
|
||||
|
||||
### Cloud Mode
|
||||
- Tasks are anonymized
|
||||
- No personal data stored
|
||||
- Results encrypted
|
||||
- Auto-deletion after delivery
|
||||
|
||||
<Aside type="tip">
|
||||
For sensitive tasks, always use local mode. Cloud mode is best for public data collection and non-personal automation.
|
||||
</Aside>
|
||||
|
||||
## Roadmap
|
||||
|
||||
### v0.8.0 (Q4 2025)
|
||||
- Basic browser automation
|
||||
- Form filling
|
||||
- Screenshot capture
|
||||
- Local mode only
|
||||
|
||||
### v0.8.x (Q1 2026)
|
||||
- Cloud execution
|
||||
- Scheduled tasks
|
||||
- Parallel processing
|
||||
- Result caching
|
||||
|
||||
### v0.9.0 (Q2 2026)
|
||||
- Visual element detection
|
||||
- Natural language navigation
|
||||
- Advanced error recovery
|
||||
- Workflow templates
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Start Simple**: Test with single-page tasks first
|
||||
2. **Use Confirmations**: Review before critical actions
|
||||
3. **Handle Errors**: Expect and plan for failures
|
||||
4. **Respect Sites**: Follow robots.txt and rate limits
|
||||
5. **Local First**: Use cloud only when necessary
|
||||
|
||||
---
|
||||
|
||||
[Documentation](https://jan.ai/docs/browser-use) | [Examples](https://github.com/janhq/browser-use-examples) | [API Reference](https://jan.ai/docs/api/browser)
|
||||
@ -1,292 +0,0 @@
|
||||
---
|
||||
title: Deep Research
|
||||
description: Comprehensive research and analysis capabilities
|
||||
sidebar:
|
||||
order: 3
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
|
||||
Deep Research provides comprehensive investigation capabilities beyond simple search. It reads sources, synthesizes information, and produces analysis with proper citations.
|
||||
|
||||
## Research Levels
|
||||
|
||||
| Level | Duration | Sources | Output | Use Case |
|
||||
|-------|----------|---------|--------|----------|
|
||||
| Quick | 5 min | 10-20 | Summary with key points | Overview needed |
|
||||
| Standard | 30 min | 20-50 | Detailed analysis | Work projects |
|
||||
| Comprehensive | 2+ hours | 50-200 | Full literature review | Academic research |
|
||||
|
||||
## How It Works
|
||||
|
||||
### Research Pipeline
|
||||
1. **Query Analysis**: Understand scope and requirements
|
||||
2. **Source Discovery**: Find relevant materials
|
||||
3. **Content Extraction**: Read and process sources
|
||||
4. **Synthesis**: Connect information across sources
|
||||
5. **Output Generation**: Create structured analysis
|
||||
|
||||
### Source Types
|
||||
<CardGrid>
|
||||
<Card title="Primary Sources" icon="document">
|
||||
- Academic papers (arXiv, PubMed)
|
||||
- Technical documentation
|
||||
- Patent databases
|
||||
- Code repositories
|
||||
- Official reports
|
||||
</Card>
|
||||
|
||||
<Card title="Secondary Sources" icon="open-book">
|
||||
- Expert blog posts
|
||||
- Conference proceedings
|
||||
- Tutorial content
|
||||
- Forum discussions
|
||||
- News articles
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Research Modes
|
||||
|
||||
<Tabs syncKey="research-depth">
|
||||
<TabItem label="Quick Research">
|
||||
Fast overview for immediate needs.
|
||||
|
||||
```javascript
|
||||
await jan.research({
|
||||
query: "quantum computing recent advances",
|
||||
depth: "quick",
|
||||
maxSources: 20
|
||||
});
|
||||
```
|
||||
|
||||
**Output includes:**
|
||||
- Executive summary
|
||||
- Key developments
|
||||
- Major players
|
||||
- 5-10 citations
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Standard Research">
|
||||
Balanced depth for professional work.
|
||||
|
||||
```javascript
|
||||
await jan.research({
|
||||
query: "comparison of vector databases",
|
||||
depth: "standard",
|
||||
includeBenchmarks: true
|
||||
});
|
||||
```
|
||||
|
||||
**Output includes:**
|
||||
- Detailed analysis
|
||||
- Comparative tables
|
||||
- Performance metrics
|
||||
- 20-50 citations
|
||||
</TabItem>
|
||||
|
||||
<TabItem label="Comprehensive Research">
|
||||
Academic-level investigation.
|
||||
|
||||
```javascript
|
||||
await jan.research({
|
||||
query: "mRNA vaccine mechanisms",
|
||||
depth: "comprehensive",
|
||||
timeRange: "2020-2024",
|
||||
includePatents: true
|
||||
});
|
||||
```
|
||||
|
||||
**Output includes:**
|
||||
- Literature review
|
||||
- Methodology analysis
|
||||
- Future directions
|
||||
- 100+ citations
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
## Key Features
|
||||
|
||||
### Citation Management
|
||||
- Every claim backed by sources
|
||||
- Proper academic formatting
|
||||
- Direct links to materials
|
||||
- Citation graph exploration
|
||||
|
||||
### Quality Assurance
|
||||
| Check Type | Description | Automatic |
|
||||
|------------|-------------|-----------|
|
||||
| Source verification | Validates publication status | Yes |
|
||||
| Date checking | Ensures currency of information | Yes |
|
||||
| Contradiction detection | Flags conflicting claims | Yes |
|
||||
| Bias identification | Notes potential conflicts | Partial |
|
||||
| Retraction alerts | Warns about retracted papers | Yes |
|
||||
|
||||
### Output Formats
|
||||
- **Markdown**: With inline citations
|
||||
- **PDF**: Formatted report
|
||||
- **LaTeX**: Academic papers
|
||||
- **JSON**: Structured data
|
||||
- **Bibtex**: Reference management
|
||||
|
||||
<Aside type="note">
|
||||
All research includes a limitations section acknowledging scope boundaries and potential gaps.
|
||||
</Aside>
|
||||
|
||||
## Real-World Examples
|
||||
|
||||
### Technical Research
|
||||
```
|
||||
Query: "Rust async runtime implementations"
|
||||
Duration: 45 minutes
|
||||
Sources analyzed: 67
|
||||
|
||||
Output structure:
|
||||
1. Current landscape overview
|
||||
2. Implementation comparison table
|
||||
3. Performance benchmarks
|
||||
4. Code examples
|
||||
5. Best practices
|
||||
6. 43 verified citations
|
||||
```
|
||||
|
||||
### Market Analysis
|
||||
```
|
||||
Query: "Edge AI hardware market 2024"
|
||||
Duration: 2 hours
|
||||
Sources analyzed: 124
|
||||
|
||||
Output structure:
|
||||
1. Market size and projections
|
||||
2. Key players analysis
|
||||
3. Technology comparison
|
||||
4. Investment trends
|
||||
5. Future outlook
|
||||
6. 89 citations from reports, papers, and news
|
||||
```
|
||||
|
||||
### Literature Review
|
||||
```
|
||||
Query: "CRISPR applications in agriculture"
|
||||
Duration: 3 hours
|
||||
Sources analyzed: 198
|
||||
|
||||
Output structure:
|
||||
1. Historical development
|
||||
2. Current applications
|
||||
3. Technical challenges
|
||||
4. Regulatory landscape
|
||||
5. Future possibilities
|
||||
6. 156 academic citations
|
||||
```
|
||||
|
||||
## Configuration Options
|
||||
|
||||
### Basic Settings
|
||||
```json
|
||||
{
|
||||
"research": {
|
||||
"defaultDepth": "standard",
|
||||
"cacheResults": true,
|
||||
"verifyDates": true,
|
||||
"includePreprints": false
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Options
|
||||
```json
|
||||
{
|
||||
"research": {
|
||||
"sources": {
|
||||
"academic": ["arxiv", "pubmed", "ieee"],
|
||||
"exclude": ["reddit", "quora"],
|
||||
"requirePeerReview": true
|
||||
},
|
||||
"output": {
|
||||
"style": "academic",
|
||||
"citationFormat": "APA",
|
||||
"includeSummaries": true
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Integration with Other Tools
|
||||
|
||||
### Search + Deep Research
|
||||
```
|
||||
User: "Latest transformer architecture improvements"
|
||||
Jan: [searches for papers] → [reads 30 papers] →
|
||||
[analyzes improvements] → [creates taxonomy]
|
||||
```
|
||||
|
||||
### Browser Use + Deep Research
|
||||
```
|
||||
User: "Analyze all YC startups in AI space"
|
||||
Jan: [crawls YC directory] → [visits company sites] →
|
||||
[extracts data] → [produces market analysis]
|
||||
```
|
||||
|
||||
<Aside type="tip">
|
||||
For best results, be specific about scope, time range, and desired output format in your research query.
|
||||
</Aside>
|
||||
|
||||
## Limitations
|
||||
|
||||
### Cannot Access
|
||||
- Paywalled content (in local mode)
|
||||
- Private databases
|
||||
- Real-time data streams
|
||||
- Proprietary research
|
||||
|
||||
### Won't Perform
|
||||
- Original research
|
||||
- Data collection experiments
|
||||
- Statistical analysis of raw data
|
||||
- Peer review functions
|
||||
|
||||
## Performance Expectations
|
||||
|
||||
| Research Type | Sources | Time | Accuracy |
|
||||
|---------------|---------|------|----------|
|
||||
| News summary | 10-20 | 5 min | High |
|
||||
| Technical review | 30-50 | 30 min | Very High |
|
||||
| Academic survey | 100+ | 2+ hours | Excellent |
|
||||
| Patent analysis | 50-100 | 1-2 hours | High |
|
||||
|
||||
## Local vs Cloud Processing
|
||||
|
||||
### Local Mode
|
||||
- Downloads papers to cache
|
||||
- Processes on your device
|
||||
- Complete privacy
|
||||
- Slower processing
|
||||
|
||||
### Cloud Mode
|
||||
- Access to more sources
|
||||
- Faster processing
|
||||
- Parallel analysis
|
||||
- Anonymous queries
|
||||
|
||||
## Roadmap
|
||||
|
||||
### v0.7.0 (Current)
|
||||
- Basic research pipeline
|
||||
- Academic paper parsing
|
||||
- Simple citation management
|
||||
|
||||
### v0.8.0
|
||||
- Visual paper understanding
|
||||
- Code extraction from papers
|
||||
- Patent search integration
|
||||
|
||||
### v0.9.0
|
||||
- Multi-language sources
|
||||
- Collaborative research
|
||||
- Real-time monitoring
|
||||
- Custom research agents
|
||||
|
||||
---
|
||||
|
||||
[Examples](https://jan.ai/docs/deep-research/examples) | [API Reference](https://jan.ai/docs/api/research) | [Research Templates](https://github.com/janhq/research-templates)
|
||||
@ -1,249 +0,0 @@
---
title: Search
description: Privacy-respecting web search for AI assistants
sidebar:
  order: 1
---

import { Aside, Card, CardGrid, Tabs, TabItem } from '@astrojs/starlight/components';

Jan Search provides web search capabilities while maintaining user privacy. It offers local crawling, anonymized cloud search, and intelligent routing.

## Architecture

Jan Search operates in three modes:

| Mode | Description | Privacy | Speed |
|------|-------------|---------|-------|
| Local | Direct web crawling from your device | Maximum | Slower |
| Cloud | Anonymized search via Jan API | High | Fast |
| Hybrid | Automatic routing based on query | Variable | Optimal |

## Implementation

<Tabs syncKey="search-mode">
<TabItem label="Local Search">
Everything happens on your machine using Crawl4AI.

```javascript
const results = await jan.tools.search({
  query: "your search query",
  mode: "local",
  maxResults: 10
});
```

**Characteristics:**
- No data leaves your device
- Direct website crawling
- Limited to public content
- 2-5 seconds per query
</TabItem>

<TabItem label="Cloud Search">
Anonymized queries through Jan's search infrastructure.

```javascript
const results = await jan.tools.search({
  query: "your search query",
  mode: "cloud"
});
```

**Characteristics:**
- No user accounts required
- Cached common queries
- Sub-second response time
- No search history stored
</TabItem>

<TabItem label="Hybrid Mode">
Intelligent routing based on query sensitivity.

```javascript
const results = await jan.tools.search({
  query: userQuery,
  mode: "auto"
});
```

**Routing logic:**
- Medical/financial → Local only
- General queries → Cloud for speed
- User preferences → Configurable
</TabItem>
</Tabs>

## MCP Integration

Search functions as a Model Context Protocol tool, enabling:

### Basic Usage
```
User: "What's the latest on AI regulations?"
Jan: [searches web] → [reads articles] → [summarizes findings]
```

### Advanced Workflows
```
User: "Compare Python web frameworks from 2024"
Jan: [searches multiple sources] → [analyzes documentation] →
     [creates comparison table] → [provides recommendations]
```

### Developer Integration
```python
async def research_topic(topic):
    # Search for information
    results = await jan.search(topic)

    # Read top results
    content = await jan.read_urls(results[:5])

    # Generate analysis
    summary = await jan.analyze(content)

    return summary
```

<Aside type="note">
Search results include proper attribution with direct links to sources.
</Aside>

## Features

<CardGrid>
<Card title="Real-time Access" icon="rocket">
- Current information, not training data
- JavaScript rendering support
- Dynamic content handling
- Multi-page crawling
</Card>

<Card title="Privacy Protection" icon="shield">
- No user tracking
- Anonymous queries
- Local storage only
- No cookies or fingerprinting
</Card>

<Card title="Smart Processing" icon="brain">
- Semantic understanding
- Result ranking
- Content extraction
- Source verification
</Card>
</CardGrid>

## Search Types

### Quick Answer
Fast factual responses for simple queries.
```
Input: "weather tokyo"
Output: "18°C, cloudy, 70% humidity"
Time: < 1 second
```

### Deep Research
Comprehensive analysis with multiple sources.
```
Input: "impact of LLMs on software development"
Output: Multi-page report with citations
Time: 30-60 seconds
```

### Continuous Monitoring
Track topics over time (coming v0.8.0).
```
Input: "monitor AI model releases"
Output: Daily summaries of new models
```

## Privacy Details

### Local Mode Data
| Data Type | Storage | Duration | Access |
|-----------|---------|----------|--------|
| Queries | ~/jan/search | Session | Local only |
| Results | Memory | Temporary | Process only |
| Cache | ~/jan/cache | 7 days | User controlled |

### Cloud Mode Data
| Data Type | Collection | Storage | Usage |
|-----------|------------|---------|-------|
| Query text | Anonymized | None | Processing only |
| IP address | Country only | None | Rate limiting |
| User ID | Not collected | N/A | N/A |

<Aside type="caution">
Sensitive queries automatically route to local mode. This includes medical symptoms, financial information, and personal identifiers.
</Aside>

## Performance Benchmarks

| Query Type | Local Mode | Cloud Mode | Hybrid |
|------------|------------|------------|--------|
| Simple fact | 2-3s | < 0.5s | < 0.5s |
| News search | 3-5s | < 1s | < 1s |
| Deep research | 20-30s | 5-10s | 5-15s |
| Multi-site | 30-60s | 10-20s | 10-30s |

## Configuration

### Basic Settings
```json
{
  "search": {
    "defaultMode": "hybrid",
    "maxResults": 20,
    "timeout": 30000,
    "cacheEnabled": true
  }
}
```

### Advanced Options
```json
{
  "search": {
    "localOnly": ["medical", "finance", "personal"],
    "preferCloud": ["news", "weather", "general"],
    "customRouting": {
      "enabled": true,
      "rules": [...]
    }
  }
}
```
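
To make the routing keys above concrete, here is a minimal sketch of how a client could apply them. The `route_query` helper, the keyword-matching heuristic, and the inline config are illustrative assumptions for this example, not Jan's actual implementation:

```python
def route_query(query: str, search_config: dict) -> str:
    """Pick a search mode from the localOnly / preferCloud keyword lists (illustrative)."""
    text = query.lower()
    if any(topic in text for topic in search_config.get("localOnly", [])):
        return "local"   # sensitive topics stay on-device
    if any(topic in text for topic in search_config.get("preferCloud", [])):
        return "cloud"   # low-sensitivity topics favor speed
    return search_config.get("defaultMode", "hybrid")

config = {
    "defaultMode": "hybrid",
    "localOnly": ["medical", "finance", "personal"],
    "preferCloud": ["news", "weather", "general"],
}
print(route_query("recent medical research on sleep", config))  # -> "local"
```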

## Roadmap

### v0.6.7 (Current)
- Basic search MCP
- Local crawling
- Simple cloud search

### v0.7.0
- Semantic search
- Multi-source aggregation
- Improved result ranking

### v0.8.0
- Visual search
- Continuous monitoring
- Custom search engines

### v0.9.0
- Federated search networks
- User-contributed indices
- Advanced filtering

<Aside type="tip">
For best results, use specific queries rather than broad topics. The search tool performs better with "Python async web frameworks comparison 2024" than "web frameworks".
</Aside>

---

[API Reference](https://jan.ai/docs/api/search) | [MCP Documentation](https://jan.ai/docs/mcp) | [Privacy Policy](https://jan.ai/privacy)

37
website/src/content/docs/server/index.mdx
Normal file
37
website/src/content/docs/server/index.mdx
Normal file
@ -0,0 +1,37 @@
---
title: Jan Server
description: Your self-hosted, private AI cloud for teams and enterprises.
keywords:
  [
    Jan Server,
    Jan AI,
    self-hosted AI,
    private AI cloud,
    local LLM server,
    enterprise AI,
    Docker,
    Kubernetes,
    on-premise AI
  ]
banner:
  content: 'Coming Q3 2025: Jan Server is currently in development.'
---

import { Aside, Card } from '@astrojs/starlight/components';

## Your Private AI Cloud

Jan Server allows you to deploy a powerful, multi-user AI environment on your
own infrastructure. It's designed for teams and enterprises that require full
data control, privacy, and predictable costs without sacrificing performance.

<Aside type="note">
**Jan Server is not yet available.** We are building a robust, scalable solution for self-hosting.
</Aside>

By self-hosting, you ensure that your sensitive data and intellectual property never leave your network.

### Planned Core Features
- **Multi-User Management**: Control access with individual accounts and API keys.
- **Enterprise Authentication**: Integrate with your existing SSO, LDAP, or AD.
- **Flexible Deployment**: Deploy easily via Docker, Kubernetes, or on bare metal.
- **Centralized Admin Dashboard**: Monitor usage, manage models, and oversee system health.

@ -1,139 +0,0 @@
---
title: Jan API Integration for Developers
description: Learn to build applications using Jan's local server API
video:
  type: video
  link: https://www.youtube.com/watch?v=L_jWHffIx5E
  duration: 420
---

import { List } from 'starlight-videos/components';

# Jan API Integration for Developers

Ready to build applications powered by Jan? In this advanced course, you'll learn how to integrate Jan's local server API into your own applications, enabling you to create AI-powered tools and services.

## What You'll Learn

- Understanding Jan's OpenAI-compatible API
- Setting up the local server for development
- Making your first API calls
- Authentication and security considerations
- Building a simple AI-powered application
- Best practices for production deployment

## Prerequisites

<List title="Developer Requirements" variant="caution" icon="warning">
- Jan installed and working properly
- Downloaded at least one model
- Local server enabled in Jan settings
- Basic programming knowledge (any language)
- Understanding of REST APIs and HTTP requests
- Familiarity with JSON data format
</List>

## Course Outline

### 1. API Overview
- Jan's local server architecture
- OpenAI compatibility layer
- Available endpoints and methods
- Rate limiting and resource management

### 2. Development Setup
- Enabling the local server
- Configuring ports and security
- Testing connectivity with curl (see the Python sketch after this list)
- Using API testing tools like Postman
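
If you would rather test connectivity from Python than curl, a minimal sketch looks like this. It assumes the local server is running on the default `http://localhost:1337` and that the OpenAI-compatible layer exposes the standard `/v1/models` endpoint:

```python
import requests

def check_jan_server(base_url: str = "http://localhost:1337") -> list[str]:
    """Return the IDs of models Jan's local server reports, or exit with an error."""
    try:
        response = requests.get(f"{base_url}/v1/models", timeout=5)
        response.raise_for_status()
        return [model["id"] for model in response.json().get("data", [])]
    except requests.RequestException as err:
        raise SystemExit(f"Jan server not reachable: {err}")

print(check_jan_server())  # e.g. ['llama3.2-3b-instruct'] if that model is loaded
```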

### 3. Core API Operations
- Chat completions endpoint
- Streaming responses
- Model management
- System monitoring

### 4. Building Your First App
We'll build a simple chat application that demonstrates:
- Connecting to Jan's API
- Sending messages and receiving responses
- Handling streaming responses
- Error handling and reconnection logic

### 5. Advanced Integration Patterns
- Async/await patterns for better performance
- Caching strategies for repeated queries (see the sketch after this outline)
- Integration with popular frameworks (Express.js, FastAPI, etc.)
- WebSocket integration for real-time apps
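
As a taste of the caching pattern mentioned above, here is a minimal sketch that memoizes identical prompts in process memory. It relies on the `chat_with_jan` helper defined in the code examples below, and the cache size is an arbitrary choice for illustration:

```python
from functools import lru_cache

@lru_cache(maxsize=128)
def cached_chat(message: str) -> str:
    """Serve repeated identical prompts from memory instead of re-calling the API."""
    result = chat_with_jan(message)  # defined under "Basic Python Integration" below
    return result["choices"][0]["message"]["content"]

# The second call with the same prompt hits the cache, not the server
print(cached_chat("Summarize what a local AI server is in one sentence."))
print(cached_chat("Summarize what a local AI server is in one sentence."))
```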

## Code Examples

### Basic Python Integration
```python
import requests

def chat_with_jan(message):
    # Send a single-turn chat request to Jan's local OpenAI-compatible endpoint
    url = "http://localhost:1337/v1/chat/completions"
    headers = {"Content-Type": "application/json"}
    data = {
        "model": "your-model-name",
        "messages": [{"role": "user", "content": message}]
    }

    response = requests.post(url, headers=headers, json=data)
    return response.json()
```
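
The course outline also covers streaming responses. A minimal sketch of consuming the stream with `requests` is below; it assumes the server follows the usual OpenAI-compatible server-sent-events format (`data: {...}` lines terminated by `data: [DONE]`):

```python
import json
import requests

def stream_chat_with_jan(message, model="your-model-name"):
    url = "http://localhost:1337/v1/chat/completions"
    payload = {
        "model": model,
        "messages": [{"role": "user", "content": message}],
        "stream": True,
    }
    with requests.post(url, json=payload, stream=True, timeout=120) as response:
        response.raise_for_status()
        for line in response.iter_lines():
            if not line or not line.startswith(b"data: "):
                continue
            chunk = line[len(b"data: "):]
            if chunk == b"[DONE]":
                break
            delta = json.loads(chunk)["choices"][0]["delta"]
            # Print tokens as they arrive instead of waiting for the full reply
            print(delta.get("content", ""), end="", flush=True)

stream_chat_with_jan("Explain what an OpenAI-compatible API is.")
```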

### JavaScript/Node.js Example
```javascript
const axios = require('axios');

// Send a single-turn chat request to Jan's local OpenAI-compatible endpoint
async function chatWithJan(message) {
  const response = await axios.post('http://localhost:1337/v1/chat/completions', {
    model: 'your-model-name',
    messages: [{ role: 'user', content: message }]
  });

  return response.data.choices[0].message.content;
}
```

## Security Considerations

- Keeping API endpoints local and secure
- Managing API keys and authentication
- Rate limiting to prevent abuse
- Monitoring resource usage
- Backup and recovery strategies

## Real-World Applications

Examples of what you can build:
- **Customer Service Bots**: Automated support using your data
- **Content Generation Tools**: AI-powered writing assistants
- **Code Analysis Tools**: Automated code review and suggestions
- **Personal Assistants**: Custom AI helpers for specific tasks

## Production Deployment

- Scaling considerations for multiple users
- Load balancing strategies
- Monitoring and logging
- Error handling and fallback mechanisms
- Performance optimization techniques

## Next Steps

After completing this course, consider exploring:
- **Custom Model Integration**: Adding your own fine-tuned models
- **Multi-Modal Applications**: Integrating with vision and audio models
- **Enterprise Scaling**: Deploying Jan for team use

## Resources

- [Jan API Documentation](https://jan.ai/api-reference)
- [Example Applications GitHub Repository](https://github.com/menloresearch/jan-examples)
- [Community Discord](https://discord.gg/qSwXFx6Krr) for developer support

Ready to start building? Watch the video above and follow along with the code examples!

@ -1,200 +0,0 @@
---
title: MCP Introduction & Setup
description: Understanding Model Context Protocol and getting it configured
video:
  type: collection-video
  link: https://www.youtube.com/watch?v=L_jWHffIx5E
  duration: 480
  collection: using-mcps
  order: 1
  difficulty: Intermediate
---

import { List, Quiz, QuizOption } from 'starlight-videos/components';

# MCP Introduction & Setup

Welcome to the world of Model Context Protocol! This foundational video explains what MCPs are, why they're revolutionary for AI applications, and how to get them set up in your Jan installation.

<List title="Prerequisites" variant="caution" icon="warning">
- Jan installed and working properly
- Downloaded at least one model
- Basic understanding of APIs and web services
- Administrative access to your computer
</List>

## What You'll Learn

- **MCP Fundamentals**: What MCPs are and how they work
- **Architecture Overview**: How MCPs connect AI to external systems
- **Setup Process**: Enabling MCP support in Jan
- **Configuration Basics**: Understanding MCP settings and options
- **Verification**: Testing that MCPs are working correctly

## Understanding MCPs

### What is Model Context Protocol?
Model Context Protocol is a standardized way for AI models to interact with external tools, APIs, and services. Think of it as a bridge that allows your AI to:

- **Access Real-Time Data**: Get current information from the web
- **Execute Code**: Run programs and scripts safely
- **Interact with APIs**: Connect to third-party services
- **Process Files**: Work with documents and media
- **Perform Actions**: Make changes to external systems

### Why MCPs Matter
- **Extended Capabilities**: Go beyond text generation
- **Real-World Integration**: Connect AI to your existing tools
- **Customization**: Build workflows specific to your needs
- **Standardization**: Use a consistent protocol across services

## MCP Architecture Deep Dive

### Core Components
1. **MCP Client**: Jan's built-in MCP handler
2. **MCP Server**: External services that provide capabilities
3. **Transport Layer**: How messages are exchanged
4. **Schema Definition**: How capabilities are described

### Communication Flow
```
Jan (MCP Client) ←→ Transport ←→ MCP Server ←→ External Service
```
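
To make the client/server split concrete, here is a minimal MCP server sketch using the official MCP Python SDK (assuming `pip install mcp`; the server name and tool are placeholders for illustration):

```python
# A tiny MCP server exposing one tool that an MCP client such as Jan could call.
import shutil

from mcp.server.fastmcp import FastMCP

mcp = FastMCP("system-info-demo")

@mcp.tool()
def disk_free_gb(path: str = "/") -> float:
    """Report free disk space in gigabytes for the given path."""
    return round(shutil.disk_usage(path).free / 1024**3, 2)

if __name__ == "__main__":
    # Runs over stdio by default, the transport most MCP clients expect
    mcp.run()
```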

### Security Model
- **Sandboxed Execution**: MCPs run in isolated environments
- **Permission System**: Granular control over what MCPs can access
- **Authentication**: Secure connection to external services
- **Audit Logging**: Track all MCP interactions

## Step-by-Step Setup

### 1. Check Jan Version
<List title="Version Requirements" variant="note" icon="information">
- Jan 0.5.0 or later required
- Latest version recommended for best features
- Check Help → About Jan for your version
</List>

### 2. Enable MCP Support
- Navigate to Jan Settings
- Find the "Extensions" or "MCP" section
- Toggle "Enable Model Context Protocol"
- Restart Jan if prompted

### 3. Configure MCP Directory
- Set the directory where MCP servers will be stored
- Choose a location with adequate disk space
- Ensure Jan has read/write permissions

### 4. Initial MCP Installation
- Browse the MCP marketplace
- Install a simple MCP (like system info)
- Verify the installation completed successfully

### 5. Test Your Setup
- Start a new conversation
- Try using the installed MCP
- Verify responses include MCP data

## Configuration Options

### MCP Settings Overview
- **Enable/Disable**: Global MCP toggle
- **Server Directory**: Where MCP servers are stored
- **Timeout Settings**: How long to wait for MCP responses
- **Security Settings**: Permission and access controls
- **Logging Level**: How much detail to log

### Performance Tuning
- **Concurrent Connections**: How many MCPs can run simultaneously
- **Memory Limits**: Resource constraints for MCP servers
- **Cache Settings**: Storing MCP responses for faster access
- **Network Configuration**: Proxy and firewall settings

## Common Setup Issues

### Problem: MCPs Not Loading
**Solutions:**
- Check Jan version compatibility
- Verify MCP directory permissions
- Review Jan logs for error messages
- Restart Jan after configuration changes

### Problem: Connection Timeouts
**Solutions:**
- Increase timeout settings
- Check internet connection
- Verify API credentials
- Test with simpler MCPs first

### Problem: Permission Errors
**Solutions:**
- Run Jan as administrator (if needed)
- Check folder permissions
- Review security software blocking
- Verify antivirus exceptions

## Verification Checklist

<List title="Setup Verification" variant="success" icon="approve-check">
- MCP toggle is enabled in settings
- MCP directory is configured and accessible
- At least one MCP is installed successfully
- MCP responds correctly in conversations
- No error messages in Jan logs
</List>

## What's Next?

Once your MCP setup is complete, you're ready for **Enabling Your First MCP**, where we'll walk through installing and configuring your first real-world MCP integration.

## Pro Tips

- Start with simple MCPs before moving to complex integrations
- Keep your MCPs updated for security and functionality
- Monitor resource usage when running multiple MCPs
- Join the Discord community for MCP-specific support

Ready to bridge the gap between AI and the real world? Let's get MCPs running in your Jan installation!

## MCP Fundamentals Quiz

Test your understanding of Model Context Protocol concepts:

<Quiz title="What does MCP stand for?" variant="note">
  <QuizOption>Model Computing Protocol</QuizOption>
  <QuizOption correct>Model Context Protocol</QuizOption>
  <QuizOption>Machine Context Processing</QuizOption>
  <QuizOption>Multi-Channel Protocol</QuizOption>
</Quiz>

<Quiz title="What can MCPs enable your AI to do?" multiple variant="tip">
  <QuizOption correct>Access real-time data from the web</QuizOption>
  <QuizOption>Make AI responses faster</QuizOption>
  <QuizOption correct>Execute code safely</QuizOption>
  <QuizOption correct>Interact with APIs and external services</QuizOption>
  <QuizOption>Reduce memory usage</QuizOption>
</Quiz>

<Quiz title="What is the minimum Jan version required for MCP support?" variant="caution">
  <QuizOption>Jan 0.3.0</QuizOption>
  <QuizOption>Jan 0.4.0</QuizOption>
  <QuizOption correct>Jan 0.5.0</QuizOption>
  <QuizOption>Jan 1.0.0</QuizOption>
</Quiz>

<Quiz title="In the MCP architecture, what role does Jan play?" variant="success">
  <QuizOption>MCP Server</QuizOption>
  <QuizOption correct>MCP Client</QuizOption>
  <QuizOption>Transport Layer</QuizOption>
  <QuizOption>External Service</QuizOption>
</Quiz>

<Quiz title="What should you do if MCPs are not loading properly?" multiple variant="caution">
  <QuizOption correct>Check Jan version compatibility</QuizOption>
  <QuizOption>Reinstall Jan completely</QuizOption>
  <QuizOption correct>Verify MCP directory permissions</QuizOption>
  <QuizOption correct>Review Jan logs for error messages</QuizOption>
  <QuizOption>Disable all other extensions</QuizOption>
</Quiz>

@ -1,194 +0,0 @@
---
title: Performance Optimization Masterclass
description: Get maximum performance from Jan on your hardware
video:
  type: video
  link: https://www.youtube.com/watch?v=kJQP7kiw5Fk
  duration: 480
---

import { List } from 'starlight-videos/components';

# Performance Optimization Masterclass

Ready to squeeze every bit of performance from your Jan setup? This advanced course covers everything you need to know about optimizing Jan for maximum speed, efficiency, and throughput on your specific hardware configuration.

## What You'll Learn

- Hardware optimization strategies
- Model selection for performance
- Memory management techniques
- GPU acceleration setup
- System-level optimizations
- Benchmarking and monitoring tools

## Prerequisites

<List title="Optimization Requirements" variant="tip" icon="star">
- Jan installed and running smoothly
- Downloaded models of different sizes for testing
- Basic understanding of computer hardware
- Administrative access to your system
- Benchmarking tools installed (optional)
- Willingness to experiment with settings
</List>

## Course Sections

### 1. Hardware Assessment
- Identifying your system's bottlenecks
- CPU vs GPU performance characteristics
- Memory and storage considerations
- Thermal management for sustained performance

### 2. Model Optimization
- Quantization levels and their trade-offs
- Model size vs. speed considerations
- Context length optimization
- Batch processing techniques

### 3. System Configuration
- Operating system optimizations
- Process priority and CPU affinity
- Memory allocation strategies
- Disk I/O optimization

### 4. GPU Acceleration
- CUDA setup for NVIDIA cards
- Vulkan configuration for AMD/Intel
- Multi-GPU configurations
- Memory management on GPU

## Performance Metrics to Track

### Speed Metrics
- **Tokens per second**: Raw generation speed
- **Time to first token**: Response latency
- **Context processing speed**: How fast Jan ingests your prompt

### Resource Metrics
- **CPU utilization**: Processor efficiency
- **Memory usage**: RAM and VRAM consumption
- **Temperature monitoring**: Thermal throttling prevention
- **Power consumption**: Energy efficiency

## Optimization Strategies by Hardware Type

### High-End Gaming PC
- Maximize GPU utilization
- Large model recommendations
- Multi-threading optimization
- Overclocking considerations

### MacBook (Apple Silicon)
- Unified memory advantages
- Model recommendations for M1/M2/M3
- Power efficiency settings
- Thermal management

### Business Laptop
- Battery optimization
- Smaller model selection
- CPU-only optimization
- Background process management

### Server/Workstation
- Multi-model serving
- Concurrent user optimization
- Enterprise-grade monitoring
- Cluster configuration basics

## Advanced Techniques

### Model Quantization Deep Dive
- Understanding Q4, Q6, Q8 quantization
- Custom quantization for your use case
- Quality vs. speed trade-offs
- Measuring quantization impact
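
A quick way to see the size side of that trade-off is to estimate the weight footprint at each quantization level. The sketch below is back-of-the-envelope arithmetic only (approximate bits per weight, ignoring KV cache and runtime overhead), so treat the numbers as rough guides rather than exact requirements:

```python
# Rough weight-only memory estimate at different quantization levels.
BITS_PER_WEIGHT = {"FP16": 16, "Q8": 8, "Q6": 6.5, "Q4": 4.5}  # approximate averages

def estimate_weights_gb(params_billion: float, quant: str) -> float:
    """Estimate weight storage in GiB for a model with the given parameter count."""
    bits = BITS_PER_WEIGHT[quant]
    return round(params_billion * 1e9 * bits / 8 / 1024**3, 1)

for quant in BITS_PER_WEIGHT:
    print(f"7B model at {quant}: ~{estimate_weights_gb(7, quant)} GB of weights")
# Prints roughly: FP16 ~13.0, Q8 ~6.5, Q6 ~5.3, Q4 ~3.7 (weights only)
```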

### Memory Management
- Model offloading strategies
- Context caching optimization
- Garbage collection tuning
- Memory-mapped file usage

### Parallel Processing
- Multi-threaded inference
- Batch processing optimization
- Pipeline parallelism
- Model parallelism techniques

## Benchmarking Your Setup

We'll use these tools to measure performance:
- Built-in Jan performance metrics
- System monitoring tools
- Custom benchmarking scripts (see the sketch after this list)
- A/B testing different configurations
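
A minimal custom benchmarking script can simply time a request against Jan's OpenAI-compatible local server. The endpoint, port, and model name below are assumptions to adjust for your setup, and the throughput figure is completion tokens divided by total wall time, so it includes prompt processing:

```python
import time
import requests

def benchmark(prompt: str, model: str = "your-model-name",
              url: str = "http://localhost:1337/v1/chat/completions") -> None:
    payload = {"model": model, "messages": [{"role": "user", "content": prompt}]}
    start = time.perf_counter()
    response = requests.post(url, json=payload, timeout=300)
    elapsed = time.perf_counter() - start
    response.raise_for_status()
    usage = response.json().get("usage", {})
    completion_tokens = usage.get("completion_tokens", 0)
    print(f"Elapsed: {elapsed:.2f}s")
    if completion_tokens:  # only if the server reports token usage
        print(f"Throughput: {completion_tokens / elapsed:.1f} tokens/sec")

benchmark("Write a 200-word summary of why local AI matters.")
```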

## Common Performance Issues

### Problem: Slow Response Times
**Solutions**:
- Reduce model size
- Increase context cache
- Optimize prompt length
- Check system resources

### Problem: High Memory Usage
**Solutions**:
- Enable model offloading
- Reduce context length
- Close unnecessary applications
- Consider smaller model variants

### Problem: Inconsistent Performance
**Solutions**:
- Monitor thermal throttling
- Check background processes
- Optimize power settings
- Update drivers and software

## Real-World Case Studies

### Case Study 1: Content Creator Setup
- Hardware: RTX 4080, 32GB RAM
- Use case: Long-form content generation
- Optimization: Large model with GPU acceleration
- Results: 45 tokens/sec sustained

### Case Study 2: Developer Workstation
- Hardware: MacBook Pro M2 Max
- Use case: Code assistance and documentation
- Optimization: Balanced model for quick responses
- Results: Sub-200ms first token time

### Case Study 3: Enterprise Deployment
- Hardware: Server with multiple GPUs
- Use case: Team collaboration platform
- Optimization: Multi-model serving with load balancing
- Results: 20+ concurrent users supported

## Monitoring and Maintenance

- Setting up performance dashboards
- Automated alert systems
- Regular performance audits
- Update and maintenance schedules

## Advanced Hardware Considerations

- NVMe SSD requirements for large models
- Network optimization for remote deployments
- Custom cooling solutions
- Professional workstation recommendations

## Next Level: Custom Builds

For ultimate performance:
- Building a dedicated Jan server
- Custom GPU clusters
- Edge deployment optimization
- Cloud vs. on-premise trade-offs

Ready to unlock Jan's full potential? Follow along with the video and transform your Jan experience from good to exceptional!

@ -1,269 +0,0 @@
|
||||
---
|
||||
title: Search Integration with Exa
|
||||
description: Connect real-time web search capabilities to your AI conversations
|
||||
video:
|
||||
type: collection-video
|
||||
link: https://www.youtube.com/watch?v=kJQP7kiw5Fk
|
||||
duration: 360
|
||||
collection: using-mcps
|
||||
order: 3
|
||||
difficulty: Intermediate
|
||||
---
|
||||
|
||||
import { List, Quiz, QuizOption } from 'starlight-videos/components';
|
||||
|
||||
# Search Integration with Exa
|
||||
|
||||
Transform your AI conversations with real-time web search capabilities! This video shows you how to integrate Exa search into Jan, giving your AI access to current information from across the internet.
|
||||
|
||||
<List title="Prerequisites" variant="caution" icon="warning">
|
||||
- Jan installed with MCP support enabled
|
||||
- Downloaded and activated at least one model
|
||||
- Exa API account and key (free tier available)
|
||||
- Basic understanding of API authentication
|
||||
</List>
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
- **Exa Overview**: What makes Exa different from traditional search
|
||||
- **API Setup**: Getting your Exa account and API key
|
||||
- **MCP Installation**: Installing the Exa MCP server
|
||||
- **Configuration**: Setting up authentication and preferences
|
||||
- **Usage Patterns**: How to effectively use search in conversations
|
||||
- **Troubleshooting**: Common issues and solutions
|
||||
|
||||
## About Exa Search
|
||||
|
||||
### What is Exa?
|
||||
Exa is a search engine built specifically for AI applications. Unlike traditional search engines that return web pages, Exa provides:
|
||||
|
||||
- **Semantic Search**: Understanding context and meaning
|
||||
- **Structured Results**: Clean, AI-friendly data formats
|
||||
- **Real-Time Information**: Current data and recent developments
|
||||
- **High-Quality Sources**: Curated, reliable information
|
||||
|
||||
### Why Exa for AI?
|
||||
- **AI-Native Design**: Built for AI consumption
|
||||
- **Better Context**: More relevant results for AI reasoning
|
||||
- **Cleaner Data**: Less noise, more signal
|
||||
- **Developer-Friendly**: Easy integration with MCPs
|
||||
|
||||
## Setting Up Exa
|
||||
|
||||
### 1. Create Your Exa Account
|
||||
- Visit [Exa's website](https://exa.ai)
|
||||
- Sign up for a free developer account
|
||||
- Verify your email address
|
||||
- Access your dashboard
|
||||
|
||||
### 2. Generate API Key
|
||||
- Navigate to API Keys section
|
||||
- Create a new API key
|
||||
- Copy and securely store your key
|
||||
- Note your usage limits and pricing
|
||||
|
||||
<List title="Security Reminder" variant="danger" icon="warning">
|
||||
- Never share your API key publicly
|
||||
- Store it securely in Jan's configuration
|
||||
- Monitor your usage to avoid unexpected charges
|
||||
- Regenerate keys if compromised
|
||||
</List>
|
||||
|
||||
### 3. Understand Usage Limits
|
||||
- **Free Tier**: Usually includes limited searches per month
|
||||
- **Rate Limits**: Requests per second/minute restrictions
|
||||
- **Data Limits**: Amount of data returned per search
|
||||
- **Commercial Use**: Check terms for business applications
|
||||
|
||||
## Installing Exa MCP
|
||||
|
||||
### Installation Process
|
||||
1. Open Jan settings
|
||||
2. Navigate to MCP/Extensions section
|
||||
3. Search for "Exa" in the MCP marketplace
|
||||
4. Click "Install" on the Exa Search MCP
|
||||
5. Wait for installation to complete
|
||||
|
||||
### Configuration Steps
|
||||
1. **API Key Setup**:
|
||||
- Enter your Exa API key
|
||||
- Test the connection
|
||||
- Save configuration
|
||||
|
||||
2. **Search Preferences**:
|
||||
- Default result count (5-10 recommended)
|
||||
- Content types to include/exclude
|
||||
- Language preferences
|
||||
- Date range filters
|
||||
|
||||
3. **Security Settings**:
|
||||
- Enable/disable automatic searches
|
||||
- Set conversation context limits
|
||||
- Configure usage monitoring
|
||||
|
||||
## Using Exa in Conversations
|
||||
|
||||
### Basic Search Queries
|
||||
```
|
||||
"What are the latest developments in renewable energy?"
|
||||
"Find recent research on machine learning optimization"
|
||||
"Search for current news about Jan AI"
|
||||
```
|
||||
|
||||
### Advanced Search Techniques
|
||||
```
|
||||
"Search for academic papers about transformer models published in 2024"
|
||||
"Find technical documentation for Python FastAPI framework"
|
||||
"Look up recent startup funding in the AI space"
|
||||
```
|
||||
|
||||
### Search with Context
|
||||
```
|
||||
"I'm working on a climate change project. Search for the latest
|
||||
IPCC report findings and recent renewable energy breakthroughs."
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Effective Search Strategies
|
||||
<List title="Search Tips" variant="tip" icon="star">
|
||||
- Be specific about what information you need
|
||||
- Include time constraints ("recent", "2024", etc.)
|
||||
- Specify source types (academic, news, documentation)
|
||||
- Ask for comparisons between multiple sources
|
||||
- Request summaries of complex topics
|
||||
</List>
|
||||
|
||||
### Query Optimization
|
||||
- **Specific Keywords**: Use precise terminology
|
||||
- **Context Setting**: Provide background for better results
|
||||
- **Result Filtering**: Ask for specific types of content
|
||||
- **Follow-up Questions**: Build on previous search results
|
||||
|
||||
## Real-World Use Cases
|
||||
|
||||
### Research & Analysis
|
||||
```
|
||||
User: "Search for recent studies on the effectiveness of remote work"
|
||||
AI: [Searches and finds recent research papers and surveys]
|
||||
AI: "Based on recent studies I found, remote work effectiveness varies..."
|
||||
```
|
||||
|
||||
### Current Events & News
|
||||
```
|
||||
User: "What's happening with AI regulation in the EU?"
|
||||
AI: [Searches for latest EU AI Act developments]
|
||||
AI: "The latest developments in EU AI regulation include..."
|
||||
```
|
||||
|
||||
### Technical Documentation
|
||||
```
|
||||
User: "Find the latest API documentation for OpenAI's models"
|
||||
AI: [Searches for current API docs and changes]
|
||||
AI: "Here's what I found about OpenAI's latest API updates..."
|
||||
```
|
||||
|
||||
### Market Research
|
||||
```
|
||||
User: "Search for recent funding rounds in the AI startup space"
|
||||
AI: [Finds recent investment news and startup funding data]
|
||||
AI: "Recent AI startup funding shows several interesting trends..."
|
||||
```
|
||||
|
||||
## Monitoring and Management
|
||||
|
||||
### Usage Tracking
|
||||
- Monitor your API usage in Exa dashboard
|
||||
- Set up alerts for usage limits
|
||||
- Track costs and billing
|
||||
- Optimize query frequency
|
||||
|
||||
### Performance Optimization
|
||||
- **Cache Results**: Avoid duplicate searches
|
||||
- **Batch Queries**: Combine related searches
|
||||
- **Filter Results**: Request only needed information
|
||||
- **Rate Limiting**: Respect API limits
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
### Problem: Search Results Not Appearing
|
||||
**Solutions:**
|
||||
- Check API key configuration
|
||||
- Verify internet connection
|
||||
- Test API key in Exa dashboard
|
||||
- Review Jan logs for error messages
|
||||
|
||||
### Problem: Poor Search Quality
|
||||
**Solutions:**
|
||||
- Refine search queries with more specific terms
|
||||
- Adjust result count settings
|
||||
- Try different search approaches
|
||||
- Provide more context in queries
|
||||
|
||||
### Problem: API Limit Exceeded
|
||||
**Solutions:**
|
||||
- Monitor usage more closely
|
||||
- Upgrade to higher tier if needed
|
||||
- Optimize search frequency
|
||||
- Cache results to avoid duplicate searches
|
||||
|
||||
## Advanced Features
|
||||
|
||||
### Custom Search Domains
|
||||
- Restrict searches to specific websites
|
||||
- Academic database integration
|
||||
- News source filtering
|
||||
- Domain-specific search patterns
|
||||
|
||||
### Result Processing
|
||||
- Automatic summarization
|
||||
- Fact checking and verification
|
||||
- Cross-referencing multiple sources
|
||||
- Citation and source tracking
|
||||
|
||||
## What's Next?
|
||||
|
||||
Now that you have real-time search capabilities, you're ready for **Data Analysis with E2B**, where we'll add code execution and data processing capabilities to your AI toolkit.
|
||||
|
||||
Ready to give your AI real-time knowledge of the world? Let's connect Exa search to Jan!
|
||||
|
||||
## Exa Integration Knowledge Check
|
||||
|
||||
Test your understanding of Exa search integration:
|
||||
|
||||
<Quiz title="What makes Exa different from traditional search engines?" variant="tip">
|
||||
<QuizOption>It's faster than Google</QuizOption>
|
||||
<QuizOption>It's completely free</QuizOption>
|
||||
<QuizOption correct>It's built specifically for AI applications with semantic search</QuizOption>
|
||||
<QuizOption>It doesn't require an API key</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="What do you need to set up Exa search integration?" multiple variant="caution">
|
||||
<QuizOption correct>Jan with MCP support enabled</QuizOption>
|
||||
<QuizOption correct>Exa API account and key</QuizOption>
|
||||
<QuizOption>Google API credentials</QuizOption>
|
||||
<QuizOption correct>Downloaded and activated at least one model</QuizOption>
|
||||
<QuizOption>Premium Jan subscription</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="Which search query is most effective for getting recent information?" variant="note">
|
||||
<QuizOption>"Tell me about renewable energy"</QuizOption>
|
||||
<QuizOption correct>"Search for recent studies on renewable energy published in 2024"</QuizOption>
|
||||
<QuizOption>"What is renewable energy?"</QuizOption>
|
||||
<QuizOption>"Renewable energy information"</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="What should you do to avoid API limit issues?" multiple variant="success">
|
||||
<QuizOption correct>Monitor your API usage in Exa dashboard</QuizOption>
|
||||
<QuizOption>Make as many searches as possible quickly</QuizOption>
|
||||
<QuizOption correct>Cache results to avoid duplicate searches</QuizOption>
|
||||
<QuizOption correct>Set up alerts for usage limits</QuizOption>
|
||||
<QuizOption>Share your API key with others</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="If search results are not appearing, what should you check first?" variant="caution">
|
||||
<QuizOption>Your internet speed</QuizOption>
|
||||
<QuizOption correct>API key configuration</QuizOption>
|
||||
<QuizOption>Jan version number</QuizOption>
|
||||
<QuizOption>Computer memory</QuizOption>
|
||||
</Quiz>
|
||||
@ -1,109 +0,0 @@
|
||||
---
|
||||
title: Using MCPs (Model Context Protocol)
|
||||
description: Master Jan's MCP system to extend AI capabilities with external tools and services
|
||||
video:
|
||||
type: collection
|
||||
collection: using-mcps
|
||||
---
|
||||
|
||||
import { List } from 'starlight-videos/components';
|
||||
|
||||
# Using MCPs (Model Context Protocol)
|
||||
|
||||
Model Context Protocol (MCP) is Jan's powerful system for connecting AI models to external tools, APIs, and services. This advanced tutorial series teaches you how to enable, configure, and maximize MCPs to dramatically expand your AI's capabilities.
|
||||
|
||||
<List title="Prerequisites" variant="caution" icon="warning">
|
||||
- Jan installed and working
|
||||
- Downloaded at least one model
|
||||
- MCP feature enabled in Jan settings
|
||||
- Basic understanding of APIs and web services
|
||||
- Some technical comfort with configuration files
|
||||
</List>
|
||||
|
||||
## What You'll Master
|
||||
|
||||
By completing this collection, you'll be able to:
|
||||
|
||||
- **Enable and Configure MCPs**: Set up the MCP system in Jan
|
||||
- **Connect External Services**: Integrate with APIs, databases, and tools
|
||||
- **Custom MCP Development**: Build your own MCP integrations
|
||||
- **Troubleshoot Issues**: Debug and optimize MCP performance
|
||||
- **Security Best Practices**: Keep your integrations secure
|
||||
|
||||
## Series Overview
|
||||
|
||||
This comprehensive tutorial series contains 8 hands-on videos that progress from basic setup to advanced custom development. Each video includes practical examples and real-world applications.
|
||||
|
||||
### Videos in This Series
|
||||
|
||||
1. **MCP Introduction & Setup** - Understanding MCP and initial configuration
|
||||
2. **Enabling Your First MCP** - Step-by-step activation process
|
||||
3. **Search Integration with Exa** - Connect web search capabilities
|
||||
4. **Data Analysis with E2B** - Integrate code execution environments
|
||||
5. **File System Access** - Local file and folder operations
|
||||
6. **Custom API Integration** - Connect your own services
|
||||
7. **Building Custom MCPs** - Develop your own integrations
|
||||
8. **MCP Security & Best Practices** - Keep your integrations secure
|
||||
|
||||
## Who This Series Is For
|
||||
|
||||
- **Power Users**: Ready to extend Jan's capabilities
|
||||
- **Developers**: Want to integrate Jan with existing tools
|
||||
- **Business Users**: Need specialized AI workflows
|
||||
- **System Administrators**: Managing Jan for teams
|
||||
|
||||
## Technical Requirements
|
||||
|
||||
<List title="System Requirements" variant="note" icon="laptop">
|
||||
- Jan version 0.5.0 or later
|
||||
- Active internet connection for external services
|
||||
- API keys for third-party services (as needed)
|
||||
- Administrative privileges for some integrations
|
||||
</List>
|
||||
|
||||
## Popular MCP Use Cases
|
||||
|
||||
### Development Workflows
|
||||
- **Code Analysis**: Analyze codebases and repositories
|
||||
- **API Testing**: Test and document API endpoints
|
||||
- **Database Queries**: Connect to databases for data analysis
|
||||
- **CI/CD Integration**: Automate build and deployment tasks
|
||||
|
||||
### Business Applications
|
||||
- **CRM Integration**: Access customer data and insights
|
||||
- **Document Processing**: Analyze and process business documents
|
||||
- **Spreadsheet Operations**: Read and manipulate Excel/Google Sheets
|
||||
- **Email & Calendar**: Schedule meetings and send communications
|
||||
|
||||
### Research & Analysis
|
||||
- **Web Scraping**: Gather data from websites
|
||||
- **Academic Research**: Access scholarly databases
|
||||
- **Market Analysis**: Connect to financial and market data
|
||||
- **Social Media**: Analyze social media trends and data
|
||||
|
||||
## Time Investment
|
||||
|
||||
- **Total Runtime**: Approximately 2 hours
|
||||
- **Hands-on Practice**: 4-6 hours additional
|
||||
- **Recommended Schedule**: 2-3 videos per week
|
||||
- **Completion Time**: 3-4 weeks with practice
|
||||
|
||||
## Learning Path
|
||||
|
||||
### Beginner Track (Videos 1-4)
|
||||
Start here if you're new to MCPs. Learn the basics of setup and use popular pre-built integrations.
|
||||
|
||||
### Intermediate Track (Videos 5-6)
|
||||
Ready to customize? Learn to configure advanced integrations and connect your own services.
|
||||
|
||||
### Advanced Track (Videos 7-8)
|
||||
For developers and power users who want to build custom MCPs and implement enterprise-grade security.
|
||||
|
||||
## Support & Resources
|
||||
|
||||
- **Documentation**: Complete MCP reference guide
|
||||
- **Community**: Discord channel for MCP users
|
||||
- **Examples**: GitHub repository with sample integrations
|
||||
- **Troubleshooting**: Common issues and solutions guide
|
||||
|
||||
Ready to supercharge your AI capabilities? MCPs will transform how you work with Jan!
|
||||
@ -1,57 +0,0 @@
|
||||
---
|
||||
title: Getting to Know Jan
|
||||
description: Essential videos to understand Jan's core features and philosophy
|
||||
video:
|
||||
type: collection
|
||||
collection: getting-to-know-jan
|
||||
---
|
||||
|
||||
import { List } from 'starlight-videos/components';
|
||||
|
||||
# Getting to Know Jan
|
||||
|
||||
This series of videos introduces you to Jan's core concepts, features, and philosophy. Perfect for newcomers who want to understand what makes Jan special and how it fits into the AI landscape.
|
||||
|
||||
<List title="Prerequisites" variant="note" icon="open-book">
|
||||
- A computer with 8GB+ RAM
|
||||
- Basic familiarity with software installation
|
||||
- Curiosity about AI and privacy
|
||||
</List>
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
By the end of this collection, you'll have a solid understanding of:
|
||||
|
||||
- **Jan's Mission**: Why Jan exists and what problems it solves
|
||||
- **Privacy-First AI**: How Jan keeps your data secure and private
|
||||
- **Local vs Cloud**: When to use local models vs cloud services
|
||||
- **Open Source Benefits**: How Jan's open-source nature benefits you
|
||||
- **Community & Support**: Where to get help and contribute back
|
||||
|
||||
## Series Overview
|
||||
|
||||
This tutorial series contains 6 essential videos that build upon each other. We recommend watching them in order for the best learning experience.
|
||||
|
||||
### Videos in This Series
|
||||
|
||||
1. **What is Jan?** - Introduction to Jan's mission and core features
|
||||
2. **Privacy-First AI** - Understanding Jan's privacy approach
|
||||
3. **Local AI Explained** - How local AI processing works
|
||||
4. **Open Source Advantage** - Benefits of open-source AI tools
|
||||
5. **Jan vs Alternatives** - How Jan compares to other AI tools
|
||||
6. **Community & Getting Help** - Resources and support available
|
||||
|
||||
## Who This Series Is For
|
||||
|
||||
- **AI Newcomers**: People new to artificial intelligence
|
||||
- **Privacy-Conscious Users**: Those concerned about data privacy
|
||||
- **Open Source Enthusiasts**: Users interested in open-source software
|
||||
- **Professionals**: Anyone considering AI for work applications
|
||||
|
||||
## Time Investment
|
||||
|
||||
- **Total Runtime**: Approximately 30 minutes
|
||||
- **Recommended Schedule**: One video per day over a week
|
||||
- **Completion Time**: 1 week at a relaxed pace
|
||||
|
||||
Ready to get started? The videos in this series will give you everything you need to understand Jan's unique approach to AI.
|
||||
@ -1,79 +0,0 @@
|
||||
---
|
||||
title: Installing and Setting Up Jan
|
||||
description: Get Jan running on your computer in minutes
|
||||
video:
|
||||
type: video
|
||||
link: https://www.youtube.com/watch?v=oHg5SJYRHA0
|
||||
duration: 300
|
||||
---
|
||||
|
||||
import { List } from 'starlight-videos/components';
|
||||
|
||||
# Installing and Setting Up Jan
|
||||
|
||||
Now that you understand what Jan is and how it can help you, let's get it installed and running on your computer. This step-by-step video guide will walk you through the entire process.
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
- How to download Jan for your operating system
|
||||
- Installation process for Windows, macOS, and Linux
|
||||
- Initial setup and configuration
|
||||
- Downloading your first AI model
|
||||
- Verifying everything is working correctly
|
||||
|
||||
## Prerequisites
|
||||
|
||||
<List title="System Requirements" variant="note" icon="laptop">
|
||||
- A computer with at least 8GB of RAM (16GB recommended)
|
||||
- 10GB of free disk space for Jan and one model
|
||||
- A stable internet connection for initial download
|
||||
- Administrator privileges on your computer
|
||||
</List>
|
||||
|
||||
## Step-by-Step Process
|
||||
|
||||
### 1. Download Jan
|
||||
- Visit the official Jan website
|
||||
- Choose the correct version for your operating system
|
||||
- Download and verify the installer
|
||||
|
||||
### 2. Installation
|
||||
- Run the installer with administrator privileges
|
||||
- Choose installation location and options
|
||||
- Wait for the installation to complete
|
||||
|
||||
### 3. First Launch
|
||||
- Start Jan for the first time
|
||||
- Complete the initial setup wizard
|
||||
- Configure basic preferences
|
||||
|
||||
### 4. Download Your First Model
|
||||
- Browse the model library
|
||||
- Understand different model sizes and capabilities
|
||||
- Download a recommended beginner model
|
||||
- Wait for the download to complete
|
||||
|
||||
### 5. Test Your Setup
|
||||
- Start your first conversation
|
||||
- Verify the model is responding
|
||||
- Check system resources and performance
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
If you encounter problems during installation:
|
||||
- **Permission errors**: Make sure you're running as administrator
|
||||
- **Download failures**: Check your internet connection and firewall
|
||||
- **Model loading issues**: Verify you have enough RAM and disk space
|
||||
- **Performance problems**: Consider a smaller model for older hardware
|
||||
|
||||
## Next Steps
|
||||
|
||||
Once Jan is installed and your first model is downloaded, you'll be ready for **Your First AI Conversation**, where we'll learn the basics of communicating effectively with AI.
|
||||
|
||||
## System Requirements Reminder
|
||||
|
||||
- **Minimum**: 8GB RAM, 4GB free disk space
|
||||
- **Recommended**: 16GB+ RAM, 20GB+ free disk space
|
||||
- **Optimal**: 32GB+ RAM, SSD storage, dedicated GPU
|
||||
|
||||
Ready to install Jan? Follow along with the video and you'll be up and running in no time!
|
||||
@ -1,50 +0,0 @@
|
||||
---
|
||||
title: Introduction to AI and Jan
|
||||
description: Your first step into the world of artificial intelligence
|
||||
video:
|
||||
type: video
|
||||
link: https://www.youtube.com/watch?v=dQw4w9WgXcQ
|
||||
duration: 180
|
||||
---
|
||||
|
||||
# Introduction to AI and Jan
|
||||
|
||||
Welcome to your first lesson at Jan AI University! In this video, we'll explore what artificial intelligence is, how it can benefit you, and why Jan is the perfect tool to start your AI journey.
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
- What is artificial intelligence and how does it work?
|
||||
- The difference between traditional software and AI
|
||||
- Why privacy matters in AI applications
|
||||
- An overview of Jan's key features and benefits
|
||||
- Real-world use cases for AI in your daily life
|
||||
|
||||
## Key Concepts Covered
|
||||
|
||||
### Artificial Intelligence Basics
|
||||
- Large Language Models (LLMs) explained simply
|
||||
- How AI "understands" and generates human language
|
||||
- The training process and what makes AI "intelligent"
|
||||
|
||||
### Jan's Unique Approach
|
||||
- Privacy-first design philosophy
|
||||
- Local vs. cloud AI processing
|
||||
- Open-source benefits and community
|
||||
|
||||
### Getting Started Mindset
|
||||
- Setting realistic expectations for AI
|
||||
- Understanding AI's strengths and limitations
|
||||
- Building good prompting habits from day one
|
||||
|
||||
## After This Video
|
||||
|
||||
Once you've watched this introduction, you'll be ready to move on to **Installing and Setting Up Jan**, where we'll get Jan running on your computer and ready for your first AI conversation.
|
||||
|
||||
## Discussion Points
|
||||
|
||||
Think about these questions as you watch:
|
||||
- What problems in your work or personal life could AI help solve?
|
||||
- How important is data privacy to you when using AI tools?
|
||||
- What are you most excited to try with Jan?
|
||||
|
||||
Ready to begin? Click play on the video above and let's start your AI journey together!
|
||||
@ -1,152 +0,0 @@
|
||||
---
|
||||
title: Privacy-First AI
|
||||
description: Understanding Jan's privacy approach and why it matters
|
||||
video:
|
||||
type: collection-video
|
||||
link: https://www.youtube.com/watch?v=oHg5SJYRHA0
|
||||
duration: 420
|
||||
collection: getting-to-know-jan
|
||||
order: 2
|
||||
difficulty: Beginner
|
||||
---
|
||||
|
||||
import { List, Quiz, QuizOption } from 'starlight-videos/components';
|
||||
|
||||
# Privacy-First AI
|
||||
|
||||
In this video, we dive deep into Jan's privacy-first philosophy and explore why keeping your AI interactions private matters more than ever.
|
||||
|
||||
<List title="Prerequisites" variant="note" icon="open-book">
|
||||
- Watched "What is Jan?"
|
||||
- Basic understanding of internet privacy concepts
|
||||
- Awareness of data collection practices
|
||||
</List>
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
- Why privacy matters in AI applications
|
||||
- How traditional AI services handle your data
|
||||
- Jan's approach to keeping your conversations private
|
||||
- Technical aspects of local data processing
|
||||
- When and how to use cloud services safely with Jan
|
||||
|
||||
## Key Privacy Concepts
|
||||
|
||||
### The Privacy Problem with Cloud AI
|
||||
- **Data Collection**: What happens to your conversations
|
||||
- **Training Data**: How your inputs might be used
|
||||
- **Government Access**: Potential surveillance concerns
|
||||
- **Data Breaches**: Risks of centralized data storage
|
||||
|
||||
### Jan's Privacy Solutions
|
||||
- **Local Processing**: Your data never leaves your device
|
||||
- **No Logging**: Jan doesn't record your conversations
|
||||
- **Open Source**: Complete transparency in data handling
|
||||
- **User Control**: You decide what data to share and when
|
||||
|
||||
## Technical Deep Dive
|
||||
|
||||
### How Local Processing Works
|
||||
- Model files stored on your device
|
||||
- Computation happens on your hardware
|
||||
- No network requests for basic operations
|
||||
- Complete offline functionality
|
||||
|
||||
### Data Storage Locations
|
||||
- Where Jan stores your conversations
|
||||
- How to find and manage your data
|
||||
- Backup and security considerations
|
||||
- Deleting data permanently
|
||||
|
||||
### Hybrid Approach Benefits
|
||||
- Use local models for sensitive content
|
||||
- Cloud models for specialized tasks
|
||||
- User-controlled data sharing
|
||||
- Best of both worlds flexibility
|
||||
|
||||
## Real-World Privacy Scenarios
|
||||
|
||||
### Scenario 1: Medical Information
|
||||
- Asking health-related questions
|
||||
- Why local processing matters
|
||||
- Regulatory compliance considerations
|
||||
|
||||
### Scenario 2: Business Confidential Data
|
||||
- Company information and strategies
|
||||
- Legal and competitive implications
|
||||
- Corporate privacy policies
|
||||
|
||||
### Scenario 3: Personal Creative Work
|
||||
- Writing and creative projects
|
||||
- Intellectual property protection
|
||||
- Personal expression privacy
|
||||
|
||||
## Making Privacy Choices
|
||||
|
||||
### When to Use Local Models
|
||||
- Sensitive personal information
|
||||
- Confidential business data
|
||||
- Creative and original work
|
||||
- Learning and experimentation
|
||||
|
||||
### When Cloud Models Might Be Appropriate
|
||||
- General knowledge questions
|
||||
- Public information research
|
||||
- Non-sensitive creative tasks
|
||||
- Performance-critical applications
|
||||
|
||||
## Privacy Best Practices
|
||||
|
||||
<List title="Privacy Checklist" variant="success" icon="approve-check">
|
||||
- Review what data you're sharing
|
||||
- Understand model capabilities and limitations
|
||||
- Use local models for sensitive content
|
||||
- Regularly review and clean your data
|
||||
- Stay informed about privacy updates
|
||||
</List>
|
||||
|
||||
## After This Video
|
||||
|
||||
You'll have a solid understanding of AI privacy concerns and Jan's solutions. Next up: **Local AI Explained** - where we'll explore the technical aspects of running AI on your own device.
|
||||
|
||||
## Reflection Questions
|
||||
|
||||
- What types of information do you consider private?
|
||||
- How do current AI tools handle your data?
|
||||
- What privacy features matter most to you?
|
||||
- How would local AI change your usage patterns?
|
||||
|
||||
Ready to become a privacy-conscious AI user? Let's explore why privacy-first AI is the future!
|
||||
|
||||
## Privacy Knowledge Check
|
||||
|
||||
Test your understanding of AI privacy concepts:
|
||||
|
||||
<Quiz title="What happens to your conversations when using Jan locally?" variant="success">
|
||||
<QuizOption>They are sent to Jan's servers for analysis</QuizOption>
|
||||
<QuizOption>They are uploaded to the cloud for backup</QuizOption>
|
||||
<QuizOption correct>They stay completely on your device</QuizOption>
|
||||
<QuizOption>They are shared with model providers</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="Which scenarios are best suited for local AI processing?" multiple variant="caution">
|
||||
<QuizOption correct>Discussing confidential business strategies</QuizOption>
|
||||
<QuizOption>Asking general knowledge questions</QuizOption>
|
||||
<QuizOption correct>Working with sensitive medical information</QuizOption>
|
||||
<QuizOption correct>Creating personal creative work</QuizOption>
|
||||
<QuizOption>Looking up public information</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="What is a key advantage of Jan's hybrid approach?" variant="note">
|
||||
<QuizOption>It's always faster than other solutions</QuizOption>
|
||||
<QuizOption correct>You can choose local for privacy and cloud for performance when needed</QuizOption>
|
||||
<QuizOption>It requires no internet connection ever</QuizOption>
|
||||
<QuizOption>It's completely free to use</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="When using cloud services through Jan, whose privacy policies apply?" variant="caution">
|
||||
<QuizOption>Only Jan's privacy policy</QuizOption>
|
||||
<QuizOption>No privacy policies apply</QuizOption>
|
||||
<QuizOption correct>The third-party service's privacy policies</QuizOption>
|
||||
<QuizOption>Your company's privacy policies</QuizOption>
|
||||
</Quiz>
|
||||
@ -1,97 +0,0 @@
|
||||
---
|
||||
title: What is Jan?
|
||||
description: Introduction to Jan's mission and core features
|
||||
video:
|
||||
type: collection-video
|
||||
link: https://www.youtube.com/watch?v=dQw4w9WgXcQ
|
||||
duration: 300
|
||||
collection: getting-to-know-jan
|
||||
order: 1
|
||||
difficulty: Beginner
|
||||
---
|
||||
|
||||
import { List, Quiz, QuizOption } from 'starlight-videos/components';
|
||||
|
||||
# What is Jan?
|
||||
|
||||
Welcome to your first video in the "Getting to Know Jan" series! This foundational video introduces you to Jan's core mission, key features, and what makes it different from other AI tools.
|
||||
|
||||
<List title="Prerequisites" variant="note" icon="open-book">
|
||||
- No technical background required
|
||||
- Basic computer literacy
|
||||
- Interest in AI and privacy
|
||||
</List>
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
- The story behind Jan and why it was created
|
||||
- Jan's core mission and values
|
||||
- Key features that set Jan apart
|
||||
- The difference between Jan and cloud-based AI services
|
||||
- Real-world use cases and applications
|
||||
|
||||
## Key Topics Covered
|
||||
|
||||
### Jan's Origin Story
|
||||
- Why the founders created Jan
|
||||
- The problem Jan solves in the AI landscape
|
||||
- Vision for the future of AI tools
|
||||
|
||||
### Core Features Overview
|
||||
- **100% Offline Operation**: How Jan works without internet
|
||||
- **Privacy-First Design**: Your data never leaves your device
|
||||
- **Model Flexibility**: Use local and cloud models
|
||||
- **Open Source**: Complete transparency and community-driven development
|
||||
|
||||
### Use Cases Preview
|
||||
- Personal AI assistant for daily tasks
|
||||
- Professional writing and research tool
|
||||
- Educational companion for learning
|
||||
- Development assistant for coding projects
|
||||
|
||||
## After This Video
|
||||
|
||||
You'll understand Jan's fundamental value proposition and be ready to explore how its privacy-first approach works in the next video: **Privacy-First AI**.
|
||||
|
||||
## Discussion Questions
|
||||
|
||||
As you watch, think about:
|
||||
- What current AI tools do you use and what limitations do they have?
|
||||
- How important is data privacy in your AI usage?
|
||||
- What would you want to use Jan for in your daily life?
|
||||
|
||||
## Key Takeaways
|
||||
|
||||
By the end of this video, you should be able to:
|
||||
- Explain what Jan is in your own words
|
||||
- Identify Jan's main differentiators
|
||||
- Understand why someone would choose Jan over alternatives
|
||||
- Recognize potential use cases for your own needs
|
||||
|
||||
Ready to discover what makes Jan special? Let's dive in!
|
||||
|
||||
## Knowledge Check
|
||||
|
||||
Test your understanding of Jan's core concepts:
|
||||
|
||||
<Quiz title="What is Jan's primary differentiator from other AI tools?" variant="tip">
|
||||
<QuizOption>It's faster than other AI tools</QuizOption>
|
||||
<QuizOption correct>It runs completely offline and keeps your data private</QuizOption>
|
||||
<QuizOption>It has more AI models available</QuizOption>
|
||||
<QuizOption>It's easier to install</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="Which of the following are key features of Jan?" multiple variant="note">
|
||||
<QuizOption correct>100% offline operation</QuizOption>
|
||||
<QuizOption>Cloud-only processing</QuizOption>
|
||||
<QuizOption correct>Privacy-first design</QuizOption>
|
||||
<QuizOption correct>Open source code</QuizOption>
|
||||
<QuizOption>Subscription-based pricing</QuizOption>
|
||||
</Quiz>
|
||||
|
||||
<Quiz title="What does Jan allow you to use?" variant="success">
|
||||
<QuizOption>Only local AI models</QuizOption>
|
||||
<QuizOption>Only cloud AI models</QuizOption>
|
||||
<QuizOption correct>Both local and cloud AI models</QuizOption>
|
||||
<QuizOption>Only models created by Jan</QuizOption>
|
||||
</Quiz>
|
||||
@ -1,68 +0,0 @@
|
||||
---
|
||||
title: Welcome to Jan AI University
|
||||
description: Learn AI fundamentals and master Jan with our comprehensive video courses
|
||||
---
|
||||
|
||||
import { Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
Jan AI University is your comprehensive learning hub for mastering artificial intelligence concepts and getting the most out of Jan. Whether you're a beginner exploring AI for the first time or an advanced user looking to optimize your workflows, our video tutorial series will guide you through every step.
|
||||
|
||||
## What You'll Learn
|
||||
|
||||
<CardGrid>
|
||||
<Card title="🚀 Getting Started" icon="rocket">
|
||||
Master the basics of Jan installation, setup, and your first AI conversations. Perfect for newcomers to AI and Jan.
|
||||
</Card>
|
||||
<Card title="🧠 AI Fundamentals" icon="star">
|
||||
Understand how large language models work, model parameters, and how to choose the right model for your needs.
|
||||
</Card>
|
||||
<Card title="⚙️ Advanced Techniques" icon="setting">
|
||||
Dive deep into custom prompting, model fine-tuning, and advanced Jan configurations for power users.
|
||||
</Card>
|
||||
<Card title="🔧 Integration & Development" icon="puzzle">
|
||||
Learn to integrate Jan with other tools, build applications using Jan's API, and extend Jan's capabilities.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Learning Paths
|
||||
|
||||
### 🌟 Beginner Path
|
||||
Start here if you're new to AI or Jan. These tutorials build upon each other to give you a solid foundation.
|
||||
|
||||
1. **Introduction to AI and Jan** - Understand what AI can do for you
|
||||
2. **Installing and Setting Up Jan** - Get Jan running on your system
|
||||
3. **Your First AI Conversation** - Learn the basics of prompting
|
||||
4. **Choosing the Right Model** - Understand different model types and capabilities
|
||||
|
||||
### 🚀 Intermediate Path
|
||||
Ready to get more advanced? These tutorials will help you become proficient with Jan.
|
||||
|
||||
1. **Advanced Prompting Techniques** - Craft better prompts for better results
|
||||
2. **Model Parameters Deep Dive** - Fine-tune models for your specific needs
|
||||
3. **Local vs Cloud Models** - When to use each and how to switch
|
||||
4. **Privacy and Security Best Practices** - Keep your data safe
|
||||
|
||||
### ⚡ Advanced Path
|
||||
For power users and developers who want to push the boundaries.
|
||||
|
||||
1. **Jan API Integration** - Build applications using Jan's local server
|
||||
2. **Custom Model Integration** - Add your own models to Jan
|
||||
3. **Performance Optimization** - Get the most out of your hardware
|
||||
4. **Enterprise Deployment** - Scale Jan for teams and organizations
|
||||
|
||||
## Featured Tutorial Series Preview
|
||||
|
||||
Coming soon: Our flagship tutorial series **"From Zero to AI Hero"** - a comprehensive 10-part video series that takes you from AI beginner to Jan expert in just 30 days.
|
||||
|
||||
## Learning Format
|
||||
|
||||
- **Video-first learning**: Each lesson is presented as an engaging video tutorial
|
||||
- **Hands-on exercises**: Practice what you learn with real Jan examples
|
||||
- **Downloadable resources**: Code samples, configuration files, and cheat sheets
|
||||
- **Community support**: Join discussions with fellow learners in our Discord
|
||||
|
||||
## Getting Started
|
||||
|
||||
Ready to begin your AI journey? Start with our **Getting Started** section, where you'll find beginner-friendly videos that introduce you to the world of AI and Jan.
|
||||
|
||||
Have questions? Join our [Discord community](https://discord.gg/qSwXFx6Krr) where instructors and fellow students are always happy to help!
|
||||
210
website/src/content/products/index.mdx
Normal file
210
website/src/content/products/index.mdx
Normal file
@ -0,0 +1,210 @@
|
||||
---
|
||||
title: Products
|
||||
description: AI that runs where you need it, how you need it
|
||||
sidebar:
|
||||
order: 0
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
Jan is moving from a local AI application to a complete full-stack AI solution that you can self-host. This includes models, applications, and tools that delight users and help them solve their problems.
|
||||
|
||||
## What We're Building
|
||||
|
||||
**Jan Factory (or Agent)** = Jan Models + Jan Application + Jan Tools
|
||||
|
||||
Unlike other AI assistants that handle specific tasks with a single model, or offer many models scattered across a myriad of solutions, Jan provides:
|
||||
- Its own specialised models, optimised for specific tasks such as web search, creative writing, and translation
|
||||
- Applications that work across all of your devices in an integrated way
|
||||
- Tools that actually get things done
|
||||
|
||||
## Two Modes, One Experience
|
||||
|
||||
### Local (Incognito) Mode
|
||||
|
||||
Run AI models entirely on your device, giving you complete privacy with no internet required.
|
||||
|
||||
### Cloud Mode
|
||||
|
||||
Connect to more powerful models when needed - either self-hosted or via jan.ai.
|
||||
|
||||
<Aside type="tip" title="Simple Choice">
|
||||
Users shouldn't need to understand models, APIs, or technical details. Just choose Local for privacy or Cloud for power.
|
||||
</Aside>
|
||||
|
||||
## Our Product Principles
|
||||
|
||||
### 1) It Just Works
|
||||
|
||||
1. Open Jan, start chatting
|
||||
2. Onboarding is fully available but optional
|
||||
3. Setting up an API key is optional
|
||||
4. Selecting a local model is optional
|
||||
5. Become a power user at your own pace, if you want to
|
||||
|
||||
We handle the complexity.
|
||||
|
||||
### 2) Cloud When Needed
|
||||
|
||||
Start completely locally and own your AI models. Add cloud capabilities only when you choose to.
|
||||
|
||||
### 3) Solve Problems, Not Settings
|
||||
|
||||
We help users get to answers quickly, not to configuration options. Power users can dig deeper, but it's never required.
|
||||
|
||||
## Available on Every Device
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Jan Desktop" icon="laptop">
|
||||
**Available Now**
|
||||
|
||||
Your personal AI workstation that helps with our use cases and powers other devices. Run models locally right away or bring an API key to connect to your favorite cloud-based models.
|
||||
|
||||
- Runs models locally on your hardware
|
||||
- GPU acceleration support
|
||||
- Powers other devices via network connection
|
||||
- Complete privacy and control
|
||||
- Windows, macOS, and Linux support
|
||||
|
||||
**Requirements:**
|
||||
- Minimum 8GB RAM
|
||||
- 10GB+ storage space
|
||||
- Optional: NVIDIA GPU for acceleration
|
||||
</Card>
|
||||
|
||||
<Card title="Jan Web" icon="up-arrow">
|
||||
**Beta Launch Soon**
|
||||
|
||||
Web-based version of Jan with no setup required. It also serves as the default cloud mode for mobile and desktop users.
|
||||
|
||||
- No installation needed
|
||||
- Instant access from any browser
|
||||
- Automatic updates and maintenance
|
||||
- Default cloud backend for mobile apps
|
||||
- Team collaboration features
|
||||
|
||||
**Pricing:**
|
||||
- Free for everyone
|
||||
- Pro: Access our latest models
|
||||
- Enterprise: Self-host or we host it for you
|
||||
</Card>
|
||||
|
||||
<Card title="Jan Mobile" icon="phone">
|
||||
**Coming Q4 2025**
|
||||
|
||||
Connect to Desktop/Server, run local mode with Jan Nano or Lucy, same experience everywhere.
|
||||
|
||||
Jan Mobile adapts to your situation:
|
||||
- **At Home**: Connect to your Jan Desktop over WiFi
|
||||
- **At Work**: Connect to your company Jan Server
|
||||
- **On the Go**: Run Jan Nano on your phone or talk to cloud models
|
||||
|
||||
- iOS and Android support
|
||||
- Three adaptive modes (Desktop, Server, Local)
|
||||
- Voice-first interface
|
||||
- Seamless device switching
|
||||
- Jan Nano for on-device AI
|
||||
</Card>
|
||||
|
||||
<Card title="Jan Server" icon="bars">
|
||||
**Coming Q2 2025**
|
||||
|
||||
Self-hosted solution for teams and enterprises. Your own private AI cloud.
|
||||
|
||||
- Support for 5-500+ concurrent users
|
||||
- Enterprise authentication (SSO, LDAP)
|
||||
- Docker and Kubernetes deployment
|
||||
- Admin dashboard
|
||||
- Team knowledge sharing
|
||||
|
||||
**Deployment Options:**
|
||||
- Docker: Single command setup
|
||||
- Kubernetes: Enterprise scale
|
||||
- Bare metal: Maximum control
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Jan Mobile: Three Modes, One Experience
|
||||
|
||||
Jan Mobile brings the same AI experience to your phone. Connect to your desktop, your server, or run models locally.
|
||||
|
||||
### How It Works
|
||||
|
||||
Jan Mobile adapts to your situation:
|
||||
|
||||
**At Home** - Connect to your Jan Desktop over WiFi
|
||||
Your Phone → WiFi → Your Desktop → Response
|
||||
|
||||
**At Work** - Connect to your company Jan Server
|
||||
Your Phone → Internet → Company Server → Response
|
||||
|
||||
**On the Go** - Run Jan Nano on your phone or talk to your favorite cloud-based model
|
||||
Your Phone → Jan Nano (6GB) → Response
|
||||
|
||||
No configuration needed. It just works.
|
||||
|
||||
### Key Features
|
||||
|
||||
- **Seamless Switching**: Move from home to office to airplane. One-click and Jan adapts immediately.
|
||||
- **Voice First**: Talk to Jan naturally. Responses can be spoken too.
|
||||
- **Sync Everything**: Conversations, settings, and preferences follow you across devices.
|
||||
|
||||
### Privacy & Security
|
||||
|
||||
**Your Data, Your Control**
|
||||
- Local Mode: Everything stays on your phone
|
||||
- Desktop Mode: Direct encrypted connection
|
||||
- Server Mode: Your organization's policies apply
|
||||
|
||||
**No Compromises**
|
||||
- Biometric app lock
|
||||
- Encrypted storage
|
||||
- No cloud backups without permission
|
||||
- Clear data anytime
|
||||
|
||||
## What Makes Jan Different
|
||||
|
||||
| Feature | Other AI Assistants | Jan |
|
||||
|---------|---------------------|-----|
|
||||
| Models | Wrapper around Claude/GPT | Our own models + You can own them |
|
||||
| Privacy | Your data on their servers | Your data stays yours |
|
||||
| Deployment | Cloud only | Local, self-hosted, or cloud |
|
||||
| Cost | Subscription forever | Free locally, pay for cloud |
|
||||
|
||||
## Development Timeline
|
||||
|
||||
Jan is actively developed with regular releases. Our development follows these key milestones:
|
||||
|
||||
### Current Focus
|
||||
- **Jan Desktop**: Continuous improvements and model support
|
||||
- **Jan Web**: Beta launch preparation
|
||||
- **Model Development**: Jan Nano and Lucy optimization
|
||||
|
||||
### Next 6 Months
|
||||
- Jan Web public beta
|
||||
- Mobile app development
|
||||
- Server deployment tools
|
||||
|
||||
### Future Vision
|
||||
- Complete AI Agent platform
|
||||
- Advanced tool integration
|
||||
- Enterprise features
|
||||
|
||||
<Aside type="tip">
|
||||
We're building AI that respects your choices. Run it locally for privacy, connect to cloud for power, or self-host for both.
|
||||
</Aside>
|
||||
|
||||
---
|
||||
|
||||
**Ready to Get Started?**
|
||||
|
||||
- [Download Jan Desktop](/download) - Available now for Windows, macOS, and Linux
|
||||
- [Join Jan Web Beta](/beta) - Get early access to our web platform
|
||||
- [Read the Documentation](/docs) - Learn how to use Jan effectively
|
||||
- [Explore Our Models](/docs/models) - Discover Jan's specialized AI models
|
||||
|
||||
For detailed information about each platform, explore the individual product pages:
|
||||
- [Jan Desktop](/docs/products/platforms/desktop)
|
||||
- [Jan Web](/docs/products/platforms/jan-ai)
|
||||
- [Jan Mobile](/docs/products/platforms/mobile)
|
||||
- [Jan Server](/docs/products/platforms/server)
|
||||
44
website/src/content/products/models/jan-v1.mdx
Normal file
44
website/src/content/products/models/jan-v1.mdx
Normal file
@ -0,0 +1,44 @@
|
||||
---
|
||||
title: Jan V1
|
||||
description: Our upcoming family of foundational models, built to compete with the best.
|
||||
sidebar:
|
||||
order: 1
|
||||
banner:
|
||||
content: 'In Development: Jan V1 models are currently being trained and are not yet available.'
|
||||
---
|
||||
import { Aside, Card, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
## Our Foundational Model Family
|
||||
|
||||
Jan V1 is our in-house family of models designed to compete directly with leading models like GPT-4 and Claude. We're not just fine-tuning; we're building powerful, general-purpose models from the ground up to solve real-world problems with a focus on efficiency and privacy.
|
||||
|
||||
<Aside type="note">
|
||||
**Jan V1 models are currently in the pre-training phase.** They are not yet available for download or use. The information on this page reflects our development goals and roadmap.
|
||||
</Aside>
|
||||
|
||||
### The Vision
|
||||
|
||||
Most AI applications today are simply wrappers around a few proprietary models. We believe in providing a powerful, open alternative that you can run on your own terms. Jan V1 is our commitment to that vision.
|
||||
|
||||
### Planned Model Lineup
|
||||
|
||||
| Model | Target Size | Intended Use Case | Availability |
|
||||
|-------------|-------------|------------------------------|---------------|
|
||||
| Jan V1-7B | 4-8GB | Fast, efficient daily tasks | Coming Soon |
|
||||
| Jan V1-13B | 8-16GB | Balanced power and performance | Coming Soon |
|
||||
| Jan V1-70B | 40-64GB | Deep analysis, professional work | Coming Soon |
|
||||
| Jan V1-180B | 100GB+ | Frontier research, complex tasks | Planned 2026 |
|
||||
|
||||
### What to Expect
|
||||
- **Competitive Performance**: Aiming for results on par with leading closed-source models.
|
||||
- **Optimized for Local Use**: Efficient quantized versions for running on your own hardware.
|
||||
- **Privacy-Centric**: Trainable and runnable in your own environment, ensuring your data stays yours.
|
||||
- **Seamless Integration**: Designed to work perfectly within the Jan ecosystem, from Desktop to Server.
|
||||
|
||||
---
|
||||
|
||||
### Stay Updated
|
||||
|
||||
Be the first to know about our training progress, benchmark results, and when the first Jan V1 models are released.
|
||||
|
||||
[**Follow our development →**](https://jan.ai/v1-updates)
|
||||
@ -7,6 +7,10 @@ sidebar:
|
||||
|
||||
import { Aside, Card, CardGrid, Tabs, TabItem } from '@astrojs/starlight/components';
|
||||
|
||||
This is how Jan started and it has been available since day 1. Jan Desktop strives to be:
|
||||
|
||||
> Your personal AI workstation that helps with our use cases and powers other devices. Run models locally right away or bring an API key to connect to your favorite cloud-based models.
|
||||
|
||||
Jan Desktop is where it all starts. Download it, open it, start chatting. Your AI runs on your computer with zero setup required.
|
||||
|
||||
## Two Modes, Zero Complexity
|
||||
@ -33,21 +37,39 @@ First time opening Jan? It just works. No API keys, no model downloads, no setti
|
||||
</Card>
|
||||
|
||||
<Card title="Powers Other Devices" icon="devices">
|
||||
Your desktop becomes an AI server for your phone and other computers.
|
||||
Your desktop becomes an AI server for your phone and other computers via network connection.
|
||||
</Card>
|
||||
|
||||
<Card title="Developer Friendly" icon="code">
|
||||
Local API at `localhost:1337`. Works with any OpenAI-compatible tool.
|
||||
</Card>
|
||||
|
||||
<Card title="GPU Acceleration" icon="rocket">
|
||||
Automatically detects and uses NVIDIA GPUs for faster performance.
|
||||
</Card>
|
||||
|
||||
<Card title="Cross-Platform" icon="laptop">
|
||||
Windows, macOS, and Linux support with native performance.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## System Requirements
|
||||
|
||||
| Component | Minimum | Recommended |
|
||||
|-----------|---------|-------------|
|
||||
| RAM | 8GB | 16GB+ |
|
||||
| Storage | 10GB | 20GB+ |
|
||||
| OS | Windows 10, macOS 12, Ubuntu 20.04 | Latest versions |
|
||||
### Minimum Requirements
|
||||
- **RAM:** 8GB (models use less than 80% of available memory)
|
||||
- **Storage:** 10GB+ free space
|
||||
- **OS:** Windows 10, macOS 12, Ubuntu 20.04 or newer
|
||||
|
||||
### Recommended
|
||||
- **RAM:** 16GB+ for larger models
|
||||
- **Storage:** 20GB+ for multiple models
|
||||
- **GPU:** NVIDIA GPU with 6GB+ VRAM for acceleration
|
||||
- **OS:** Latest versions for best performance
|
||||
|
||||
### GPU Acceleration (Optional)
|
||||
- **NVIDIA Driver:** 470.63.01 or higher
|
||||
- **CUDA Toolkit:** 11.7 or higher
|
||||
- **Linux:** Additional GCC-11 packages required
|
||||
|
||||
<Aside type="tip">
|
||||
Jan automatically detects your hardware and recommends the best model. No manual configuration needed.
|
||||
@ -73,6 +95,8 @@ Jan comes with a model that works immediately. No downloading, no waiting.
|
||||
- No telemetry by default
|
||||
- No account required
|
||||
- No data leaves your machine
|
||||
- Chat history stored locally only
|
||||
- Full control over your data
|
||||
|
||||
## Cloud Mode (Optional)
|
||||
|
||||
@ -94,17 +118,19 @@ Connect to external AI providers when you need more power:
|
||||
|
||||
## Desktop as Your AI Hub
|
||||
|
||||
Your desktop can power AI across all your devices:
|
||||
Your desktop can power AI across all your devices. Jan Desktop automatically becomes a local server that other devices can connect to:
|
||||
|
||||
```
|
||||
┌─────────────┐
|
||||
│ Jan Desktop │ ← Your AI hub
|
||||
└──────┬──────┘
|
||||
│
|
||||
┌───┴────┬─────────┬────────┐
|
||||
│ │ │ │
|
||||
Mobile Tablet Other PCs Apps
|
||||
```
|
||||
**Network Sharing:**
|
||||
- Mobile apps connect over WiFi
|
||||
- Other computers can access your models
|
||||
- API available at `localhost:1337`
|
||||
- No internet required for local network
|
||||
|
||||
**Supported Connections:**
|
||||
- Jan Mobile (coming Q4 2025)
|
||||
- Other Jan Desktop instances
|
||||
- Any OpenAI-compatible application (see the sketch below)
|
||||
- Custom scripts and tools
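
For instance, here is a minimal sketch of pointing an OpenAI-compatible client at the local endpoint; the `/v1` path and the `jan-nano` model name are assumptions borrowed from the examples elsewhere in these docs, so adjust them to match your setup:

```python
# Minimal sketch, assuming Jan Desktop exposes an OpenAI-compatible API at
# localhost:1337/v1 and that a model such as "jan-nano" is downloaded.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:1337/v1",  # Jan Desktop's local API
    api_key="not-needed-for-local",       # placeholder; local mode needs no real key
)

response = client.chat.completions.create(
    model="jan-nano",  # swap in whichever model you actually have loaded
    messages=[{"role": "user", "content": "Hello from my own machine"}],
)
print(response.choices[0].message.content)
```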
|
||||
|
||||
## For Developers
|
||||
|
||||
@ -145,22 +171,51 @@ Yes. In Local Mode, nothing leaves your computer.
|
||||
| Code generation | Good | Excellent |
|
||||
| Privacy | Complete | Depends on provider |
|
||||
|
||||
## Performance Optimization
|
||||
|
||||
### Automatic GPU Detection
|
||||
Jan automatically detects and configures:
|
||||
- NVIDIA graphics cards
|
||||
- Available VRAM
|
||||
- Optimal GPU layers (ngl) settings
|
||||
- Fallback to CPU if needed
|
||||
|
||||
### Memory Management
|
||||
- Smart model loading based on available RAM
|
||||
- Automatic offloading when memory is low
|
||||
- Background process cleanup
|
||||
- Efficient resource usage
|
||||
|
||||
## Tested Configurations
|
||||
|
||||
Jan Desktop has been verified to work on these systems:
|
||||
|
||||
**Windows Systems:**
|
||||
- Windows 11 Pro + RTX 4070Ti + CUDA 12.2 + Driver 531.18
|
||||
- Windows 10 + GTX 1660Ti + Various driver versions
|
||||
|
||||
**Linux Systems:**
|
||||
- Ubuntu 22.04 + RTX 4070Ti + CUDA 12.2 + Driver 545
|
||||
- Ubuntu 20.04 on Proxmox VM + GTX 1660Ti + CUDA 12.1
|
||||
|
||||
**macOS Systems:**
|
||||
- macOS Monterey and newer
|
||||
- Both Intel and Apple Silicon (M1/M2) processors
|
||||
- Automatic Metal acceleration on Apple Silicon
|
||||
|
||||
## Coming Soon
|
||||
|
||||
### Simplified Onboarding (v0.7.0)
|
||||
- Hardware detection and optimization
|
||||
- One-click setup
|
||||
- Automatic model selection
|
||||
### Enhanced User Experience
|
||||
- One-click model switching
|
||||
- Improved onboarding flow
|
||||
- Better hardware detection
|
||||
- Voice input support
|
||||
|
||||
### Enhanced Local Mode
|
||||
- Better default model (Jan Nano)
|
||||
- Faster responses
|
||||
- Lower memory usage
|
||||
|
||||
### Quick Access
|
||||
- System-wide hotkey
|
||||
- Floating window
|
||||
- Voice input
|
||||
### Advanced Features
|
||||
- Model fine-tuning interface
|
||||
- Custom model deployment
|
||||
- Team collaboration features
|
||||
- Enhanced mobile connectivity
|
||||
|
||||
<Aside type="caution">
|
||||
Having issues? Most problems are solved by restarting the app. If that doesn't work, check our [troubleshooting guide](/docs/troubleshooting).
|
||||
189
website/src/content/products/platforms/jan-ai.mdx
Normal file
189
website/src/content/products/platforms/jan-ai.mdx
Normal file
@ -0,0 +1,189 @@
|
||||
---
|
||||
title: jan.ai
|
||||
description: Cloud AI that respects your privacy
|
||||
sidebar:
|
||||
order: 1
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
**Status:** Beta Launch Soon
|
||||
|
||||
Web-based version of Jan with no setup required. It also serves as the default cloud mode for mobile and desktop users.
|
||||
|
||||
## What is Jan Web?
|
||||
|
||||
Jan Web is the cloud-hosted version of Jan that runs in your browser. No installation needed, instant access from any device, with the same AI experience you get locally.
|
||||
|
||||
<Aside type="note">
|
||||
Currently in beta development. Beta launch coming soon.
|
||||
</Aside>
|
||||
|
||||
## How It Works
|
||||
|
||||
<CardGrid>
|
||||
<Card title="For Desktop Users" icon="laptop">
|
||||
When you switch to Cloud Mode in Jan Desktop, it connects to Jan Web automatically. No configuration needed.
|
||||
</Card>
|
||||
|
||||
<Card title="For Mobile Users" icon="mobile">
|
||||
Jan Web serves as the default cloud backend for mobile apps when not connected to your desktop.
|
||||
</Card>
|
||||
|
||||
<Card title="For Web Users" icon="globe">
|
||||
Visit jan.ai directly in your browser for instant access to AI without downloading anything.
|
||||
</Card>
|
||||
|
||||
<Card title="Team Collaboration" icon="group">
|
||||
Share prompts, workflows, and collaborate on threads with your team members.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Key Features
|
||||
|
||||
### No Installation Needed
|
||||
- No installation needed
|
||||
- Instant access from any browser
|
||||
- Automatic updates and maintenance
|
||||
- Works on any device with internet
|
||||
|
||||
### Team Collaboration Features
|
||||
- Share prompts across team members
|
||||
- Share workflows and templates
|
||||
- Collaborate on conversation threads
|
||||
- Real-time team coordination
|
||||
|
||||
### Cloud Backend for Mobile
|
||||
- Default cloud backend for mobile apps
|
||||
- Seamless sync across devices
|
||||
- Consistent experience everywhere
|
||||
- No setup required for mobile users
|
||||
|
||||
### Enterprise Ready
|
||||
- Team management features
|
||||
- Admin controls and oversight
|
||||
- Usage analytics and reporting
|
||||
- Integration with existing tools
|
||||
|
||||
## Pricing
|
||||
|
||||
| Tier | Features | Price |
|
||||
|------|----------|--------|
|
||||
| **Free** | Free for everyone | $0 |
|
||||
| **Pro** | Access our latest models<br/>Access other cloud providers without API keys | Coming Soon |
|
||||
| **Enterprise** | Self-host or we host it for you<br/>Active support and SLAs<br/>SSO integration<br/>Team features | Contact Sales |
|
||||
|
||||
<Aside type="tip">
|
||||
Free tier gives you access to Jan's core functionality. Pro and Enterprise tiers add advanced features and support.
|
||||
</Aside>
|
||||
|
||||
## Why Use Jan Web?
|
||||
|
||||
### When You Need Cloud Power
|
||||
- Access from any device with internet
|
||||
- More powerful models than local hardware
|
||||
- Faster responses for complex tasks
|
||||
- Always up-to-date with latest features
|
||||
|
||||
### Team Collaboration
|
||||
- Share knowledge and workflows
|
||||
- Collaborate on projects in real-time
|
||||
- Consistent experience across team
|
||||
- Admin controls for organizations
|
||||
|
||||
### Zero Setup
|
||||
- No downloads or installations
|
||||
- No API keys to manage
|
||||
- No complex configurations
|
||||
- Just open browser and start chatting
|
||||
|
||||
### Enterprise Features
|
||||
- SSO integration for security
|
||||
- Team management and controls
|
||||
- Usage analytics and reporting
|
||||
- Professional support and SLAs
|
||||
|
||||
## For Developers
|
||||
|
||||
### API Access
|
||||
```javascript
|
||||
// Same API as local Jan
|
||||
const response = await fetch('https://api.jan.ai/v1/chat/completions', {
  method: 'POST',
  headers: {
    'Authorization': 'Bearer YOUR_KEY',
    'Content-Type': 'application/json'
  },
  body: JSON.stringify({
    model: 'jan-nano',
    messages: [{ role: 'user', content: 'Hello' }]
  })
});
|
||||
```
|
||||
|
||||
### OpenAI Compatible
|
||||
Drop-in replacement for OpenAI API:
|
||||
```python
|
||||
# Just change the base URL
from openai import OpenAI

client = OpenAI(
    base_url="https://api.jan.ai/v1",
    api_key="your-jan-key"
)
|
||||
```
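
Continuing from the client above, a hedged usage sketch of a chat completion call; the `jan-nano` model name simply mirrors the JavaScript example and may differ from what your plan actually offers:

```python
# Follows on from the client constructed above.
completion = client.chat.completions.create(
    model="jan-nano",  # assumed model name, as in the JavaScript example
    messages=[{"role": "user", "content": "Draft a short status update"}],
)
print(completion.choices[0].message.content)
```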
|
||||
|
||||
### Integration Options
|
||||
- REST API for custom applications
|
||||
- Webhook support for automation
|
||||
- Team API for organization management
|
||||
- Usage analytics API for monitoring
|
||||
|
||||
## Common Questions
|
||||
|
||||
### How is this different from ChatGPT?
|
||||
- Open source and transparent
|
||||
- Privacy-focused by design
|
||||
- Same experience as local Jan
|
||||
- Team collaboration features
|
||||
- Enterprise self-hosting options
|
||||
|
||||
### Can I use my own models?
|
||||
Not directly on Jan Web. For custom models, use Jan Desktop locally or self-host Jan Server.
|
||||
|
||||
### Is my data secure?
|
||||
Yes, Jan Web follows enterprise security practices:
|
||||
- Encrypted connections and storage
|
||||
- No training on user data
|
||||
- Optional anonymous usage
|
||||
- Regular security audits
|
||||
- GDPR compliance
|
||||
|
||||
### When will it launch?
|
||||
Beta launch is coming soon. Full public launch planned for 2025.
|
||||
|
||||
## Coming Soon
|
||||
|
||||
### Beta Launch Features
|
||||
- Web-based chat interface
|
||||
- Team collaboration tools
|
||||
- API access for developers
|
||||
- Jan Desktop/Mobile integration
|
||||
- Basic admin controls
|
||||
|
||||
### Full Launch Features
|
||||
- Advanced team management
|
||||
- SSO and enterprise authentication
|
||||
- Custom workflows and automation
|
||||
- Usage analytics and reporting
|
||||
- Advanced model access
|
||||
|
||||
<Aside type="note">
|
||||
Jan Web is designed to feel exactly like local Jan, just accessible anywhere. Same principles, same experience, cloud convenience.
|
||||
</Aside>
|
||||
|
||||
## The Philosophy
|
||||
|
||||
Cloud AI doesn't have to mean giving up control. Jan Web proves you can have:
|
||||
- Convenience without surveillance
|
||||
- Team collaboration without data mining
|
||||
- Enterprise features without vendor lock-in
|
||||
- Cloud power with privacy respect
|
||||
|
||||
---
|
||||
|
||||
[Join Beta Waitlist](https://jan.ai/beta) | [Contact Sales](https://jan.ai/enterprise) | [API Documentation](/docs/api)
|
||||
219
website/src/content/products/platforms/mobile.mdx
Normal file
219
website/src/content/products/platforms/mobile.mdx
Normal file
@ -0,0 +1,219 @@
|
||||
---
|
||||
title: Jan Mobile
|
||||
description: Your AI assistant on the go
|
||||
sidebar:
|
||||
order: 3
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
**Status:** Coming Q4 2025
|
||||
|
||||
Connect to Desktop/Server, run local mode with Jan Nano or Lucy, same experience everywhere.
|
||||
|
||||
Jan Mobile brings the same AI experience to your phone. Connect to your desktop, your server, or run models locally.
|
||||
|
||||
<Aside type="note">
|
||||
Coming Q4 2025. Sign up for the [newsletter](https://jan.ai/mobile) to get notified when it's available.
|
||||
</Aside>
|
||||
|
||||
## How It Works
|
||||
|
||||
Jan Mobile adapts to your situation:
|
||||
|
||||
At Home, you can connect to your Jan Desktop over WiFi
|
||||
|
||||

|
||||
|
||||
At Work, you can connect to your company Jan Server
|
||||
|
||||

|
||||
|
||||
On the Go, you can run Jan Nano on your phone or talk to your favourite cloud-based model
|
||||
|
||||

|
||||
|
||||
**No configuration needed. It just works.**
|
||||
|
||||
## Three Modes, One Experience
|
||||
|
||||
### Desktop Mode
|
||||
When you're near your computer at home, your phone uses your desktop's models and processing power.
|
||||
|
||||
```
|
||||
Your Phone → WiFi → Your Desktop → Response
|
||||
(automatic) (powerful models)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Access to larger models
|
||||
- Faster processing
|
||||
- Shared conversations
|
||||
- No phone battery drain
|
||||
|
||||
### Server Mode
|
||||
Connect to your organization's Jan Server for team collaboration.
|
||||
|
||||
```
|
||||
Your Phone → Internet → Company Server → Response
|
||||
(secure) (shared models)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Team knowledge base
|
||||
- Consistent models
|
||||
- Central management
|
||||
- Work anywhere
|
||||
|
||||
### Local Mode
|
||||
No connection? No problem. Jan Nano runs directly on your phone.
|
||||
|
||||
```
|
||||
Your Phone → Jan Nano (6GB) → Response
|
||||
(private & offline)
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Complete privacy
|
||||
- Works offline
|
||||
- No data usage
|
||||
- Always available
|
||||
|
||||
<Aside type="tip">
|
||||
The app automatically switches between modes based on what's available. You don't need to do anything.
|
||||
</Aside>
|
||||
|
||||
## Key Features
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Seamless Switching" icon="random">
|
||||
Move from home to office to airplane. One-click and Jan adapts immediately.
|
||||
</Card>
|
||||
|
||||
<Card title="Voice First" icon="phone">
|
||||
Talk to Jan naturally. Responses can be spoken too.
|
||||
</Card>
|
||||
|
||||
<Card title="Sync Everything" icon="up-arrow">
|
||||
Conversations, settings, and preferences follow you across devices.
|
||||
</Card>
|
||||
|
||||
<Card title="iOS and Android" icon="mobile">
|
||||
Native apps for both platforms with consistent experience.
|
||||
</Card>
|
||||
|
||||
<Card title="Three Adaptive Modes" icon="setting">
|
||||
Automatically switches between Desktop, Server, and Local modes.
|
||||
</Card>
|
||||
|
||||
<Card title="Jan Nano Integration" icon="rocket">
|
||||
Run AI models directly on your phone for complete privacy.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Privacy & Security
|
||||
|
||||
### Your Data, Your Control
|
||||
- Local Mode: Everything stays on your phone
|
||||
- Desktop Mode: Direct encrypted connection
|
||||
- Server Mode: Your organization's policies apply
|
||||
|
||||
### No Compromises
|
||||
- Biometric app lock
|
||||
- Encrypted storage
|
||||
- No cloud backups without permission
|
||||
- Clear data anytime
|
||||
|
||||
## Why Mobile Matters
|
||||
|
||||
Your phone is with you always. Your AI assistant should be too. But that doesn't mean sacrificing privacy or control.
|
||||
|
||||
Jan Mobile proves you can have:
|
||||
- Powerful AI anywhere
|
||||
- Complete privacy when needed
|
||||
- Simple experience always
|
||||
- No subscriptions or tracking
|
||||
|
||||
## Technical Details
|
||||
|
||||
### App Architecture
|
||||
- Built with native performance in mind
|
||||
- Small app size (~50MB download)
|
||||
- Efficient battery usage
|
||||
- Background processing support
|
||||
|
||||
### Supported Platforms
|
||||
- **iOS:** iPhone and iPad support
|
||||
- **Android:** Phone and tablet support
|
||||
- **Voice Interface:** Natural speech input/output
|
||||
- **Seamless Device Switching:** Real-time sync across devices
|
||||
|
||||
### Jan Nano for Mobile
|
||||
- Optimized 6GB model for mobile devices
|
||||
- Runs entirely on-device
|
||||
- No internet required
|
||||
- Complete privacy and offline functionality
|
||||
|
||||
## Coming Features
|
||||
|
||||
### Launch (Q4 2025)
|
||||
- iOS and Android native apps
|
||||
- Three adaptive connection modes
|
||||
- Voice-first interface
|
||||
- Basic chat and conversation management
|
||||
- Automatic mode switching
|
||||
|
||||
### Post-Launch Updates
|
||||
- Advanced voice input with wake words
|
||||
- Background conversation sync
|
||||
- Widget support for quick access
|
||||
- Enhanced team collaboration features
|
||||
- Custom model support
|
||||
|
||||
<Aside type="note">
|
||||
Jan Mobile is built for native performance with efficient resource usage and small app size.
|
||||
</Aside>
|
||||
|
||||
## The Vision
|
||||
|
||||
Most mobile AI apps are just cloud wrappers. Jan Mobile is different:
|
||||
|
||||
| Feature | Other AI Apps | Jan Mobile |
|
||||
|---------|---------------|------------|
|
||||
| Offline Mode | ❌ | ✅ Jan Nano |
|
||||
| Desktop Connection | ❌ | ✅ Your models |
|
||||
| Privacy | Your data in cloud | Your data stays yours |
|
||||
| Cost | Monthly subscription | Free with your hardware |
|
||||
|
||||
## Development Status
|
||||
|
||||
### Current Progress
|
||||
- Core architecture completed
|
||||
- Desktop/Server connection protocols implemented
|
||||
- Jan Nano mobile optimization in progress
|
||||
- iOS and Android app development underway
|
||||
|
||||
### Beta Testing
|
||||
- Closed beta planned for Q3 2025
|
||||
- Open beta following successful internal testing
|
||||
- Community feedback integration throughout development
|
||||
|
||||
## While You Wait for Jan Mobile
|
||||
|
||||
1. **Set up Jan Desktop** - It will power your mobile experience when at home
|
||||
2. **Try Jan Web** - Get familiar with the cloud interface
|
||||
3. **Join our newsletter** - Be first to know when beta launches
|
||||
4. **Provide feedback** - Help shape the mobile experience through our Discord community
|
||||
5. **Prepare your setup** - Ensure Jan Desktop is configured for network access
|
||||
|
||||
## Get Notified
|
||||
|
||||
Want to be among the first to try Jan Mobile?
|
||||
- [Join our newsletter](https://jan.ai/mobile) for launch updates
|
||||
- [Follow us on Discord](https://discord.gg/jan) for development progress
|
||||
- [Check our GitHub](https://github.com/janhq/jan) for technical updates
|
||||
|
||||
---
|
||||
|
||||
[Jan Desktop](/docs/products/platforms/desktop) | [Jan Web](/docs/products/platforms/jan-ai) | [Jan Server](/docs/products/platforms/server)
|
||||
335
website/src/content/products/platforms/server.mdx
Normal file
335
website/src/content/products/platforms/server.mdx
Normal file
@ -0,0 +1,335 @@
|
||||
---
|
||||
title: Jan Server
|
||||
description: Your own private AI cloud
|
||||
sidebar:
|
||||
order: 4
|
||||
---
|
||||
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
**Status:** Coming Q2 2025
|
||||
|
||||
Self-hosted solution for teams and enterprises. Your own private AI cloud.
|
||||
|
||||
Jan Server is Jan Desktop with multi-user support. Deploy it on your hardware to create your own private AI cloud for your team or organization.
|
||||
|
||||
<Aside type="note">
|
||||
Coming Q2 2025. Join the early access list at [jan.ai/server](https://jan.ai/server).
|
||||
</Aside>
|
||||
|
||||
## What is Jan Server?
|
||||
|
||||
```
|
||||
Jan Server = Jan Desktop + Multi-user support + Real hardware
|
||||
```
|
||||
|
||||
It's the same engine that powers Jan Desktop, scaled up for teams. Your data stays on your servers, your models run on your GPUs, your AI remains yours.
|
||||
|
||||
## Why Organizations Need This
|
||||
|
||||
### The Problem
|
||||
Every API call to ChatGPT or Claude is:
|
||||
- Your intellectual property leaving your network
|
||||
- Potential training data for someone else's model
|
||||
- A compliance nightmare waiting to happen
|
||||
- A monthly bill that never ends
|
||||
|
||||
### The Solution
|
||||
Jan Server gives you:
|
||||
- **Complete control**: Your hardware, your rules
|
||||
- **Total privacy**: Nothing leaves your network
|
||||
- **Predictable costs**: One-time hardware investment
|
||||
- **Compliance ready**: GDPR, HIPAA, SOC2 friendly
|
||||
|
||||
## Key Features
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Multi-User Support" icon="users">
|
||||
Support for 5-500+ concurrent users with individual accounts and permissions.
|
||||
</Card>
|
||||
|
||||
<Card title="Enterprise Authentication" icon="shield">
|
||||
SSO, LDAP integration and enterprise-grade security controls.
|
||||
</Card>
|
||||
|
||||
<Card title="Flexible Deployment" icon="setting">
|
||||
Docker, Kubernetes, or bare metal deployment options.
|
||||
</Card>
|
||||
|
||||
<Card title="Admin Dashboard" icon="laptop">
|
||||
Centralized management for users, models, and system monitoring.
|
||||
</Card>
|
||||
|
||||
<Card title="Team Knowledge Sharing" icon="group">
|
||||
Shared conversations, templates, and collaborative workflows.
|
||||
</Card>
|
||||
|
||||
<Card title="Same API" icon="code">
|
||||
Drop-in replacement for OpenAI API - no code changes needed.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Deployment Options
|
||||
|
||||
<CardGrid>
|
||||
<Card title="Docker: Single Command Setup" icon="laptop">
|
||||
Perfect for getting started quickly with containerized deployment.
|
||||
</Card>
|
||||
|
||||
<Card title="Kubernetes: Enterprise Scale" icon="building">
|
||||
Auto-scaling, high availability, and enterprise orchestration.
|
||||
</Card>
|
||||
|
||||
<Card title="Bare Metal: Maximum Control" icon="setting">
|
||||
Direct hardware access for maximum performance and customization.
|
||||
</Card>
|
||||
</CardGrid>
|
||||
|
||||
## Simple Deployment
|
||||
|
||||
### Docker (Single Command Setup)
|
||||
```yaml
|
||||
version: '3'
|
||||
services:
|
||||
jan-server:
|
||||
image: jan.ai/server:latest
|
||||
ports:
|
||||
- "80:80"
|
||||
- "1337:1337"
|
||||
volumes:
|
||||
- ./models:/models
|
||||
- ./data:/data
|
||||
deploy:
|
||||
resources:
|
||||
reservations:
|
||||
devices:
|
||||
- capabilities: [gpu]
|
||||
```
|
||||
|
||||
### Kubernetes (Enterprise Scale)
|
||||
```yaml
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: jan-server
|
||||
spec:
|
||||
replicas: 3
|
||||
selector:
|
||||
matchLabels:
|
||||
app: jan-server
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: jan-server
|
||||
spec:
|
||||
containers:
|
||||
- name: jan-server
|
||||
image: jan.ai/server:latest
|
||||
resources:
|
||||
limits:
|
||||
nvidia.com/gpu: 1
|
||||
```
|
||||
|
||||
### Bare Metal (Maximum Control)
|
||||
Direct installation on your hardware for maximum performance and custom configurations.
|
||||
|
||||
## Detailed Features
|
||||
|
||||
### Enterprise Authentication
|
||||
- **SSO Integration**: SAML, OAuth, OpenID Connect
|
||||
- **LDAP/Active Directory**: Existing user directory integration
|
||||
- **Role-Based Access**: Granular permissions and model access
|
||||
- **API Key Management**: Individual and service account keys
|
||||
|
||||
### Multi-User Management
|
||||
- **User Accounts**: Individual profiles and preferences
|
||||
- **Usage Tracking**: Per-user analytics and quotas
|
||||
- **Team Collaboration**: Shared conversations and workflows
|
||||
- **Admin Dashboard**: Centralized user and system management
|
||||
|
||||
### Same API as Desktop
|
||||
```python
|
||||
# Your code doesn't change
from openai import OpenAI

client = OpenAI(
    base_url="https://jan.company.internal/v1",
    api_key="user-specific-key"
)
|
||||
```
|
||||
|
||||
### Team Knowledge Sharing
|
||||
- **Shared Conversations**: Collaborative chat threads
|
||||
- **Template Library**: Reusable prompts and workflows
|
||||
- **Knowledge Base**: Organizational AI knowledge
|
||||
- **Team Analytics**: Usage patterns and insights
|
||||
|
||||
<Aside type="tip">
|
||||
Jan Server uses the same models as Desktop. No special "enterprise" versions with inflated prices.
|
||||
</Aside>
|
||||
|
||||
## Scaling Guidelines
|
||||
|
||||
### Small Teams (5-10 users)
|
||||
- **Hardware**: Single RTX 6000 Ada (48GB VRAM)
|
||||
- **RAM**: 128GB system memory
|
||||
- **Models**: Up to 70B parameter models
|
||||
- **Concurrent Users**: 5-10 active users
|
||||
- **Estimated Cost**: ~$15,000 one-time hardware
|
||||
|
||||
### Department Scale (10-50 users)
|
||||
- **Hardware**: 2-4 GPU cluster nodes
|
||||
- **RAM**: 256GB per node
|
||||
- **Models**: Multiple concurrent model instances
|
||||
- **Concurrent Users**: 10-50 active users
|
||||
- **Estimated Cost**: ~$50,000-$100,000 one-time
|
||||
|
||||
### Enterprise Scale (50+ users)
|
||||
- **Hardware**: DGX cluster or custom configuration
|
||||
- **RAM**: Scalable as needed
|
||||
- **Models**: Full model library with redundancy
|
||||
- **Concurrent Users**: 50-500+ active users
|
||||
- **Estimated Cost**: Custom enterprise quote
|
||||
|
||||
## Real-World Use Cases
|
||||
|
||||
| Organization Type | Deployment | Benefits Achieved |
|
||||
|------------------|------------|-------------------|
|
||||
| Law Firm | 2x RTX 6000, 200 users | Client data never leaves network, GDPR compliance |
|
||||
| Hospital System | DGX node, 500 users | HIPAA compliant AI assistant, medical data privacy |
|
||||
| Tech Startup | 4x RTX 4090, 50 users | 90% cost reduction vs. OpenAI API calls |
|
||||
| Research University | Multi-node cluster | Unrestricted research, no usage limits |
|
||||
| Financial Services | Air-gapped deployment | Complete data isolation, regulatory compliance |
|
||||
|
||||
## Hardware Requirements
|
||||
|
||||
### Minimum Configuration
|
||||
- **GPU**: RTX 3090 or better (24GB VRAM minimum)
|
||||
- **CPU**: 16+ cores (Xeon, EPYC, or equivalent)
|
||||
- **RAM**: 64GB system memory minimum
|
||||
- **Storage**: 1TB NVMe SSD for models and data
|
||||
- **Network**: Gigabit Ethernet minimum
|
||||
|
||||
### Recommended Production Setup
|
||||
- **GPU**: RTX 6000 Ada (48GB) or A100 (80GB)
|
||||
- **CPU**: Dual socket Xeon/EPYC (32+ cores)
|
||||
- **RAM**: 128-256GB system memory
|
||||
- **Storage**: RAID NVMe array (2TB+ capacity)
|
||||
- **Network**: 10Gbps for multiple concurrent users
|
||||
|
||||
### Enterprise Scaling Guidelines
|
||||
- **Users per GPU**: ~5-10 concurrent active users
|
||||
- **70B Models**: Require 40-80GB VRAM depending on quantization
|
||||
- **CPU Fallback**: Smaller models can run on CPU for cost optimization
|
||||
- **High Availability**: Multi-node deployment with load balancing
|
||||
- **Backup Strategy**: Regular model and data backups recommended
|
||||
|
||||
## Why Self-Host?
|
||||
|
||||
### For IT Teams
|
||||
- No data leaves your network
|
||||
- Complete audit trails
|
||||
- Integrate with existing auth (LDAP/AD)
|
||||
- Predictable resource usage
|
||||
|
||||
### For Security Teams
|
||||
- Air-gapped deployment options
|
||||
- End-to-end encryption
|
||||
- No third-party access
|
||||
- Full compliance control
|
||||
|
||||
### For Finance Teams
|
||||
- One-time hardware cost
|
||||
- No per-token pricing
|
||||
- Predictable TCO
|
||||
- Use existing infrastructure
|
||||
|
||||
## Development Roadmap
|
||||
|
||||
### Q2 2025 Launch Features
|
||||
- Multi-user authentication and management
|
||||
- Web-based admin dashboard
|
||||
- OpenAI API compatibility
|
||||
- Docker and Kubernetes deployment
|
||||
- Basic usage monitoring and analytics
|
||||
- Team collaboration features
|
||||
|
||||
### Post-Launch Updates
|
||||
- Advanced governance and compliance tools
|
||||
- Model fine-tuning interface
|
||||
- Automated scaling and load balancing
|
||||
- Comprehensive backup and restore
|
||||
- Enhanced security and audit logging
|
||||
|
||||
### Future Vision
|
||||
- Federated multi-site deployments
|
||||
- Cross-region synchronization
|
||||
- Advanced usage analytics and insights
|
||||
- Custom model training and optimization
|
||||
- Integration with enterprise workflow tools
|
||||
|
||||
<Aside type="caution">
|
||||
Jan Server requires proper cooling and power for GPU hardware. Plan your infrastructure accordingly.
|
||||
</Aside>
|
||||
|
||||
## Migration Path
|
||||
|
||||
### From Cloud AI
|
||||
1. Deploy Jan Server
|
||||
2. Import your workflows
|
||||
3. Update API endpoints
|
||||
4. Migrate users gradually
|
||||
|
||||
### From Jan Desktop
|
||||
1. Same models work instantly
|
||||
2. Add user management
|
||||
3. Scale as needed
|
||||
|
||||
## The Philosophy
|
||||
|
||||
We believe organizations should own their AI infrastructure just like they own their data. Jan Server makes this possible without compromising on capabilities.
|
||||
|
||||
This isn't about avoiding the cloud - it's about having a choice. Run your AI where it makes sense for your organization.
|
||||
|
||||
## Support Options
|
||||
|
||||
### Community Edition
|
||||
- Full features
|
||||
- Community support
|
||||
- Perfect for small teams
|
||||
|
||||
### Enterprise Edition
|
||||
- Priority support
|
||||
- Custom deployment help
|
||||
- SLA guarantees
|
||||
- Training included
|
||||
|
||||
## Get Ready for Jan Server
|
||||
|
||||
While Jan Server is in development (Q2 2025 launch):
|
||||
|
||||
### Preparation Steps
|
||||
1. **Assess Hardware Needs**: Review our scaling guidelines above
|
||||
2. **Plan Network Architecture**: Consider security and access requirements
|
||||
3. **Evaluate Authentication**: Determine SSO/LDAP integration needs
|
||||
4. **Test with Jan Desktop**: Same models and API, perfect for preparation
|
||||
5. **Join Early Access Program**: Get notified when beta testing begins
|
||||
|
||||
### Early Access Benefits
|
||||
- Beta testing opportunities
|
||||
- Hardware optimization guidance
|
||||
- Deployment planning assistance
|
||||
- Priority technical support
|
||||
- Input on feature development
|
||||
|
||||
### Enterprise Consultation
|
||||
For large deployments or custom requirements:
|
||||
- Architecture planning sessions
|
||||
- Hardware recommendation consultations
|
||||
- Security and compliance reviews
|
||||
- Migration planning from existing AI services
|
||||
- Custom deployment assistance
|
||||
|
||||
---
|
||||
|
||||
**Ready to Plan Your Deployment?**
|
||||
|
||||
[Join Early Access](https://jan.ai/server) | [Enterprise Consultation](https://jan.ai/enterprise) | [Hardware Planning Guide](/docs/server/hardware) | [Jan Desktop Trial](/download)
|
||||
36
website/src/content/products/tools/browseruse.mdx
Normal file
36
website/src/content/products/tools/browseruse.mdx
Normal file
@ -0,0 +1,36 @@
|
||||
---
|
||||
title: BrowserUse
|
||||
description: Native browser automation for Jan, enabling AI to interact with the web on your behalf.
|
||||
sidebar:
|
||||
order: 3
|
||||
banner:
|
||||
content: 'In Development: This tool is planned and not yet available.'
|
||||
---
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
## Let Jan Use Your Browser
|
||||
|
||||
`BrowserUse` is a native tool being developed for the Jan ecosystem that will allow the AI to securely control a browser to perform tasks, automate workflows, and interact with websites just like a human would.
|
||||
|
||||
Think of it as an integrated, privacy-first automation layer that turns Jan from a conversational AI into a powerful agent for action.
|
||||
|
||||
<Aside type="note">
|
||||
**This tool is not yet available.** We are designing `BrowserUse` to be a core component of Jan's agentic capabilities.
|
||||
</Aside>
|
||||
|
||||
### Inspired by the Best
|
||||
|
||||
While tools like [Browser Use](https://docs.browser-use.com/introduction) exist for developers, Jan's `BrowserUse` will be a built-in, user-friendly feature. No complex setup required—just grant permission and let Jan handle the rest.
|
||||
|
||||
### Built on MCP
|
||||
|
||||
The tool will be implemented as a native **Model Context Protocol (MCP)** server within Jan, ensuring secure and standardized communication between the AI model and the browser. This allows for powerful, auditable, and secure web interactions.
|
||||
|
||||
---
|
||||
|
||||
### Planned Core Features:
|
||||
- **Secure Sessions**: All browsing activity happens in an isolated, sandboxed environment.
|
||||
- **Natural Language Control**: Simply tell Jan what you want to do (e.g., "Book a flight," "Research this topic and summarize the top 5 findings," "Apply for this job for me").
|
||||
- **Visual Understanding**: Jan will be able to see and interpret the content on pages, not just the underlying code.
|
||||
- **User in the Loop**: Always ask for permission before taking critical actions like submitting forms or making purchases.
|
||||
|
||||
31
website/src/content/products/tools/deepresearch.mdx
Normal file
31
website/src/content/products/tools/deepresearch.mdx
Normal file
@ -0,0 +1,31 @@
|
||||
---
|
||||
title: DeepResearch
|
||||
description: Go beyond simple search with an AI agent that performs comprehensive, multi-step research for you.
|
||||
sidebar:
|
||||
order: 1
|
||||
banner:
|
||||
content: 'In Development: This tool is planned and not yet available.'
|
||||
---
|
||||
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';
|
||||
|
||||
## Your Personal Research Analyst
|
||||
|
||||
`DeepResearch` is a planned native tool for Jan that transforms it into a powerful research agent. Give it a complex question, and it will autonomously browse, analyze, and synthesize information from numerous sources to deliver a comprehensive, structured report.
|
||||
|
||||
Think of it as Jan's answer to the advanced research capabilities seen in [OpenAI's ChatGPT](https://openai.com/index/introducing-deep-research/) and [Google's Gemini](https://gemini.google/overview/deep-research/), but built with privacy and user control at its core.
|
||||
|
||||
<Aside type="note">
|
||||
**This tool is not yet available.** We are building `DeepResearch` to handle complex queries that would normally take a human hours of work.
|
||||
</Aside>
|
||||
|
||||
### How It Will Work
|
||||
|
||||
Unlike a simple web search that returns a list of links, `DeepResearch` will understand your goal, create a research plan, execute it, and deliver a final, synthesized document with citations.
|
||||
|
||||
### Planned Core Features:
|
||||
- **Autonomous Multi-Step Research**: Deconstructs complex questions into logical steps and executes them.
|
||||
- **Comprehensive Source Analysis**: Can read and synthesize information from web pages, PDFs, and other documents.
|
||||
- **Structured Report Generation**: Delivers well-organized reports, not just chat messages.
|
||||
- **Full Transparency**: Cites all sources and shows its work, so you can verify the information.
|
||||
- **Local-First Privacy**: Conduct research using local models to ensure your queries and findings remain private.
|
||||
|
||||
34
website/src/content/products/tools/search.mdx
Normal file
@ -0,0 +1,34 @@
---
title: Search
description: A native search tool that gives you answers, not just links, with complete privacy.
sidebar:
  order: 2
banner:
  content: 'In Development: This tool is planned and not yet available.'
---
import { Aside, Card, CardGrid } from '@astrojs/starlight/components';

## Answers, Not Just Links

`Search` is a planned native tool for Jan that rethinks web search. Instead of just giving you a list of links to sift through, it understands your question, scours the web, and provides a direct, synthesized answer with sources cited.

Think of it as a private, self-hosted alternative to services like Perplexity.ai, integrated directly into your AI assistant.

<Aside type="note">
**This tool is not yet available.** We are building `Search` to be the default way Jan accesses real-time information from the web.
</Aside>

### How It's Different
- **Privacy-First**: Your search queries are processed locally and anonymized. What you search for is your business.
- **Direct Answers**: Get a concise, accurate answer compiled from the best sources, not just ten blue links.
- **Cited Sources**: Every piece of information is backed by a verifiable source, so you can trust the answer or dig deeper.
- **Conversational Follow-up**: Ask follow-up questions in a natural way, and Jan will maintain context.

### Planned Core Features:
- **Real-Time Information**: Access up-to-the-minute news, data, and events.
- **Source Verification**: Prioritizes authoritative and reliable sources.
- **Customizable Focus**: Tailor your search to specific domains like academic papers, news, or code repositories.
- **Seamless Integration**: Works hand-in-hand with other tools like `DeepResearch` and `BrowserUse` for more complex tasks.
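The "direct answer plus cited sources" shape described above could look roughly like this (hypothetical TypeScript types and helpers; nothing here is implemented yet):

```ts
// Hypothetical sketch of the planned Search result shape (not implemented).
interface Citation {
  title: string;
  url: string; // verifiable source backing part of the answer
}

interface SearchAnswer {
  query: string;
  answer: string;            // concise synthesized answer, not a list of links
  citations: Citation[];     // every claim traceable to a source
  followUpContext: string[]; // kept so follow-up questions stay conversational
}

// Assumed helper (hypothetical): queries would be processed locally and anonymized
// before any network request is made.
declare function searchTheWeb(query: string, context?: string[]): Promise<SearchAnswer>;

// Usage sketch: ask once, then follow up while keeping context.
async function example(): Promise<void> {
  const first = await searchTheWeb('What changed in the latest Jan release?');
  const followUp = await searchTheWeb('Does it run on Linux?', first.followUpContext);
  console.log(followUp.answer, followUp.citations);
}
```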
---
@ -1,13 +1,14 @@
 ---
-// Custom Products page with StarlightPage and custom sidebar
+// Products index page with custom sidebar for root deployment
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import ProductsContent from '../content/docs/products/index.mdx';
+import { Content } from '../../content/products/index.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Products',
-    description: 'AI that runs where you need it, how you need it'
+    description: 'AI that runs where you need it, how you need it',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -15,14 +16,7 @@ import ProductsContent from '../content/docs/products/index.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -30,6 +24,13 @@ import ProductsContent from '../content/docs/products/index.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,5 +43,5 @@ import ProductsContent from '../content/docs/products/index.mdx';
       }
     ]}
 >
-  <ProductsContent />
+  <Content />
 </StarlightPage>
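Every page in this set uses the same shell: the compiled MDX module's named `Content` export is imported and rendered inside `StarlightPage`, with the page's `frontmatter` and a hand-built `sidebar` passed as props. A trimmed sketch of that shell (illustrative only; the sidebar group label and entries here are placeholders, and the real pages define the full Platforms/Models/Tools sidebar shown in the hunks above):

```astro
---
// Illustrative shell only — the actual pages in this commit carry a larger sidebar.
import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
// Astro-compiled MDX exposes a named `Content` component for the page body.
import { Content } from '../../content/products/index.mdx';
---

<StarlightPage
  frontmatter={{
    title: 'Products',
    description: 'AI that runs where you need it, how you need it',
    tableOfContents: false
  }}
  sidebar={[
    {
      label: 'Products',
      items: [
        { label: 'Overview', link: '/products/' },
        { label: 'Desktop', link: '/products/platforms/desktop/' }
      ]
    }
  ]}
>
  <Content />
</StarlightPage>
```

The six pages that follow repeat this shell; only the imported MDX path, `title`, and `description` change.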
@ -1,12 +1,13 @@
 ---
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import Content from '../../../content/docs/products/models/jan-nano.mdx';
+import { Content } from '../../../content/products/models/jan-nano.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Jan Nano',
-    description: 'Lightweight AI model for mobile and edge devices'
+    description: 'Lightweight AI model for mobile and edge devices',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -14,14 +15,7 @@ import Content from '../../../content/docs/products/models/jan-nano.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -29,6 +23,13 @@ import Content from '../../../content/docs/products/models/jan-nano.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,4 +43,4 @@ import Content from '../../../content/docs/products/models/jan-nano.mdx';
     ]}
 >
   <Content />
 </StarlightPage>
@ -1,12 +1,13 @@
 ---
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import Content from '../../../content/docs/products/models/jan-v1.mdx';
+import { Content } from '../../../content/products/models/jan-v1.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Jan v1',
-    description: 'Full-featured AI model for desktop and server deployment'
+    description: 'Full-featured AI model for desktop and server deployment',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -14,14 +15,7 @@ import Content from '../../../content/docs/products/models/jan-v1.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -29,6 +23,13 @@ import Content from '../../../content/docs/products/models/jan-v1.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,4 +43,4 @@ import Content from '../../../content/docs/products/models/jan-v1.mdx';
     ]}
 >
   <Content />
 </StarlightPage>
@ -1,12 +1,13 @@
 ---
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import Content from '../../../content/docs/products/platforms/desktop.mdx';
+import { Content } from '../../../content/products/platforms/desktop.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Desktop',
-    description: 'Jan Desktop application for Windows, macOS, and Linux'
+    description: 'Jan Desktop application for Windows, macOS, and Linux',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -14,14 +15,7 @@ import Content from '../../../content/docs/products/platforms/desktop.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -29,6 +23,13 @@ import Content from '../../../content/docs/products/platforms/desktop.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,4 +43,4 @@ import Content from '../../../content/docs/products/platforms/desktop.mdx';
     ]}
 >
   <Content />
 </StarlightPage>
@ -1,12 +1,13 @@
 ---
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import Content from '../../../content/docs/products/platforms/jan-ai.mdx';
+import { Content } from '../../../content/products/platforms/jan-ai.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Jan.ai',
-    description: 'Cloud-based Jan platform accessible from anywhere'
+    description: 'Cloud-based Jan platform accessible from anywhere',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -14,14 +15,7 @@ import Content from '../../../content/docs/products/platforms/jan-ai.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -29,6 +23,13 @@ import Content from '../../../content/docs/products/platforms/jan-ai.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,4 +43,4 @@ import Content from '../../../content/docs/products/platforms/jan-ai.mdx';
     ]}
 >
   <Content />
 </StarlightPage>
@ -1,12 +1,13 @@
 ---
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import Content from '../../../content/docs/products/platforms/mobile.mdx';
+import { Content } from '../../../content/products/platforms/mobile.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Mobile',
-    description: 'Jan mobile app for iOS and Android devices'
+    description: 'Jan mobile app for iOS and Android devices',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -14,14 +15,7 @@ import Content from '../../../content/docs/products/platforms/mobile.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -29,6 +23,13 @@ import Content from '../../../content/docs/products/platforms/mobile.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,4 +43,4 @@ import Content from '../../../content/docs/products/platforms/mobile.mdx';
     ]}
 >
   <Content />
 </StarlightPage>
@ -1,12 +1,13 @@
 ---
 import StarlightPage from '@astrojs/starlight/components/StarlightPage.astro';
-import Content from '../../../content/docs/products/platforms/server.mdx';
+import { Content } from '../../../content/products/platforms/server.mdx';
 ---

 <StarlightPage
   frontmatter={{
     title: 'Server',
-    description: 'Jan server deployment for enterprise and team environments'
+    description: 'Jan server deployment for enterprise and team environments',
+    tableOfContents: false
   }}
   sidebar={[
     {
@ -14,14 +15,7 @@ import Content from '../../../content/docs/products/platforms/server.mdx';
       items: [
         { label: 'Overview', link: '/products/' },
-        {
-          label: 'Models',
-          items: [
-            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
-            { label: 'Jan v1', link: '/products/models/jan-v1/' },
-          ]
-        },
         {
           label: 'Platforms',
           items: [
             { label: 'Desktop', link: '/products/platforms/desktop/' },
             { label: 'Jan.ai', link: '/products/platforms/jan-ai/' },
@ -29,6 +23,13 @@ import Content from '../../../content/docs/products/platforms/server.mdx';
             { label: 'Server', link: '/products/platforms/server/' },
           ]
         },
+        {
+          label: 'Models',
+          items: [
+            { label: 'Jan Nano', link: '/products/models/jan-nano/' },
+            { label: 'Jan v1', link: '/products/models/jan-v1/' },
+          ]
+        },
         {
           label: 'Tools',
           items: [
@ -42,4 +43,4 @@ import Content from '../../../content/docs/products/platforms/server.mdx';
     ]}
 >
   <Content />
 </StarlightPage>
Some files were not shown because too many files have changed in this diff.