Merge branch 'dev' into blog/add-deepresearch-piece
23
.github/ISSUE_TEMPLATE/3-epic.md
vendored
@ -1,12 +1,27 @@
|
||||
---
|
||||
name: 🌟 Epic
|
||||
about: Major building block that advances Jan's goals
|
||||
about: User stories and specs
|
||||
title: 'epic: '
|
||||
type: Epic
|
||||
---
|
||||
|
||||
## Goal
|
||||
## User Stories
|
||||
|
||||
## Tasklist
|
||||
- As a [user type], I can [do something] so that [outcome]
|
||||
|
||||
## Out of scope
|
||||
## Not in scope
|
||||
|
||||
-
|
||||
|
||||
## User Flows & Designs
|
||||
|
||||
- Key user flows
|
||||
- Figma link
|
||||
- Edge cases
|
||||
- Error states
|
||||
|
||||
## Engineering Decisions
|
||||
|
||||
- **Technical Approach:** Brief outline of the solution.
|
||||
- **Key Trade-offs:** What’s been considered/rejected and why.
|
||||
- **Dependencies:** APIs, services, libraries, teams.
|
||||
|
||||
31
.github/ISSUE_TEMPLATE/4-goal.md
vendored
@ -1,13 +1,38 @@
|
||||
---
|
||||
name: 🎯 Goal
|
||||
about: External communication of Jan's roadmap and objectives
|
||||
about: Roadmap goals for our users
|
||||
title: 'goal: '
|
||||
type: Goal
|
||||
---
|
||||
|
||||
## Goal
|
||||
|
||||
## Tasklist
|
||||
> Why are we doing this? 1 liner value proposition
|
||||
|
||||
## Out of scope
|
||||
_e.g. Make onboarding to Jan 3x easier_
|
||||
|
||||
## Success Criteria
|
||||
|
||||
> When do we consider this done? Limit to 3.
|
||||
|
||||
1. _e.g. Redesign onboarding flow to remove redundant steps._
|
||||
2. _e.g. Add a “getting started” guide_
|
||||
3. _e.g. Make local model setup more “normie” friendly_
|
||||
|
||||
## Non Goals
|
||||
|
||||
> What is out of scope?
|
||||
|
||||
- _e.g. Take advanced users through customizing settings_
|
||||
|
||||
## User research (if any)
|
||||
|
||||
> Links to user messages and interviews
|
||||
|
||||
## Design inspo
|
||||
|
||||
> Links
|
||||
|
||||
## Open questions
|
||||
|
||||
> What are we not sure about?
|
||||
|
||||
2
.github/workflows/jan-docs.yml
vendored
@ -76,7 +76,7 @@ jobs:
|
||||
Preview URL: ${{ steps.deployCloudflarePages.outputs.url }}
|
||||
|
||||
- name: Publish to Cloudflare Pages Production
|
||||
if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev')
|
||||
if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && startsWith(github.ref, 'refs/heads/release/'))
|
||||
uses: cloudflare/pages-action@v1
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
|
||||
2
.github/workflows/jan-linter-and-test.yml
vendored
@ -134,7 +134,7 @@ jobs:
|
||||
|
||||
test-on-windows-pr:
|
||||
if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch'
|
||||
runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'windows-latest' || 'WINDOWS-11' }}
|
||||
runs-on: 'windows-latest'
|
||||
steps:
|
||||
- name: Getting the repo
|
||||
uses: actions/checkout@v3
|
||||
|
||||
20
.github/workflows/jan-tauri-build-flatpak.yaml
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
name: Tauri Builder Flatpak
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version to build. For example: 0.6.8'
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
|
||||
build-linux-x64:
|
||||
uses: ./.github/workflows/template-tauri-build-linux-x64-flatpak.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
ref: ${{ github.ref }}
|
||||
public_provider: none
|
||||
channel: stable
|
||||
new_version: ${{ inputs.version }}
|
||||
disable_updater: true
|
||||
22
.github/workflows/jan-tauri-build-nightly.yaml
vendored
@ -12,10 +12,31 @@ on:
|
||||
- none
|
||||
- aws-s3
|
||||
default: none
|
||||
disable_updater:
|
||||
type: boolean
|
||||
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||
default: false
|
||||
pull_request:
|
||||
branches:
|
||||
- release/**
|
||||
- dev
|
||||
paths:
|
||||
- '.github/workflows/jan-tauri-build-nightly.yaml'
|
||||
- '.github/workflows/template-get-update-version.yml'
|
||||
- '.github/workflows/template-tauri-build-macos.yml'
|
||||
- '.github/workflows/template-tauri-build-windows-x64.yml'
|
||||
- '.github/workflows/template-tauri-build-linux-x64.yml'
|
||||
- '.github/workflows/template-noti-discord-and-update-url-readme.yml'
|
||||
- 'src-tauri/**'
|
||||
- 'core/**'
|
||||
- 'web-app/**'
|
||||
- 'extensions/**'
|
||||
- 'scripts/**'
|
||||
- 'pre-install/**'
|
||||
- 'Makefile'
|
||||
- 'package.json'
|
||||
- 'mise.toml'
|
||||
|
||||
|
||||
jobs:
|
||||
set-public-provider:
|
||||
@ -85,6 +106,7 @@ jobs:
|
||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||
channel: nightly
|
||||
cortex_api_port: '39261'
|
||||
disable_updater: ${{ github.event.inputs.disable_updater == 'true' }}
|
||||
|
||||
sync-temp-to-latest:
|
||||
needs:
|
||||
|
||||
25
.github/workflows/jan-tauri-build.yaml
vendored
@ -32,6 +32,7 @@ jobs:
|
||||
name: "${{ env.VERSION }}"
|
||||
draft: true
|
||||
prerelease: false
|
||||
generate_release_notes: true
|
||||
|
||||
build-macos:
|
||||
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||
@ -119,27 +120,3 @@ jobs:
|
||||
asset_path: ./latest.json
|
||||
asset_name: latest.json
|
||||
asset_content_type: text/json
|
||||
|
||||
update_release_draft:
|
||||
needs: [build-macos, build-windows-x64, build-linux-x64]
|
||||
permissions:
|
||||
# write permission is required to create a github release
|
||||
contents: write
|
||||
# write permission is required for autolabeler
|
||||
# otherwise, read permission is required at least
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# (Optional) GitHub Enterprise requires GHE_HOST variable set
|
||||
#- name: Set GHE_HOST
|
||||
# run: |
|
||||
# echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
|
||||
|
||||
# Drafts your next Release notes as Pull Requests are merged into "master"
|
||||
- uses: release-drafter/release-drafter@v5
|
||||
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
|
||||
# with:
|
||||
# config-name: my-config.yml
|
||||
# disable-autolabeler: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
164
.github/workflows/template-tauri-build-linux-x64-flatpak.yml
vendored
Normal file
@ -0,0 +1,164 @@
|
||||
name: tauri-build-linux-x64-flatpak
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
ref:
|
||||
required: true
|
||||
type: string
|
||||
default: 'refs/heads/main'
|
||||
public_provider:
|
||||
required: true
|
||||
type: string
|
||||
default: none
|
||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||
new_version:
|
||||
required: true
|
||||
type: string
|
||||
default: ''
|
||||
cortex_api_port:
|
||||
required: false
|
||||
type: string
|
||||
default: ''
|
||||
upload_url:
|
||||
required: false
|
||||
type: string
|
||||
default: ''
|
||||
channel:
|
||||
required: true
|
||||
type: string
|
||||
default: 'nightly'
|
||||
description: 'The channel to use for this job'
|
||||
disable_updater:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||
secrets:
|
||||
DELTA_AWS_S3_BUCKET_NAME:
|
||||
required: false
|
||||
DELTA_AWS_ACCESS_KEY_ID:
|
||||
required: false
|
||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||
required: false
|
||||
TAURI_SIGNING_PRIVATE_KEY:
|
||||
required: false
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||
required: false
|
||||
jobs:
|
||||
build-linux-x64:
|
||||
runs-on: ubuntu-22.04
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Getting the repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
|
||||
- name: Free Disk Space Before Build
|
||||
run: |
|
||||
echo "Disk space before cleanup:"
|
||||
df -h
|
||||
sudo rm -rf /usr/local/.ghcup
|
||||
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
||||
sudo rm -rf /usr/local/lib/android/sdk/ndk
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo apt-get clean
|
||||
echo "Disk space after cleanup:"
|
||||
df -h
|
||||
|
||||
- name: Installing node
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Install jq
|
||||
uses: dcarbone/install-jq-action@v2.0.1
|
||||
|
||||
- name: Install ctoml
|
||||
run: |
|
||||
cargo install ctoml
|
||||
|
||||
- name: Install Tauri dependencies
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
|
||||
|
||||
- name: Update app version base public_provider
|
||||
run: |
|
||||
echo "Version: ${{ inputs.new_version }}"
|
||||
# Update tauri.conf.json
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
||||
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||
fi
|
||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||
mv /tmp/package.json web-app/package.json
|
||||
|
||||
# Temporarily enable devtool on prod build
|
||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||
cat ./src-tauri/Cargo.toml
|
||||
|
||||
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||
cat ./src-tauri/Cargo.toml
|
||||
|
||||
# Change app name for beta and nightly builds
|
||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||
|
||||
chmod +x .github/scripts/rename-tauri-app.sh
|
||||
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||
|
||||
cat ./src-tauri/tauri.conf.json
|
||||
|
||||
# Update Cargo.toml
|
||||
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||
echo "------------------"
|
||||
cat ./src-tauri/Cargo.toml
|
||||
|
||||
chmod +x .github/scripts/rename-workspace.sh
|
||||
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||
cat ./package.json
|
||||
fi
|
||||
- name: Build app
|
||||
run: |
|
||||
make build
|
||||
|
||||
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
|
||||
yarn tauri signer sign \
|
||||
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
|
||||
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
|
||||
"$APP_IMAGE"
|
||||
|
||||
env:
|
||||
RELEASE_CHANNEL: '${{ inputs.channel }}'
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
|
||||
# Publish app
|
||||
|
||||
## Artifacts, for dev and test
|
||||
- name: Upload Artifact
|
||||
if: inputs.public_provider != 'github'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-deb
|
||||
path: ./src-tauri/target/release/bundle/deb/*.deb
|
||||
|
||||
- name: Upload Artifact
|
||||
if: inputs.public_provider != 'github'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage
|
||||
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||
|
||||
@ -28,6 +28,11 @@ on:
|
||||
type: string
|
||||
default: 'nightly'
|
||||
description: 'The channel to use for this job'
|
||||
disable_updater:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||
secrets:
|
||||
DELTA_AWS_S3_BUCKET_NAME:
|
||||
required: false
|
||||
@ -156,7 +161,7 @@ jobs:
|
||||
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||
|
||||
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
|
||||
# Publish app
|
||||
|
||||
## Artifacts, for dev and test
|
||||
|
||||
34
.gitignore
vendored
@ -1,46 +1,22 @@
|
||||
.idea
|
||||
.env
|
||||
.idea
|
||||
|
||||
# Jan inference
|
||||
error.log
|
||||
node_modules
|
||||
*.tgz
|
||||
!charts/server/charts/*.tgz
|
||||
dist
|
||||
build
|
||||
.DS_Store
|
||||
electron/renderer
|
||||
electron/models
|
||||
electron/docs
|
||||
electron/engines
|
||||
electron/themes
|
||||
electron/playwright-report
|
||||
server/pre-install
|
||||
package-lock.json
|
||||
coverage
|
||||
*.log
|
||||
core/lib/**
|
||||
|
||||
# Turborepo
|
||||
.turbo
|
||||
electron/test-data
|
||||
electron/test-results
|
||||
core/test_results.html
|
||||
coverage
|
||||
.yarn
|
||||
.yarnrc
|
||||
test_results.html
|
||||
*.tsbuildinfo
|
||||
electron/shared/**
|
||||
test_results.html
|
||||
|
||||
# docs
|
||||
docs/yarn.lock
|
||||
electron/.version.bak
|
||||
src-tauri/binaries/engines/cortex.llamacpp
|
||||
src-tauri/resources/themes
|
||||
src-tauri/resources/lib
|
||||
src-tauri/Cargo.lock
|
||||
src-tauri/icons
|
||||
!src-tauri/icons/icon.png
|
||||
src-tauri/gen/apple
|
||||
@ -75,5 +51,9 @@ docs/.next/
|
||||
**/yarn-error.log*
|
||||
**/pnpm-debug.log*
|
||||
|
||||
# Combined output for local testing
|
||||
combined-output/
|
||||
## cargo
|
||||
target
|
||||
Cargo.lock
|
||||
|
||||
## test
|
||||
test-data
|
||||
|
||||
4
Makefile
@ -26,6 +26,7 @@ else ifeq ($(shell uname -s),Linux)
|
||||
chmod +x src-tauri/build-utils/*
|
||||
endif
|
||||
yarn install
|
||||
yarn build:tauri:plugin:api
|
||||
yarn build:core
|
||||
yarn build:extensions
|
||||
|
||||
@ -43,6 +44,9 @@ test: lint
|
||||
yarn download:bin
|
||||
yarn download:lib
|
||||
yarn test
|
||||
yarn copy:assets:tauri
|
||||
yarn build:icon
|
||||
cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features test-tauri -- --test-threads=1
|
||||
|
||||
# Builds and publishes the app
|
||||
build-and-publish: install-and-build
|
||||
|
||||
@ -120,6 +120,7 @@ mise dev # runs the full development setup
|
||||
|
||||
```bash
|
||||
yarn install
|
||||
yarn build:tauri:plugin:api
|
||||
yarn build:core
|
||||
yarn build:extensions
|
||||
yarn dev
|
||||
|
||||
@ -1,9 +0,0 @@
|
||||
[Desktop Entry]
|
||||
Name=Jan
|
||||
Comment=Local AI Assistant that runs 100% offline
|
||||
Exec=run.sh
|
||||
Icon=ai.menlo.jan
|
||||
Type=Application
|
||||
Categories=Development;
|
||||
Keywords=AI;Assistant;LLM;ChatGPT;Local;Offline;
|
||||
StartupNotify=true
|
||||
@ -1,42 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<component type="desktop-application">
|
||||
<id>ai.menlo.jan</id>
|
||||
<metadata_license>FSFAP</metadata_license>
|
||||
<project_license>AGPL-3.0-only</project_license>
|
||||
<name>Jan</name>
|
||||
<summary>Local AI Assistant that runs 100% offline on your device</summary>
|
||||
|
||||
<description>
|
||||
<p>
|
||||
Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for anyone to download and run LLMs and use AI with full control and privacy.
|
||||
</p>
|
||||
<p>Features:</p>
|
||||
<ul>
|
||||
<li>Model Library with popular LLMs like Llama, Gemma, Mistral, or Qwen</li>
|
||||
<li>Connect to Remote AI APIs like Groq and OpenRouter</li>
|
||||
<li>Local API Server with OpenAI-equivalent API</li>
|
||||
<li>Extensions for customizing Jan</li>
|
||||
</ul>
|
||||
</description>
|
||||
|
||||
<launchable type="desktop-id">ai.menlo.jan.desktop</launchable>
|
||||
|
||||
<screenshots>
|
||||
<screenshot type="default">
|
||||
<image>https://catalog.jan.ai/flatpak/demo.gif</image>
|
||||
</screenshot>
|
||||
</screenshots>
|
||||
|
||||
<url type="homepage">https://jan.ai/</url>
|
||||
<url type="bugtracker">https://github.com/menloresearch/jan/issues</url>
|
||||
|
||||
<content_rating type="oars-1.1" />
|
||||
|
||||
<releases>
|
||||
<release version="0.5.12" date="2024-01-02">
|
||||
<description>
|
||||
<p>Latest stable release of Jan AI</p>
|
||||
</description>
|
||||
</release>
|
||||
</releases>
|
||||
</component>
|
||||
BIN
docs/public/assets/images/changelog/gpt-oss-serper.png
Normal file
|
After Width: | Height: | Size: 57 KiB |
BIN
docs/public/assets/images/changelog/jupyter5.png
Normal file
|
After Width: | Height: | Size: 947 KiB |
BIN
docs/public/assets/videos/mcpjupyter.mp4
Normal file
@ -35,28 +35,28 @@ const socials = [
|
||||
]
|
||||
|
||||
const menus = [
|
||||
{
|
||||
name: 'Product',
|
||||
child: [
|
||||
{
|
||||
menu: 'Download',
|
||||
path: '/download',
|
||||
},
|
||||
{
|
||||
menu: 'Changelog',
|
||||
path: '/changelog',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'For Developers',
|
||||
child: [
|
||||
{
|
||||
menu: 'Documentation',
|
||||
path: '/docs',
|
||||
},
|
||||
],
|
||||
},
|
||||
// {
|
||||
// name: 'Product',
|
||||
// child: [
|
||||
// {
|
||||
// menu: 'Download',
|
||||
// path: '/download',
|
||||
// },
|
||||
// {
|
||||
// menu: 'Changelog',
|
||||
// path: '/changelog',
|
||||
// },
|
||||
// ],
|
||||
// },
|
||||
// {
|
||||
// name: 'For Developers',
|
||||
// child: [
|
||||
// {
|
||||
// menu: 'Documentation',
|
||||
// path: '/docs',
|
||||
// },
|
||||
// ],
|
||||
// },
|
||||
{
|
||||
name: 'Community',
|
||||
child: [
|
||||
@ -71,7 +71,7 @@ const menus = [
|
||||
external: true,
|
||||
},
|
||||
{
|
||||
menu: 'Twitter',
|
||||
menu: 'X/Twitter',
|
||||
path: 'https://twitter.com/jandotai',
|
||||
external: true,
|
||||
},
|
||||
@ -86,8 +86,8 @@ const menus = [
|
||||
name: 'Company',
|
||||
child: [
|
||||
{
|
||||
menu: 'About',
|
||||
path: '/about',
|
||||
menu: 'Menlo',
|
||||
path: 'https://menlo.ai',
|
||||
},
|
||||
{
|
||||
menu: 'Blog',
|
||||
@ -158,8 +158,8 @@ export default function Footer() {
|
||||
|
||||
return (
|
||||
<div className="flex-shrink-0 relative overflow-hidden w-full">
|
||||
<div className="grid grid-cols-2 gap-8 md:grid-cols-2 lg:grid-cols-6">
|
||||
<div className="col-span-2">
|
||||
<div className="grid grid-cols-2 gap-8 md:grid-cols-2 lg:grid-cols-12">
|
||||
<div className="col-span-2 lg:col-span-3">
|
||||
<div className="flex items-center space-x-2 mb-3">
|
||||
<LogoMark />
|
||||
<h2 className="text-lg font-semibold dark:text-white text-black">
|
||||
@ -209,9 +209,10 @@ export default function Footer() {
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="hidden lg:block lg:col-span-3"></div>
|
||||
{menus.map((menu, i) => {
|
||||
return (
|
||||
<div key={i} className="lg:text-right">
|
||||
<div key={i} className="lg:text-right lg:col-span-3">
|
||||
<h2 className="mb-2 font-bold dark:text-gray-300 text-black">
|
||||
{menu.name}
|
||||
</h2>
|
||||
|
||||
@ -33,10 +33,6 @@
|
||||
"layout": "raw"
|
||||
}
|
||||
},
|
||||
"about": {
|
||||
"type": "page",
|
||||
"title": "About"
|
||||
},
|
||||
"blog": {
|
||||
"type": "page",
|
||||
"title": "Blog",
|
||||
|
||||
92
docs/src/pages/changelog/2025-08-07-gpt-oss.mdx
Normal file
@ -0,0 +1,92 @@
|
||||
---
|
||||
title: "Jan v0.6.7: OpenAI gpt-oss support and enhanced MCP tutorials"
|
||||
version: 0.6.7
|
||||
description: "Full support for OpenAI's open-weight gpt-oss models and new Jupyter MCP integration guide"
|
||||
date: 2025-08-07
|
||||
ogImage: "/assets/images/changelog/gpt-oss-serper.png"
|
||||
---
|
||||
|
||||
import ChangelogHeader from "@/components/Changelog/ChangelogHeader"
|
||||
import { Callout } from 'nextra/components'
|
||||
|
||||
|
||||
<ChangelogHeader title="Jan v0.6.7: OpenAI gpt-oss support and enhanced MCP tutorials" date="2025-08-07" ogImage="/assets/images/changelog/gpt-oss-serper.png"/>
|
||||
|
||||
## Highlights 🎉
|
||||
|
||||
Jan v0.6.7 brings full support for OpenAI's groundbreaking open-weight models - gpt-oss-120b and gpt-oss-20b - along with enhanced MCP documentation and critical bug fixes for reasoning models.
|
||||
|
||||
### 🚀 OpenAI gpt-oss Models Now Supported
|
||||
|
||||
Jan now fully supports OpenAI's first open-weight language models since GPT-2:
|
||||
|
||||
**gpt-oss-120b:**
|
||||
- 117B total parameters, 5.1B active per token
|
||||
- Runs efficiently on a single 80GB GPU
|
||||
- Near-parity with OpenAI o4-mini on reasoning benchmarks
|
||||
- Exceptional tool use and function calling capabilities
|
||||
|
||||
**gpt-oss-20b:**
|
||||
- 21B total parameters, 3.6B active per token
|
||||
- Runs on edge devices with just 16GB memory
|
||||
- Similar performance to OpenAI o3-mini
|
||||
- Perfect for local inference and rapid iteration
|
||||
|
||||
<Callout type="info">
|
||||
Both models use Mixture-of-Experts (MoE) architecture and support context lengths up to 128k tokens. They come natively quantized in MXFP4 format for efficient memory usage.
|
||||
</Callout>
|
||||
|
||||
### 🎮 GPU Layer Configuration
|
||||
|
||||
Due to the models' size, you may need to adjust GPU layers based on your hardware:
|
||||
|
||||

|
||||
|
||||
Start with default settings and reduce layers if you encounter out-of-memory errors. Each system requires different configurations based on available VRAM.
|
||||
|
||||
### 📚 New Jupyter MCP Tutorial
|
||||
|
||||
We've added comprehensive documentation for the Jupyter MCP integration:
|
||||
- Real-time notebook interaction and code execution
|
||||
- Step-by-step setup with Python environment management
|
||||
- Example workflows for data analysis and visualization
|
||||
- Security best practices for code execution
|
||||
- Performance optimization tips
|
||||
|
||||
The tutorial demonstrates how to turn Jan into a capable data science partner that can execute analysis, create visualizations, and iterate based on actual results.
|
||||
|
||||
### 🔧 Bug Fixes
|
||||
|
||||
Critical fixes for reasoning model support:
|
||||
- **Fixed reasoning text inclusion**: Reasoning text is no longer incorrectly included in chat completion requests
|
||||
- **Fixed thinking block display**: gpt-oss thinking blocks now render properly in the UI
|
||||
- **Fixed React state loop**: Resolved infinite re-render issue with useMediaQuery hook
|
||||
|
||||
## Using gpt-oss Models
|
||||
|
||||
### Download from Hub
|
||||
|
||||
All gpt-oss GGUF variants are available in the Jan Hub. Simply search for "gpt-oss" and choose the quantization that fits your hardware:
|
||||
|
||||
### Model Capabilities
|
||||
|
||||
Both models excel at:
|
||||
- **Reasoning tasks**: Competition coding, mathematics, and problem solving
|
||||
- **Tool use**: Web search, code execution, and function calling
|
||||
- **CoT reasoning**: Full chain-of-thought visibility for monitoring
|
||||
- **Structured outputs**: JSON schema enforcement and grammar constraints
|
||||
|
||||
### Performance Tips
|
||||
|
||||
- **Memory requirements**: gpt-oss-120b needs ~80GB, gpt-oss-20b needs ~16GB
|
||||
- **GPU layers**: Adjust based on your VRAM (start high, reduce if needed)
|
||||
- **Context size**: Both models support up to 128k tokens
|
||||
- **Quantization**: Choose lower quantization for smaller memory footprint
|
||||
|
||||
## Coming Next
|
||||
|
||||
We're continuing to optimize performance for large models, expand MCP integrations, and improve the overall experience for running cutting-edge open models locally.
|
||||
|
||||
Update your Jan or [download the latest](https://jan.ai/).
|
||||
|
||||
For the complete list of changes, see the [GitHub release notes](https://github.com/menloresearch/jan/releases/tag/v0.6.7).
|
||||
BIN
docs/src/pages/docs/_assets/gpt-oss-tools.png
Normal file
|
After Width: | Height: | Size: 45 KiB |
BIN
docs/src/pages/docs/_assets/gpt-oss.png
Normal file
|
After Width: | Height: | Size: 39 KiB |
BIN
docs/src/pages/docs/_assets/gpt5-add.png
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
docs/src/pages/docs/_assets/gpt5-chat.png
Normal file
|
After Width: | Height: | Size: 81 KiB |
BIN
docs/src/pages/docs/_assets/gpt5-msg.png
Normal file
|
After Width: | Height: | Size: 136 KiB |
BIN
docs/src/pages/docs/_assets/gpt5-msg2.png
Normal file
|
After Width: | Height: | Size: 264 KiB |
BIN
docs/src/pages/docs/_assets/gpt5-msg3.png
Normal file
|
After Width: | Height: | Size: 391 KiB |
BIN
docs/src/pages/docs/_assets/gpt5-tools.png
Normal file
|
After Width: | Height: | Size: 41 KiB |
BIN
docs/src/pages/docs/_assets/jupyter.png
Normal file
|
After Width: | Height: | Size: 56 KiB |
BIN
docs/src/pages/docs/_assets/jupyter1.png
Normal file
|
After Width: | Height: | Size: 307 KiB |
BIN
docs/src/pages/docs/_assets/jupyter2.png
Normal file
|
After Width: | Height: | Size: 35 KiB |
BIN
docs/src/pages/docs/_assets/jupyter3.png
Normal file
|
After Width: | Height: | Size: 50 KiB |
BIN
docs/src/pages/docs/_assets/jupyter4.png
Normal file
|
After Width: | Height: | Size: 80 KiB |
BIN
docs/src/pages/docs/_assets/jupyter5.png
Normal file
|
After Width: | Height: | Size: 947 KiB |
BIN
docs/src/pages/docs/_assets/openai-settings.png
Normal file
|
After Width: | Height: | Size: 131 KiB |
337
docs/src/pages/docs/mcp-examples/data-analysis/jupyter.mdx
Normal file
@ -0,0 +1,337 @@
|
||||
---
|
||||
title: Jupyter MCP
|
||||
description: Real-time Jupyter notebook interaction and code execution through MCP integration.
|
||||
keywords:
|
||||
[
|
||||
Jan,
|
||||
MCP,
|
||||
Model Context Protocol,
|
||||
Jupyter,
|
||||
data analysis,
|
||||
code execution,
|
||||
notebooks,
|
||||
Python,
|
||||
visualization,
|
||||
tool calling,
|
||||
GPT-5,
|
||||
OpenAI,
|
||||
]
|
||||
---
|
||||
|
||||
import { Callout } from 'nextra/components'
|
||||
|
||||
# Jupyter MCP
|
||||
|
||||
[Jupyter MCP Server](https://jupyter-mcp-server.datalayer.tech/) enables real-time interaction with Jupyter notebooks, allowing AI models to edit, execute, and document code for data analysis and visualization. Instead of just generating code suggestions, AI can actually run Python code and see the results.
|
||||
|
||||
This integration gives Jan the ability to execute analysis, create visualizations, and iterate based on actual results - turning your AI assistant into a capable data science partner.
|
||||
|
||||
<Callout type="info">
|
||||
**Breaking Change**: Version 0.11.0+ renamed `room` to `document`. Check the [release notes](https://jupyter-mcp-server.datalayer.tech/releases) for details.
|
||||
</Callout>
|
||||
|
||||
## Available Tools
|
||||
|
||||
The Jupyter MCP Server provides [12 comprehensive tools](https://jupyter-mcp-server.datalayer.tech/tools/):
|
||||
|
||||
### Core Operations
|
||||
- `append_execute_code_cell`: Add and run code cells at notebook end
|
||||
- `insert_execute_code_cell`: Insert and run code at specific positions
|
||||
- `execute_cell_simple_timeout`: Execute cells with timeout control
|
||||
- `execute_cell_streaming`: Long-running cells with progress updates
|
||||
- `execute_cell_with_progress`: Execute with timeout and monitoring
|
||||
|
||||
### Cell Management
|
||||
- `append_markdown_cell`: Add documentation cells
|
||||
- `insert_markdown_cell`: Insert markdown at specific positions
|
||||
- `delete_cell`: Remove cells from notebook
|
||||
- `overwrite_cell_source`: Update existing cell content
|
||||
|
||||
### Information & Reading
|
||||
- `get_notebook_info`: Retrieve notebook metadata
|
||||
- `read_cell`: Examine specific cell content
|
||||
- `read_all_cells`: Get complete notebook state
|
||||
|
||||
<Callout type="warning">
|
||||
The MCP connects to **one notebook at a time**, not multiple notebooks. Specify your target notebook in the configuration.
|
||||
</Callout>
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Jan with MCP enabled
|
||||
- Python 3.8+ with uv package manager
|
||||
- Docker installed
|
||||
- OpenAI API key for GPT-5 access
|
||||
- Basic understanding of Jupyter notebooks
|
||||
|
||||
## Setup
|
||||
|
||||
### Enable MCP
|
||||
|
||||
1. Go to **Settings** > **MCP Servers**
|
||||
2. Toggle **Allow All MCP Tool Permission** ON
|
||||
|
||||

|
||||
|
||||
### Install uv Package Manager
|
||||
|
||||
If you don't have uv installed:
|
||||
|
||||
```bash
|
||||
# macOS and Linux
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
# Windows
|
||||
powershell -c "irm https://astral.sh/uv/install.ps1 | iex"
|
||||
```
|
||||
|
||||
### Create Python Environment
|
||||
|
||||
Set up an isolated environment for Jupyter:
|
||||
|
||||
```bash
|
||||
# Create environment with Python 3.13
|
||||
uv venv .venv --python 3.13
|
||||
|
||||
# Activate environment
|
||||
source .venv/bin/activate # Linux/macOS
|
||||
# or
|
||||
.venv\Scripts\activate # Windows
|
||||
|
||||
# Install Jupyter dependencies
|
||||
uv pip install jupyterlab==4.4.1 jupyter-collaboration==4.0.2 ipykernel
|
||||
uv pip uninstall pycrdt datalayer_pycrdt
|
||||
uv pip install datalayer_pycrdt==0.12.17
|
||||
|
||||
# Add data science libraries
|
||||
uv pip install pandas numpy matplotlib altair
|
||||
```
|
||||
|
||||
### Start JupyterLab Server
|
||||
|
||||
Launch JupyterLab with authentication:
|
||||
|
||||
```bash
|
||||
jupyter lab --port 8888 --IdentityProvider.token heyheyyou --ip 0.0.0.0
|
||||
```
|
||||
|
||||

|
||||
|
||||
The server opens in your browser:
|
||||
|
||||

|
||||
|
||||
### Create Target Notebook
|
||||
|
||||
Create a new notebook named `for_jan.ipynb`:
|
||||
|
||||

|
||||
|
||||
### Configure MCP Server in Jan
|
||||
|
||||
Click `+` in MCP Servers section:
|
||||
|
||||
**Configuration for macOS/Windows:**
|
||||
- **Server Name**: `jupyter`
|
||||
- **Command**: `docker`
|
||||
- **Arguments**:
|
||||
```
|
||||
run -i --rm -e DOCUMENT_URL -e DOCUMENT_TOKEN -e DOCUMENT_ID -e RUNTIME_URL -e RUNTIME_TOKEN datalayer/jupyter-mcp-server:latest
|
||||
```
|
||||
- **Environment Variables**:
|
||||
- Key: `DOCUMENT_URL`, Value: `http://host.docker.internal:8888`
|
||||
- Key: `DOCUMENT_TOKEN`, Value: `heyheyyou`
|
||||
- Key: `DOCUMENT_ID`, Value: `for_jan.ipynb`
|
||||
- Key: `RUNTIME_URL`, Value: `http://host.docker.internal:8888`
|
||||
- Key: `RUNTIME_TOKEN`, Value: `heyheyyou`
|
||||
|
||||

|
||||
|
||||
## Using OpenAI's GPT-5
|
||||
|
||||
### Configure OpenAI Provider
|
||||
|
||||
Navigate to **Settings** > **Model Providers** > **OpenAI**:
|
||||
|
||||

|
||||
|
||||
### Add GPT-5 Model
|
||||
|
||||
Since GPT-5 is new, you'll need to manually add it to Jan:
|
||||
|
||||

|
||||
|
||||
<Callout type="info">
|
||||
**About GPT-5**: OpenAI's smartest, fastest, most useful model yet. It features built-in thinking capabilities, state-of-the-art performance across coding, math, and writing, and exceptional tool use abilities. GPT-5 automatically decides when to respond quickly versus when to think longer for expert-level responses.
|
||||
</Callout>
|
||||
|
||||
### Enable Tool Calling
|
||||
|
||||
Ensure tools are enabled for GPT-5:
|
||||
|
||||

|
||||
|
||||
## Usage
|
||||
|
||||
### Verify Tool Availability
|
||||
|
||||
Start a new chat with GPT-5. The tools bubble shows all available Jupyter operations:
|
||||
|
||||

|
||||
|
||||
### Initial Test
|
||||
|
||||
Start with establishing the notebook as your workspace:
|
||||
|
||||
```
|
||||
You have access to a jupyter notebook, please use it as our data analysis scratchpad. Let's start by printing "Hello Jan" in a new cell.
|
||||
```
|
||||
|
||||
GPT-5 creates and executes the code successfully:
|
||||
|
||||

|
||||
|
||||
### Advanced Data Analysis
|
||||
|
||||
Try a more complex task combining multiple operations:
|
||||
|
||||
```
|
||||
Generate synthetic data with numpy, move it to a pandas dataframe and create a pivot table, and then make a cool animated plot using matplotlib. Your use case will be sales analysis in the luxury fashion industry.
|
||||
```
|
||||
|
||||

|
||||
|
||||
Watch the complete output unfold:
|
||||
|
||||
<video width="100%" controls>
|
||||
<source src="/assets/videos/mcpjupyter.mp4" type="video/mp4" />
|
||||
Your browser does not support the video tag.
|
||||
</video>
|
||||
|
||||
## Example Prompts to Try
|
||||
|
||||
### Financial Analysis
|
||||
```
|
||||
Create a Monte Carlo simulation for portfolio risk analysis. Generate 10,000 scenarios, calculate VaR at 95% confidence, and visualize the distribution.
|
||||
```
|
||||
|
||||
### Time Series Forecasting
|
||||
```
|
||||
Generate synthetic time series data representing daily website traffic over 2 years with weekly seasonality and trend. Build an ARIMA model and forecast the next 30 days.
|
||||
```
|
||||
|
||||
### Machine Learning Pipeline
|
||||
```
|
||||
Build a complete classification pipeline: generate a dataset with 3 classes and 5 features, split the data, try multiple algorithms (RF, SVM, XGBoost), and create a comparison chart of their performance.
|
||||
```
|
||||
|
||||
### Interactive Dashboards
|
||||
```
|
||||
Create an interactive visualization using matplotlib widgets showing how changing interest rates affects loan payments over different time periods.
|
||||
```
|
||||
|
||||
### Statistical Testing
|
||||
```
|
||||
Generate two datasets representing A/B test results for an e-commerce site. Perform appropriate statistical tests and create visualizations to determine if the difference is significant.
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
<Callout type="warning">
|
||||
Multiple tools can quickly consume context windows, especially for local models. GPT-5's unified system with smart routing helps manage this, but local models may struggle with speed and context limitations.
|
||||
</Callout>
|
||||
|
||||
### Context Management
|
||||
- Each tool call adds to conversation history
|
||||
- 12 available tools means substantial system prompt overhead
|
||||
- Local models may need reduced tool sets for reasonable performance
|
||||
- Consider disabling unused tools to conserve context
|
||||
|
||||
### Cloud vs Local Trade-offs
|
||||
- **Cloud models (GPT-5)**: Handle multiple tools efficiently with large context windows
|
||||
- **Local models**: May require optimization, reduced tool sets, or smaller context sizes
|
||||
- **Hybrid approach**: Use cloud for complex multi-tool workflows, local for simple tasks
|
||||
|
||||
## Security Considerations
|
||||
|
||||
<Callout type="warning">
|
||||
MCP provides powerful capabilities but requires careful security practices.
|
||||
</Callout>
|
||||
|
||||
### Authentication Tokens
|
||||
- **Always use strong tokens** - avoid simple passwords
|
||||
- **Never commit tokens** to version control
|
||||
- **Rotate tokens regularly** for production use
|
||||
- **Use different tokens** for different environments
|
||||
|
||||
### Network Security
|
||||
- JupyterLab is network-accessible with `--ip 0.0.0.0`
|
||||
- Consider using `--ip 127.0.0.1` for local-only access
|
||||
- Implement firewall rules to restrict access
|
||||
- Use HTTPS in production environments
|
||||
|
||||
### Code Execution Risks
|
||||
- AI has full Python execution capabilities
|
||||
- Review generated code before execution
|
||||
- Use isolated environments for sensitive work
|
||||
- Monitor resource usage and set limits
|
||||
|
||||
### Data Privacy
|
||||
- Notebook content is processed by AI models
|
||||
- When using cloud models like GPT-5, data leaves your system
|
||||
- Keep sensitive data in secure environments
|
||||
- Consider model provider's data policies
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Environment Management
|
||||
- Use virtual environments for isolation
|
||||
- Document required dependencies
|
||||
- Version control your notebooks
|
||||
- Regular environment cleanup
|
||||
|
||||
### Performance Optimization
|
||||
- Start with simple operations
|
||||
- Monitor memory usage during execution
|
||||
- Close unused notebooks
|
||||
- Restart kernels when needed
|
||||
|
||||
### Effective Prompting
|
||||
- Be specific about desired outputs
|
||||
- Break complex tasks into steps
|
||||
- Ask for explanations with code
|
||||
- Request error handling in critical operations
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Connection Problems:**
|
||||
- Verify JupyterLab is running
|
||||
- Check token matches configuration
|
||||
- Confirm Docker can reach host
|
||||
- Test with curl to verify connectivity
|
||||
|
||||
**Execution Failures:**
|
||||
- Check Python package availability
|
||||
- Verify kernel is running
|
||||
- Look for syntax errors in generated code
|
||||
- Restart kernel if stuck
|
||||
|
||||
**Tool Calling Errors:**
|
||||
- Ensure model supports tool calling
|
||||
- Verify all 12 tools appear in chat
|
||||
- Check MCP server is active
|
||||
- Review Docker logs for errors
|
||||
|
||||
**API Rate Limits:**
|
||||
- Monitor OpenAI usage dashboard
|
||||
- Implement retry logic for transient errors
|
||||
- Consider fallback to local models
|
||||
- Cache results when possible
|
||||
|
||||
## Conclusion
|
||||
|
||||
The Jupyter MCP integration combined with GPT-5's advanced capabilities creates an exceptionally powerful data science environment. With GPT-5's built-in reasoning and expert-level intelligence, complex analyses that once required extensive manual coding can now be accomplished through natural conversation.
|
||||
|
||||
Whether you're exploring data, building models, or creating visualizations, this integration provides the computational power of Jupyter with the intelligence of GPT-5 - all within Jan's privacy-conscious interface.
|
||||
|
||||
Remember: with great computational power comes the responsibility to use it securely. Always validate generated code, use strong authentication, and be mindful of data privacy when using cloud-based models.
|
||||
@ -28,6 +28,8 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@janhq/core": "../../core/package.tgz",
|
||||
"@janhq/tauri-plugin-hardware-api": "link:../../src-tauri/plugins/tauri-plugin-hardware",
|
||||
"@janhq/tauri-plugin-llamacpp-api": "link:../../src-tauri/plugins/tauri-plugin-llamacpp",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-log": "^2.6.0",
|
||||
"fetch-retry": "^5.0.6",
|
||||
|
||||
@ -2,6 +2,7 @@ import { getJanDataFolderPath, fs, joinPath, events } from '@janhq/core'
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
import { getProxyConfig } from './util'
|
||||
import { dirname } from '@tauri-apps/api/path'
|
||||
import { getSystemInfo } from '@janhq/tauri-plugin-hardware-api'
|
||||
|
||||
// folder structure
|
||||
// <Jan's data folder>/llamacpp/backends/<backend_version>/<backend_type>
|
||||
@ -10,7 +11,7 @@ import { dirname } from '@tauri-apps/api/path'
|
||||
export async function listSupportedBackends(): Promise<
|
||||
{ version: string; backend: string }[]
|
||||
> {
|
||||
const sysInfo = await window.core.api.getSystemInfo()
|
||||
const sysInfo = await getSystemInfo()
|
||||
const os_type = sysInfo.os_type
|
||||
const arch = sysInfo.cpu.arch
|
||||
|
||||
@ -229,7 +230,7 @@ export async function downloadBackend(
|
||||
}
|
||||
|
||||
async function _getSupportedFeatures() {
|
||||
const sysInfo = await window.core.api.getSystemInfo()
|
||||
const sysInfo = await getSystemInfo()
|
||||
const features = {
|
||||
avx: sysInfo.cpu.extensions.includes('avx'),
|
||||
avx2: sysInfo.cpu.extensions.includes('avx2'),
|
||||
@ -289,7 +290,7 @@ async function _fetchGithubReleases(
|
||||
}
|
||||
|
||||
async function _isCudaInstalled(version: string): Promise<boolean> {
|
||||
const sysInfo = await window.core.api.getSystemInfo()
|
||||
const sysInfo = await getSystemInfo()
|
||||
const os_type = sysInfo.os_type
|
||||
|
||||
// not sure the reason behind this naming convention
|
||||
|
||||
@ -100,6 +100,13 @@ interface DeviceList {
|
||||
mem: number
|
||||
free: number
|
||||
}
|
||||
|
||||
interface GgufMetadata {
|
||||
version: number
|
||||
tensor_count: number
|
||||
metadata: Record<string, string>
|
||||
}
|
||||
|
||||
/**
|
||||
* Override the default app.log function to use Jan's logging system.
|
||||
* @param args
|
||||
@ -801,7 +808,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
}
|
||||
|
||||
private async generateApiKey(modelId: string, port: string): Promise<string> {
|
||||
const hash = await invoke<string>('generate_api_key', {
|
||||
const hash = await invoke<string>('plugin:llamacpp|generate_api_key', {
|
||||
modelId: modelId + port,
|
||||
apiSecret: this.apiSecret,
|
||||
})
|
||||
@ -1094,7 +1101,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
*/
|
||||
private async getRandomPort(): Promise<number> {
|
||||
try {
|
||||
const port = await invoke<number>('get_random_port')
|
||||
const port = await invoke<number>('plugin:llamacpp|get_random_port')
|
||||
return port
|
||||
} catch {
|
||||
logger.error('Unable to find a suitable port')
|
||||
@ -1165,7 +1172,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
const [version, backend] = cfg.version_backend.split('/')
|
||||
if (!version || !backend) {
|
||||
throw new Error(
|
||||
`Invalid version/backend format: ${cfg.version_backend}. Expected format: <version>/<backend>`
|
||||
"Initial setup for the backend failed due to a network issue. Please restart the app!"
|
||||
)
|
||||
}
|
||||
|
||||
@ -1272,7 +1279,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
|
||||
try {
|
||||
// TODO: add LIBRARY_PATH
|
||||
const sInfo = await invoke<SessionInfo>('load_llama_model', {
|
||||
const sInfo = await invoke<SessionInfo>('plugin:llamacpp|load_llama_model', {
|
||||
backendPath,
|
||||
libraryPath,
|
||||
args,
|
||||
@ -1292,7 +1299,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
const pid = sInfo.pid
|
||||
try {
|
||||
// Pass the PID as the session_id
|
||||
const result = await invoke<UnloadResult>('unload_llama_model', {
|
||||
const result = await invoke<UnloadResult>('plugin:llamacpp|unload_llama_model', {
|
||||
pid: pid,
|
||||
})
|
||||
|
||||
@ -1430,7 +1437,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
|
||||
private async findSessionByModel(modelId: string): Promise<SessionInfo> {
|
||||
try {
|
||||
let sInfo = await invoke<SessionInfo>('find_session_by_model', {
|
||||
let sInfo = await invoke<SessionInfo>('plugin:llamacpp|find_session_by_model', {
|
||||
modelId,
|
||||
})
|
||||
return sInfo
|
||||
@ -1449,7 +1456,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
throw new Error(`No active session found for model: ${opts.model}`)
|
||||
}
|
||||
// check if the process is alive
|
||||
const result = await invoke<boolean>('is_process_running', {
|
||||
const result = await invoke<boolean>('plugin:llamacpp|is_process_running', {
|
||||
pid: sessionInfo.pid,
|
||||
})
|
||||
if (result) {
|
||||
@ -1509,7 +1516,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
|
||||
override async getLoadedModels(): Promise<string[]> {
|
||||
try {
|
||||
let models: string[] = await invoke<string[]>('get_loaded_models')
|
||||
let models: string[] = await invoke<string[]>('plugin:llamacpp|get_loaded_models')
|
||||
return models
|
||||
} catch (e) {
|
||||
logger.error(e)
|
||||
@ -1532,7 +1539,7 @@ export default class llamacpp_extension extends AIEngine {
|
||||
const backendPath = await getBackendExePath(backend, version)
|
||||
const libraryPath = await joinPath([await this.getProviderPath(), 'lib'])
|
||||
try {
|
||||
const dList = await invoke<DeviceList[]>('get_devices', {
|
||||
const dList = await invoke<DeviceList[]>('plugin:llamacpp|get_devices', {
|
||||
backendPath,
|
||||
libraryPath,
|
||||
})
|
||||
@ -1591,4 +1598,15 @@ export default class llamacpp_extension extends AIEngine {
|
||||
override getChatClient(sessionId: string): any {
|
||||
throw new Error('method not implemented yet')
|
||||
}
|
||||
|
||||
private async loadMetadata(path: string): Promise<GgufMetadata> {
|
||||
try {
|
||||
const data = await invoke<GgufMetadata>('plugin:llamacpp|read_gguf_metadata', {
|
||||
path: path,
|
||||
})
|
||||
return data
|
||||
} catch (err) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -18,7 +18,7 @@ describe('Backend functions', () => {
|
||||
describe('listSupportedBackends', () => {
|
||||
it('should return supported backends for Windows x64', async () => {
|
||||
// Mock system info
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'windows',
|
||||
cpu: {
|
||||
arch: 'x86_64',
|
||||
@ -53,7 +53,7 @@ describe('Backend functions', () => {
|
||||
|
||||
it('should return CUDA backends with proper CPU instruction detection for Windows', async () => {
|
||||
// Mock system info with CUDA support and AVX512
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'windows',
|
||||
cpu: {
|
||||
arch: 'x86_64',
|
||||
@ -92,7 +92,7 @@ describe('Backend functions', () => {
|
||||
|
||||
it('should select appropriate CUDA backend based on CPU features - AVX2 only', async () => {
|
||||
// Mock system info with CUDA support but only AVX2
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'windows',
|
||||
cpu: {
|
||||
arch: 'x86_64',
|
||||
@ -131,7 +131,7 @@ describe('Backend functions', () => {
|
||||
|
||||
it('should select appropriate CUDA backend based on CPU features - no AVX', async () => {
|
||||
// Mock system info with CUDA support but no AVX
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'windows',
|
||||
cpu: {
|
||||
arch: 'x86_64',
|
||||
@ -171,7 +171,7 @@ describe('Backend functions', () => {
|
||||
|
||||
it('should return CUDA backends with proper CPU instruction detection for Linux', async () => {
|
||||
// Mock system info with CUDA support and AVX support
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'linux',
|
||||
cpu: {
|
||||
arch: 'x86_64',
|
||||
@ -210,7 +210,7 @@ describe('Backend functions', () => {
|
||||
})
|
||||
|
||||
it('should return supported backends for macOS arm64', async () => {
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'macos',
|
||||
cpu: {
|
||||
arch: 'aarch64',
|
||||
@ -261,7 +261,7 @@ describe('Backend functions', () => {
|
||||
|
||||
describe('getBackendExePath', () => {
|
||||
it('should return correct exe path for Windows', async () => {
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'windows',
|
||||
})
|
||||
|
||||
@ -289,7 +289,7 @@ describe('Backend functions', () => {
|
||||
})
|
||||
|
||||
it('should return correct exe path for Linux/macOS', async () => {
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'linux',
|
||||
})
|
||||
|
||||
|
||||
@ -175,7 +175,7 @@ describe('llamacpp_extension', () => {
|
||||
const { invoke } = await import('@tauri-apps/api/core')
|
||||
|
||||
// Mock system info for getBackendExePath
|
||||
window.core.api.getSystemInfo = vi.fn().mockResolvedValue({
|
||||
const getSystemInfo = vi.fn().mockResolvedValue({
|
||||
os_type: 'linux'
|
||||
})
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ Object.defineProperty(globalThis, 'window', {
|
||||
localStorage: localStorageMock,
|
||||
core: {
|
||||
api: {
|
||||
getSystemInfo: vi.fn(),
|
||||
// getSystemInfo: vi.fn(),
|
||||
},
|
||||
extensionManager: {
|
||||
getByName: vi.fn().mockReturnValue({
|
||||
@ -31,6 +31,10 @@ Object.defineProperty(globalThis, 'window', {
|
||||
},
|
||||
})
|
||||
|
||||
vi.mock('@janhq/tauri-plugin-hardware-api', () => ({
|
||||
getSystemInfo: vi.fn(),
|
||||
}));
|
||||
|
||||
// Mock Tauri invoke function
|
||||
vi.mock('@tauri-apps/api/core', () => ({
|
||||
invoke: vi.fn(),
|
||||
|
||||
49
flatpak/ai.jan.Jan.metainfo.xml
Normal file
@ -0,0 +1,49 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<component type="desktop-application">
|
||||
<id>ai.jan.Jan</id>
|
||||
<metadata_license>FSFAP</metadata_license>
|
||||
<project_license>Apache-2.0</project_license>
|
||||
<developer_name>Menlo Research</developer_name>
|
||||
<name>Jan</name>
|
||||
<icon type="stock">ai.jan.Jan</icon>
|
||||
<summary>Local AI Assistant that runs 100% offline on your device</summary>
|
||||
<categories>
|
||||
<category>Utility</category>
|
||||
<category>Education</category>
|
||||
<category>Chat</category>
|
||||
<category>Dictionary</category>
|
||||
</categories>
|
||||
<description>
|
||||
<p>
|
||||
Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for anyone to download and run LLMs and use AI with full control and privacy.
|
||||
</p>
|
||||
<p>Features:</p>
|
||||
<ul>
|
||||
<li> Local AI Models: Download and run large language models like Llama, Gemma, and Qwen directly from Hugging Face. All models run locally on your device.</li>
|
||||
<li>Cloud Integration: Connect to cloud providers such as OpenAI, Anthropic, Gemini, and Groq when needed. Easily switch between local and cloud models</li>
|
||||
<li>Custom Assistants: Set up specialized AI assistants for different tasks, such as writing, summarizing, or coding. Customize their tone and behavior.</li>
|
||||
<li> OpenAI-Compatible API: Jan includes a local API server (localhost:1337) that works with tools and plugins expecting OpenAI’s API format.</li>
|
||||
<li>Model Context Protocol (MCP): Supports MCP to improve context handling and interactions across larger or more complex tasks.</li>
|
||||
<li>Privacy First: Jan runs fully offline by default. Your data never leaves your device unless you enable cloud connections.</li>
|
||||
</ul>
|
||||
</description>
|
||||
<launchable type="desktop-id">ai.jan.Jan.desktop</launchable>
|
||||
<screenshots>
|
||||
<screenshot type="default">
|
||||
<image>https://catalog.jan.ai/flatpak/demo.gif</image>
|
||||
</screenshot>
|
||||
</screenshots>
|
||||
|
||||
<url type="homepage">https://jan.ai/</url>
|
||||
<url type="bugtracker">https://github.com/menloresearch/jan/issues</url>
|
||||
|
||||
<content_rating type="oars-1.1" />
|
||||
|
||||
<releases>
|
||||
<release version="0.6.8" date="2025-08-14">
|
||||
<description>
|
||||
<p>Latest stable release of Jan AI</p>
|
||||
</description>
|
||||
</release>
|
||||
</releases>
|
||||
</component>
|
||||
118
flatpak/ai.jan.Jan.yml
Normal file
@ -0,0 +1,118 @@
|
||||
id: ai.jan.Jan
|
||||
runtime: org.gnome.Platform
|
||||
runtime-version: '48'
|
||||
sdk: org.gnome.Sdk
|
||||
command: Jan
|
||||
finish-args:
|
||||
- --socket=wayland # Permission needed to show the window
|
||||
- --socket=fallback-x11 # Permission needed to show the window on X11
|
||||
- --device=dri
|
||||
- --share=ipc
|
||||
- --share=network
|
||||
- --socket=pulseaudio # for future multimodality
|
||||
- --filesystem=xdg-run/dconf
|
||||
- --filesystem=~/.config/dconf:ro
|
||||
- --filesystem=~/.config/kioslaverc
|
||||
- --env=GTK_PATH=/app/lib/gtkmodules
|
||||
|
||||
modules:
|
||||
- name: volk
|
||||
buildsystem: cmake-ninja
|
||||
builddir: true
|
||||
config-opts:
|
||||
- -DVOLK_INSTALL=ON
|
||||
sources:
|
||||
- type: archive
|
||||
url: https://github.com/zeux/volk/archive/refs/tags/vulkan-sdk-1.3.280.0.zip
|
||||
sha256: 178875134d36e8b90f7e3ec31171355df3b71f47eba49cca2f98158e6552b011
|
||||
|
||||
- name: vulkan-headers
|
||||
buildsystem: cmake-ninja
|
||||
builddir: true
|
||||
sources:
|
||||
- type: archive
|
||||
url: https://github.com/KhronosGroup/Vulkan-Headers/archive/refs/tags/v1.3.283.zip
|
||||
sha256: 2094159c87fb4b6d8f734bd4cad59564cef7ef32feb00cf6d8ca7e75a84df921
|
||||
|
||||
- name: vulkan-tools
|
||||
buildsystem: cmake-ninja
|
||||
builddir: true
|
||||
sources:
|
||||
- type: archive
|
||||
url: https://github.com/KhronosGroup/Vulkan-Tools/archive/refs/tags/v1.3.283.zip
|
||||
sha256: 11ec6b474e91dc8cb6e7f22891294ede549bb6ed67c19d230e293b3fc9610883
|
||||
|
||||
- name: shaderc
|
||||
buildsystem: cmake-ninja
|
||||
builddir: true
|
||||
config-opts:
|
||||
- -DSHADERC_SKIP_COPYRIGHT_CHECK=ON
|
||||
- -DSHADERC_SKIP_EXAMPLES=ON
|
||||
- -DSHADERC_SKIP_TESTS=ON
|
||||
- -DSPIRV_SKIP_EXECUTABLES=ON
|
||||
- -DENABLE_GLSLANG_BINARIES=OFF
|
||||
cleanup:
|
||||
- /bin
|
||||
- /include
|
||||
- /lib/cmake
|
||||
- /lib/pkgconfig
|
||||
sources:
|
||||
- type: git
|
||||
url: https://github.com/google/shaderc.git
|
||||
tag: v2024.1
|
||||
commit: 47a9387ef5b3600d30d84c71ec77a59dc7db46fa
|
||||
# https://github.com/google/shaderc/blob/known-good/known_good.json
|
||||
- type: git
|
||||
url: https://github.com/KhronosGroup/SPIRV-Tools.git
|
||||
commit: dd4b663e13c07fea4fbb3f70c1c91c86731099f7
|
||||
dest: third_party/spirv-tools
|
||||
- type: git
|
||||
url: https://github.com/KhronosGroup/SPIRV-Headers.git
|
||||
commit: 5e3ad389ee56fca27c9705d093ae5387ce404df4
|
||||
dest: third_party/spirv-headers
|
||||
- type: git
|
||||
url: https://github.com/KhronosGroup/glslang.git
|
||||
commit: 142052fa30f9eca191aa9dcf65359fcaed09eeec
|
||||
dest: third_party/glslang
|
||||
|
||||
- name: cuda-toolkit
|
||||
only-arches:
|
||||
- x86_64
|
||||
cleanup:
|
||||
- /cuda
|
||||
buildsystem: simple
|
||||
build-commands:
|
||||
- mkdir /app/cuda
|
||||
- sh cuda_toolkit.run --silent --toolkit --toolkitpath=/app/cuda
|
||||
- mv /app/cuda/lib64/libcudart.* /app/lib/
|
||||
- mv /app/cuda/lib64/libcublas* /app/lib/
|
||||
sources:
|
||||
- type: file
|
||||
only-arches:
|
||||
- x86_64
|
||||
url: https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_570.86.10_linux.run
|
||||
dest-filename: cuda_toolkit.run
|
||||
md5: c71027cf1a4ce84f80b9cbf81116e767
|
||||
|
||||
- name: binary
|
||||
buildsystem: simple
|
||||
sources:
|
||||
- type: file
|
||||
url: https://catalog.jan.ai/flatpak/Jan_0.6.8_amd64.deb
|
||||
sha256: 15d1368c318a3853bad4fc3646f88afdbf9f52b6416a4734f7a297741d171f28
|
||||
only-arches: [x86_64]
|
||||
- type: file
|
||||
path: ai.jan.Jan.metainfo.xml
|
||||
build-commands:
|
||||
- ar -x *.deb
|
||||
- tar -xf data.tar.gz
|
||||
- 'install -Dm755 usr/bin/Jan /app/bin/Jan'
|
||||
- 'install -Dm755 usr/bin/bun /app/bin/bun'
|
||||
- 'install -Dm755 usr/bin/uv /app/bin/uv'
|
||||
- cp -rv usr/lib/* /app/lib/.
|
||||
- install -Dm644 usr/share/applications/Jan.desktop /app/share/applications/ai.jan.Jan.desktop
|
||||
- sed -e 's/Icon=Jan/Icon=ai.jan.Jan/g' -e 's#Exec=Jan#Exec=/app/bin/Jan#g' -i /app/share/applications/ai.jan.Jan.desktop
|
||||
- install -Dm644 usr/share/icons/hicolor/128x128/apps/Jan.png /app/share/icons/hicolor/128x128/apps/ai.jan.Jan.png
|
||||
- install -Dm644 usr/share/icons/hicolor/32x32/apps/Jan.png /app/share/icons/hicolor/32x32/apps/ai.jan.Jan.png
|
||||
- install -Dm644 usr/share/icons/hicolor/256x256@2/apps/Jan.png /app/share/icons/hicolor/256x256@2/apps/ai.jan.Jan.png
|
||||
- install -Dm644 ai.jan.Jan.metainfo.xml /app/share/metainfo/ai.jan.Jan.rosary.metainfo.xml
|
||||
3
flatpak/flathub.json
Normal file
@ -0,0 +1,3 @@
|
||||
{
|
||||
"only-arches": ["x86_64"]
|
||||
}
|
||||
12
mise.toml
@ -28,9 +28,19 @@ run = "yarn install"
|
||||
sources = ['package.json', 'yarn.lock']
|
||||
outputs = ['node_modules']
|
||||
|
||||
[tasks.build-tauri-plugin-api]
|
||||
description = "Build Tauri plugin API"
|
||||
depends = ["install"]
|
||||
run = "yarn build:tauri:plugin:api"
|
||||
sources = ['src-tauri/plugins/**/*']
|
||||
outputs = [
|
||||
'src-tauri/plugins/tauri-plugin-hardware/dist-js',
|
||||
'src-tauri/plugins/tauri-plugin-llamacpp/dist-js',
|
||||
]
|
||||
|
||||
[tasks.build-core]
|
||||
description = "Build core package"
|
||||
depends = ["install"]
|
||||
depends = ["build-tauri-plugin-api"]
|
||||
run = "yarn build:core"
|
||||
sources = ['core/**/*']
|
||||
outputs = ['core/dist']
|
||||
|
||||
@ -25,6 +25,7 @@
|
||||
"build:tauri:linux": "yarn download:bin && ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh",
|
||||
"build:tauri:darwin": "yarn tauri build --target universal-apple-darwin",
|
||||
"build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os",
|
||||
"build:tauri:plugin:api": "cd src-tauri/plugins && yarn install && yarn workspaces foreach -Apt run build",
|
||||
"build:icon": "tauri icon ./src-tauri/icons/icon.png",
|
||||
"build:core": "cd core && yarn build && yarn pack",
|
||||
"build:web": "yarn workspace @janhq/web-app build",
|
||||
|
||||
4
src-tauri/.cargo/config.toml
Normal file
@ -0,0 +1,4 @@
|
||||
[env]
|
||||
# workaround needed to prevent `STATUS_ENTRYPOINT_NOT_FOUND` error in tests
|
||||
# see https://github.com/tauri-apps/tauri/pull/4383#issuecomment-1212221864
|
||||
__TAURI_WORKSPACE__ = "true"
|
||||
7184
src-tauri/Cargo.lock
generated
Normal file
@ -7,33 +7,43 @@ license = "MIT"
|
||||
repository = "https://github.com/menloresearch/jan"
|
||||
edition = "2021"
|
||||
rust-version = "1.77.2"
|
||||
resolver = "2"
|
||||
|
||||
[lib]
|
||||
name = "app_lib"
|
||||
crate-type = ["staticlib", "cdylib", "rlib"]
|
||||
|
||||
[features]
|
||||
default = [
|
||||
"tauri/wry",
|
||||
"tauri/common-controls-v6",
|
||||
"tauri/x11",
|
||||
"tauri/protocol-asset",
|
||||
"tauri/macos-private-api",
|
||||
"tauri/test",
|
||||
]
|
||||
test-tauri = [
|
||||
"tauri/wry",
|
||||
"tauri/x11",
|
||||
"tauri/protocol-asset",
|
||||
"tauri/macos-private-api",
|
||||
"tauri/test",
|
||||
]
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2.0.2", features = [] }
|
||||
|
||||
[dependencies]
|
||||
serde_json = "1.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
log = "0.4"
|
||||
tauri = { version = "2.5.0", features = [ "protocol-asset", "macos-private-api",
|
||||
"test"
|
||||
] }
|
||||
tauri-plugin-log = "2.0.0-rc"
|
||||
tauri-plugin-shell = "2.2.0"
|
||||
tauri-plugin-os = "2.2.1"
|
||||
tauri-plugin-opener = "2.2.7"
|
||||
dirs = "6.0.0"
|
||||
env = "1.0.1"
|
||||
fix-path-env = { git = "https://github.com/tauri-apps/fix-path-env-rs" }
|
||||
flate2 = "1.0"
|
||||
tar = "0.4"
|
||||
rand = "0.8"
|
||||
tauri-plugin-http = { version = "2", features = ["unsafe-headers"] }
|
||||
tauri-plugin-store = "2"
|
||||
futures-util = "0.3.31"
|
||||
hyper = { version = "0.14", features = ["server"] }
|
||||
jan-utils = { path = "./utils" }
|
||||
libloading = "0.8.7"
|
||||
log = "0.4"
|
||||
reqwest = { version = "0.11", features = ["json", "blocking", "stream"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", rev = "3196c95f1dfafbffbdcdd6d365c94969ac975e6a", features = [
|
||||
"client",
|
||||
"transport-sse-client",
|
||||
@ -41,24 +51,35 @@ rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", rev = "3196c9
|
||||
"tower",
|
||||
"reqwest",
|
||||
] }
|
||||
uuid = { version = "1.7", features = ["v4"] }
|
||||
env = "1.0.1"
|
||||
futures-util = "0.3.31"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_yaml = "0.9.34"
|
||||
tar = "0.4"
|
||||
tauri-plugin-deep-link = "2"
|
||||
tauri-plugin-dialog = "2.2.1"
|
||||
tauri-plugin-hardware = { path = "./plugins/tauri-plugin-hardware" }
|
||||
tauri-plugin-http = { version = "2", features = ["unsafe-headers"] }
|
||||
tauri-plugin-llamacpp = { path = "./plugins/tauri-plugin-llamacpp" }
|
||||
tauri-plugin-log = "2.0.0-rc"
|
||||
tauri-plugin-opener = "2.2.7"
|
||||
tauri-plugin-os = "2.2.1"
|
||||
tauri-plugin-shell = "2.2.0"
|
||||
tauri-plugin-store = "2"
|
||||
thiserror = "2.0.12"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-util = "0.7.14"
|
||||
url = "2.5"
|
||||
tauri-plugin-dialog = "2.2.1"
|
||||
dirs = "6.0.0"
|
||||
sysinfo = "0.34.2"
|
||||
ash = "0.38.0"
|
||||
nvml-wrapper = "0.10.0"
|
||||
tauri-plugin-deep-link = "2"
|
||||
fix-path-env = { git = "https://github.com/tauri-apps/fix-path-env-rs" }
|
||||
serde_yaml = "0.9.34"
|
||||
hmac = "0.12.1"
|
||||
sha2 = "0.10.9"
|
||||
base64 = "0.22.1"
|
||||
libloading = "0.8.7"
|
||||
thiserror = "2.0.12"
|
||||
uuid = { version = "1.7", features = ["v4"] }
|
||||
|
||||
[dependencies.tauri]
|
||||
version = "2.5.0"
|
||||
default-features = false
|
||||
features = ["protocol-asset", "macos-private-api", "test"]
|
||||
|
||||
[target.'cfg(windows)'.dev-dependencies]
|
||||
tempfile = "3.20.0"
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix = "=0.30.1"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
@ -69,6 +90,3 @@ windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] }
|
||||
tauri-plugin-updater = "2"
|
||||
once_cell = "1.18"
|
||||
tauri-plugin-single-instance = { version = "2.0.0", features = ["deep-link"] }
|
||||
|
||||
[target.'cfg(windows)'.dev-dependencies]
|
||||
tempfile = "3.20.0"
|
||||
|
||||
@ -55,6 +55,8 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"store:default"
|
||||
"store:default",
|
||||
"llamacpp:default",
|
||||
"hardware:default"
|
||||
]
|
||||
}
|
||||
|
||||
|
Before Width: | Height: | Size: 38 KiB After Width: | Height: | Size: 36 KiB |
3
src-tauri/plugins/.yarnrc.yml
Normal file
@ -0,0 +1,3 @@
|
||||
nmHoistingLimits: workspaces
|
||||
nodeLinker: node-modules
|
||||
checksumBehavior: update
|
||||
12
src-tauri/plugins/package.json
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"private": true,
|
||||
"workspaces": {
|
||||
"packages": [
|
||||
"**"
|
||||
]
|
||||
},
|
||||
"installConfig": {
|
||||
"hoistingLimits": "workspaces"
|
||||
},
|
||||
"packageManager": "yarn@4.5.3"
|
||||
}
|
||||
17
src-tauri/plugins/tauri-plugin-hardware/.gitignore
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
/.vs
|
||||
.DS_Store
|
||||
.Thumbs.db
|
||||
*.sublime*
|
||||
.idea/
|
||||
debug.log
|
||||
package-lock.json
|
||||
.vscode/settings.json
|
||||
yarn.lock
|
||||
|
||||
/.tauri
|
||||
/target
|
||||
Cargo.lock
|
||||
node_modules/
|
||||
|
||||
dist-js
|
||||
dist
|
||||
28
src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
Normal file
@ -0,0 +1,28 @@
|
||||
[package]
|
||||
name = "tauri-plugin-hardware"
|
||||
version = "0.6.599"
|
||||
authors = ["Jan <service@jan.ai>"]
|
||||
description = "Tauri plugin for hardware information and GPU monitoring"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/menloresearch/jan"
|
||||
edition = "2021"
|
||||
rust-version = "1.77.2"
|
||||
exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"]
|
||||
links = "tauri-plugin-hardware"
|
||||
|
||||
[dependencies]
|
||||
ash = "0.38.0"
|
||||
libc = "0.2"
|
||||
log = "0.4"
|
||||
nvml-wrapper = "0.10.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
sysinfo = "0.34.2"
|
||||
tauri = { version = "2.5.0", default-features = false, features = ["test"] }
|
||||
|
||||
# Windows-specific dependencies
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
libloading = "0.8"
|
||||
|
||||
[build-dependencies]
|
||||
tauri-plugin = { version = "2.3.1", features = ["build"] }
|
||||
5
src-tauri/plugins/tauri-plugin-hardware/build.rs
Normal file
@ -0,0 +1,5 @@
|
||||
const COMMANDS: &[&str] = &["get_system_info", "get_system_usage"];
|
||||
|
||||
fn main() {
|
||||
tauri_plugin::Builder::new(COMMANDS).build();
|
||||
}
|
||||
49
src-tauri/plugins/tauri-plugin-hardware/guest-js/index.ts
Normal file
@ -0,0 +1,49 @@
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
|
||||
// Types
|
||||
export interface CpuStaticInfo {
|
||||
name: string;
|
||||
core_count: number;
|
||||
arch: string;
|
||||
extensions: string[];
|
||||
}
|
||||
|
||||
export interface GpuInfo {
|
||||
name: string;
|
||||
total_memory: number;
|
||||
vendor: string;
|
||||
uuid: string;
|
||||
driver_version: string;
|
||||
nvidia_info?: any;
|
||||
vulkan_info?: any;
|
||||
}
|
||||
|
||||
export interface SystemInfo {
|
||||
cpu: CpuStaticInfo;
|
||||
os_type: string;
|
||||
os_name: string;
|
||||
total_memory: number;
|
||||
gpus: GpuInfo[];
|
||||
}
|
||||
|
||||
export interface GpuUsage {
|
||||
uuid: string;
|
||||
used_memory: number;
|
||||
total_memory: number;
|
||||
}
|
||||
|
||||
export interface SystemUsage {
|
||||
cpu: number;
|
||||
used_memory: number;
|
||||
total_memory: number;
|
||||
gpus: GpuUsage[];
|
||||
}
|
||||
|
||||
// Hardware commands
|
||||
export async function getSystemInfo(): Promise<SystemInfo> {
|
||||
return await invoke('plugin:hardware|get_system_info');
|
||||
}
|
||||
|
||||
export async function getSystemUsage(): Promise<SystemUsage> {
|
||||
return await invoke('plugin:hardware|get_system_usage');
|
||||
}
|
||||
33
src-tauri/plugins/tauri-plugin-hardware/package.json
Normal file
@ -0,0 +1,33 @@
|
||||
{
|
||||
"name": "@janhq/tauri-plugin-hardware-api",
|
||||
"version": "0.6.6",
|
||||
"private": true,
|
||||
"description": "Hardware monitoring plugin API for Tauri",
|
||||
"type": "module",
|
||||
"types": "./dist-js/index.d.ts",
|
||||
"main": "./dist-js/index.cjs",
|
||||
"module": "./dist-js/index.js",
|
||||
"exports": {
|
||||
"types": "./dist-js/index.d.ts",
|
||||
"import": "./dist-js/index.js",
|
||||
"require": "./dist-js/index.cjs"
|
||||
},
|
||||
"files": [
|
||||
"dist-js",
|
||||
"README.md"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rollup -c",
|
||||
"prepublishOnly": "yarn build",
|
||||
"pretest": "yarn build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": ">=2.0.0-beta.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "^12.0.0",
|
||||
"rollup": "^4.9.6",
|
||||
"tslib": "^2.6.2",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-system-info"
|
||||
description = "Enables the get_system_info command without any pre-configured scope."
|
||||
commands.allow = ["get_system_info"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-system-info"
|
||||
description = "Denies the get_system_info command without any pre-configured scope."
|
||||
commands.deny = ["get_system_info"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-system-usage"
|
||||
description = "Enables the get_system_usage command without any pre-configured scope."
|
||||
commands.allow = ["get_system_usage"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-system-usage"
|
||||
description = "Denies the get_system_usage command without any pre-configured scope."
|
||||
commands.deny = ["get_system_usage"]
|
||||
@ -0,0 +1,70 @@
|
||||
## Default Permission
|
||||
|
||||
Default permissions for the hardware plugin
|
||||
|
||||
#### This default permission set includes the following:
|
||||
|
||||
- `allow-get-system-info`
|
||||
- `allow-get-system-usage`
|
||||
|
||||
## Permission Table
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<th>Identifier</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`hardware:allow-get-system-info`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_system_info command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`hardware:deny-get-system-info`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_system_info command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`hardware:allow-get-system-usage`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_system_usage command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`hardware:deny-get-system-usage`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_system_usage command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
@ -0,0 +1,6 @@
|
||||
[default]
|
||||
description = "Default permissions for the hardware plugin"
|
||||
permissions = [
|
||||
"allow-get-system-info",
|
||||
"allow-get-system-usage"
|
||||
]
|
||||
@ -0,0 +1,330 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "PermissionFile",
|
||||
"description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"default": {
|
||||
"description": "The default permission set for the plugin",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/DefaultPermission"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"set": {
|
||||
"description": "A list of permissions sets defined",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/PermissionSet"
|
||||
}
|
||||
},
|
||||
"permission": {
|
||||
"description": "A list of inlined permissions",
|
||||
"default": [],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/Permission"
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"DefaultPermission": {
|
||||
"description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"permissions"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"description": "The version of the permission.",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
],
|
||||
"format": "uint64",
|
||||
"minimum": 1.0
|
||||
},
|
||||
"description": {
|
||||
"description": "Human-readable description of what the permission does. Tauri convention is to use `<h4>` headings in markdown content for Tauri documentation generation purposes.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"permissions": {
|
||||
"description": "All permissions this set contains.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"PermissionSet": {
|
||||
"description": "A set of direct permissions grouped together under a new name.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"description",
|
||||
"identifier",
|
||||
"permissions"
|
||||
],
|
||||
"properties": {
|
||||
"identifier": {
|
||||
"description": "A unique identifier for the permission.",
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Human-readable description of what the permission does.",
|
||||
"type": "string"
|
||||
},
|
||||
"permissions": {
|
||||
"description": "All permissions this set contains.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/PermissionKind"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Permission": {
|
||||
"description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"identifier"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"description": "The version of the permission.",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
],
|
||||
"format": "uint64",
|
||||
"minimum": 1.0
|
||||
},
|
||||
"identifier": {
|
||||
"description": "A unique identifier for the permission.",
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Human-readable description of what the permission does. Tauri internal convention is to use `<h4>` headings in markdown content for Tauri documentation generation purposes.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"commands": {
|
||||
"description": "Allowed or denied commands when using this permission.",
|
||||
"default": {
|
||||
"allow": [],
|
||||
"deny": []
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Commands"
|
||||
}
|
||||
]
|
||||
},
|
||||
"scope": {
|
||||
"description": "Allowed or denied scoped when using this permission.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Scopes"
|
||||
}
|
||||
]
|
||||
},
|
||||
"platforms": {
|
||||
"description": "Target platforms this permission applies. By default all platforms are affected by this permission.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/Target"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Commands": {
|
||||
"description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allow": {
|
||||
"description": "Allowed command.",
|
||||
"default": [],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"deny": {
|
||||
"description": "Denied command, which takes priority.",
|
||||
"default": [],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Scopes": {
|
||||
"description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allow": {
|
||||
"description": "Data that defines what is allowed by the scope.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
},
|
||||
"deny": {
|
||||
"description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Value": {
|
||||
"description": "All supported ACL values.",
|
||||
"anyOf": [
|
||||
{
|
||||
"description": "Represents a null JSON value.",
|
||||
"type": "null"
|
||||
},
|
||||
{
|
||||
"description": "Represents a [`bool`].",
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"description": "Represents a valid ACL [`Number`].",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Number"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Represents a [`String`].",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Represents a list of other [`Value`]s.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Represents a map of [`String`] keys to [`Value`]s.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"Number": {
|
||||
"description": "A valid ACL number.",
|
||||
"anyOf": [
|
||||
{
|
||||
"description": "Represents an [`i64`].",
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
{
|
||||
"description": "Represents a [`f64`].",
|
||||
"type": "number",
|
||||
"format": "double"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Target": {
|
||||
"description": "Platform target.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "MacOS.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"macOS"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Windows.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"windows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Linux.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Android.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"android"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "iOS.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"iOS"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"PermissionKind": {
|
||||
"type": "string",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Enables the get_system_info command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-system-info",
|
||||
"markdownDescription": "Enables the get_system_info command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_system_info command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-system-info",
|
||||
"markdownDescription": "Denies the get_system_info command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_system_usage command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-system-usage",
|
||||
"markdownDescription": "Enables the get_system_usage command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_system_usage command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-system-usage",
|
||||
"markdownDescription": "Denies the get_system_usage command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Default permissions for the hardware plugin\n#### This default permission set includes:\n\n- `allow-get-system-info`\n- `allow-get-system-usage`",
|
||||
"type": "string",
|
||||
"const": "default",
|
||||
"markdownDescription": "Default permissions for the hardware plugin\n#### This default permission set includes:\n\n- `allow-get-system-info`\n- `allow-get-system-usage`"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
31
src-tauri/plugins/tauri-plugin-hardware/rollup.config.js
Normal file
@ -0,0 +1,31 @@
|
||||
import { readFileSync } from 'node:fs'
|
||||
import { dirname, join } from 'node:path'
|
||||
import { cwd } from 'node:process'
|
||||
import typescript from '@rollup/plugin-typescript'
|
||||
|
||||
const pkg = JSON.parse(readFileSync(join(cwd(), 'package.json'), 'utf8'))
|
||||
|
||||
export default {
|
||||
input: 'guest-js/index.ts',
|
||||
output: [
|
||||
{
|
||||
file: pkg.exports.import,
|
||||
format: 'esm'
|
||||
},
|
||||
{
|
||||
file: pkg.exports.require,
|
||||
format: 'cjs'
|
||||
}
|
||||
],
|
||||
plugins: [
|
||||
typescript({
|
||||
declaration: true,
|
||||
declarationDir: dirname(pkg.exports.import)
|
||||
})
|
||||
],
|
||||
external: [
|
||||
/^@tauri-apps\/api/,
|
||||
...Object.keys(pkg.dependencies || {}),
|
||||
...Object.keys(pkg.peerDependencies || {})
|
||||
]
|
||||
}
|
||||
90
src-tauri/plugins/tauri-plugin-hardware/src/commands.rs
Normal file
@ -0,0 +1,90 @@
|
||||
use crate::{
|
||||
helpers::get_jan_libvulkan_path,
|
||||
types::{CpuStaticInfo, SystemInfo, SystemUsage},
|
||||
vendor::{nvidia, vulkan},
|
||||
SYSTEM_INFO,
|
||||
};
|
||||
use sysinfo::System;
|
||||
use tauri::Runtime;
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_system_info<R: Runtime>(app: tauri::AppHandle<R>) -> SystemInfo {
|
||||
SYSTEM_INFO
|
||||
.get_or_init(|| {
|
||||
let mut system = System::new();
|
||||
system.refresh_memory();
|
||||
|
||||
let mut gpu_map = std::collections::HashMap::new();
|
||||
for gpu in nvidia::get_nvidia_gpus() {
|
||||
gpu_map.insert(gpu.uuid.clone(), gpu);
|
||||
}
|
||||
|
||||
// try system vulkan first
|
||||
let paths = vec!["".to_string(), get_jan_libvulkan_path(app.clone())];
|
||||
let mut vulkan_gpus = vec![];
|
||||
for path in paths {
|
||||
vulkan_gpus = vulkan::get_vulkan_gpus(&path);
|
||||
if !vulkan_gpus.is_empty() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for gpu in vulkan_gpus {
|
||||
match gpu_map.get_mut(&gpu.uuid) {
|
||||
// for existing NVIDIA GPUs, add Vulkan info
|
||||
Some(nvidia_gpu) => {
|
||||
nvidia_gpu.vulkan_info = gpu.vulkan_info;
|
||||
}
|
||||
None => {
|
||||
gpu_map.insert(gpu.uuid.clone(), gpu);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let os_type = if cfg!(target_os = "windows") {
|
||||
"windows"
|
||||
} else if cfg!(target_os = "macos") {
|
||||
"macos"
|
||||
} else if cfg!(target_os = "linux") {
|
||||
"linux"
|
||||
} else {
|
||||
"unknown"
|
||||
};
|
||||
let os_name = System::long_os_version().unwrap_or("Unknown".to_string());
|
||||
|
||||
SystemInfo {
|
||||
cpu: CpuStaticInfo::new(),
|
||||
os_type: os_type.to_string(),
|
||||
os_name,
|
||||
total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB
|
||||
gpus: gpu_map.into_values().collect(),
|
||||
}
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_system_usage<R: Runtime>(app: tauri::AppHandle<R>) -> SystemUsage {
|
||||
let mut system = System::new();
|
||||
system.refresh_memory();
|
||||
|
||||
// need to refresh 2 times to get CPU usage
|
||||
system.refresh_cpu_all();
|
||||
std::thread::sleep(sysinfo::MINIMUM_CPU_UPDATE_INTERVAL);
|
||||
system.refresh_cpu_all();
|
||||
|
||||
let cpus = system.cpus();
|
||||
let cpu_usage =
|
||||
cpus.iter().map(|cpu| cpu.cpu_usage()).sum::<f32>() / (cpus.len().max(1) as f32);
|
||||
|
||||
SystemUsage {
|
||||
cpu: cpu_usage,
|
||||
used_memory: system.used_memory() / 1024 / 1024, // bytes to MiB,
|
||||
total_memory: system.total_memory() / 1024 / 1024, // bytes to MiB,
|
||||
gpus: get_system_info(app.clone())
|
||||
.gpus
|
||||
.iter()
|
||||
.map(|gpu| gpu.get_usage())
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
4
src-tauri/plugins/tauri-plugin-hardware/src/constants.rs
Normal file
@ -0,0 +1,4 @@
|
||||
// https://devicehunt.com/all-pci-vendors
|
||||
pub const VENDOR_ID_AMD: u32 = 0x1002;
|
||||
pub const VENDOR_ID_NVIDIA: u32 = 0x10DE;
|
||||
pub const VENDOR_ID_INTEL: u32 = 0x8086;
|
||||
130
src-tauri/plugins/tauri-plugin-hardware/src/cpu.rs
Normal file
@ -0,0 +1,130 @@
|
||||
use sysinfo::System;
|
||||
|
||||
use crate::types::CpuStaticInfo;
|
||||
|
||||
impl CpuStaticInfo {
|
||||
pub fn new() -> Self {
|
||||
let mut system = System::new();
|
||||
system.refresh_cpu_all();
|
||||
|
||||
let name = system
|
||||
.cpus()
|
||||
.first()
|
||||
.map(|cpu| {
|
||||
let brand = cpu.brand();
|
||||
if brand.is_empty() {
|
||||
cpu.name()
|
||||
} else {
|
||||
brand
|
||||
}
|
||||
})
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
CpuStaticInfo {
|
||||
name,
|
||||
core_count: System::physical_core_count().unwrap_or(0),
|
||||
arch: std::env::consts::ARCH.to_string(),
|
||||
extensions: CpuStaticInfo::get_extensions(),
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: see if we need to check for all CPU extensions
|
||||
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
|
||||
fn get_extensions() -> Vec<String> {
|
||||
let mut exts = vec![];
|
||||
|
||||
// fpu is always present on modern x86 processors,
|
||||
// but is_x86_feature_detected doesn't support it
|
||||
exts.push("fpu".to_string());
|
||||
if is_x86_feature_detected!("mmx") {
|
||||
exts.push("mmx".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("sse") {
|
||||
exts.push("sse".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("sse2") {
|
||||
exts.push("sse2".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("sse3") {
|
||||
exts.push("sse3".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("ssse3") {
|
||||
exts.push("ssse3".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("sse4.1") {
|
||||
exts.push("sse4_1".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("sse4.2") {
|
||||
exts.push("sse4_2".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("pclmulqdq") {
|
||||
exts.push("pclmulqdq".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx") {
|
||||
exts.push("avx".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx2") {
|
||||
exts.push("avx2".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512f") {
|
||||
exts.push("avx512_f".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512dq") {
|
||||
exts.push("avx512_dq".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512ifma") {
|
||||
exts.push("avx512_ifma".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512pf") {
|
||||
exts.push("avx512_pf".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512er") {
|
||||
exts.push("avx512_er".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512cd") {
|
||||
exts.push("avx512_cd".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512bw") {
|
||||
exts.push("avx512_bw".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512vl") {
|
||||
exts.push("avx512_vl".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512vbmi") {
|
||||
exts.push("avx512_vbmi".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512vbmi2") {
|
||||
exts.push("avx512_vbmi2".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512vnni") {
|
||||
exts.push("avx512_vnni".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512bitalg") {
|
||||
exts.push("avx512_bitalg".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("avx512vpopcntdq") {
|
||||
exts.push("avx512_vpopcntdq".to_string());
|
||||
}
|
||||
// avx512_4vnniw and avx512_4fmaps are only available on Intel Knights Mill, which are
|
||||
// very rare. https://en.wikipedia.org/wiki/AVX-512
|
||||
// is_x86_feature_detected doesn't support them
|
||||
if is_x86_feature_detected!("avx512vp2intersect") {
|
||||
exts.push("avx512_vp2intersect".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("aes") {
|
||||
exts.push("aes".to_string());
|
||||
}
|
||||
if is_x86_feature_detected!("f16c") {
|
||||
exts.push("f16c".to_string());
|
||||
}
|
||||
|
||||
exts
|
||||
}
|
||||
|
||||
// Cortex always returns empty list for non-x86
|
||||
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
|
||||
fn get_extensions() -> Vec<String> {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
33
src-tauri/plugins/tauri-plugin-hardware/src/gpu.rs
Normal file
@ -0,0 +1,33 @@
|
||||
use crate::{
|
||||
constants::{VENDOR_ID_AMD, VENDOR_ID_INTEL, VENDOR_ID_NVIDIA},
|
||||
types::{GpuInfo, GpuUsage, Vendor},
|
||||
};
|
||||
|
||||
impl Vendor {
|
||||
pub fn from_vendor_id(vendor_id: u32) -> Self {
|
||||
match vendor_id {
|
||||
VENDOR_ID_AMD => Vendor::AMD,
|
||||
VENDOR_ID_NVIDIA => Vendor::NVIDIA,
|
||||
VENDOR_ID_INTEL => Vendor::Intel,
|
||||
_ => Vendor::Unknown(vendor_id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GpuInfo {
|
||||
pub fn get_usage(&self) -> GpuUsage {
|
||||
match self.vendor {
|
||||
Vendor::NVIDIA => self.get_usage_nvidia(),
|
||||
Vendor::AMD => self.get_usage_amd(),
|
||||
_ => self.get_usage_unsupported(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_usage_unsupported(&self) -> GpuUsage {
|
||||
GpuUsage {
|
||||
uuid: self.uuid.clone(),
|
||||
used_memory: 0,
|
||||
total_memory: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
20
src-tauri/plugins/tauri-plugin-hardware/src/helpers.rs
Normal file
@ -0,0 +1,20 @@
|
||||
use tauri::{path::BaseDirectory, Manager, Runtime};
|
||||
|
||||
pub fn get_jan_libvulkan_path<R: Runtime>(app: tauri::AppHandle<R>) -> String {
|
||||
let lib_name = if cfg!(target_os = "windows") {
|
||||
"vulkan-1.dll"
|
||||
} else if cfg!(target_os = "linux") {
|
||||
"libvulkan.so"
|
||||
} else {
|
||||
return "".to_string();
|
||||
};
|
||||
|
||||
// NOTE: this does not work in test mode (mock app)
|
||||
match app.path().resolve(
|
||||
format!("resources/lib/{}", lib_name),
|
||||
BaseDirectory::Resource,
|
||||
) {
|
||||
Ok(lib_path) => lib_path.to_string_lossy().to_string(),
|
||||
Err(_) => "".to_string(),
|
||||
}
|
||||
}
|
||||
29
src-tauri/plugins/tauri-plugin-hardware/src/lib.rs
Normal file
@ -0,0 +1,29 @@
|
||||
mod commands;
|
||||
mod constants;
|
||||
pub mod cpu;
|
||||
pub mod gpu;
|
||||
mod helpers;
|
||||
mod types;
|
||||
pub mod vendor;
|
||||
|
||||
pub use constants::*;
|
||||
pub use helpers::*;
|
||||
pub use types::*;
|
||||
|
||||
use std::sync::OnceLock;
|
||||
use tauri::Runtime;
|
||||
|
||||
static SYSTEM_INFO: OnceLock<SystemInfo> = OnceLock::new();
|
||||
|
||||
/// Initialize the hardware plugin
|
||||
pub fn init<R: Runtime>() -> tauri::plugin::TauriPlugin<R> {
|
||||
tauri::plugin::Builder::new("hardware")
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
commands::get_system_info,
|
||||
commands::get_system_usage
|
||||
])
|
||||
.build()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
16
src-tauri/plugins/tauri-plugin-hardware/src/tests.rs
Normal file
@ -0,0 +1,16 @@
|
||||
use crate::commands::*;
|
||||
use tauri::test::mock_app;
|
||||
|
||||
#[test]
|
||||
fn test_system_info() {
|
||||
let app = mock_app();
|
||||
let info = get_system_info(app.handle().clone());
|
||||
println!("System Static Info: {:?}", info);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_system_usage() {
|
||||
let app = mock_app();
|
||||
let usage = get_system_usage(app.handle().clone());
|
||||
println!("System Usage Info: {:?}", usage);
|
||||
}
|
||||
71
src-tauri/plugins/tauri-plugin-hardware/src/types.rs
Normal file
@ -0,0 +1,71 @@
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::vendor::{nvidia::NvidiaInfo, vulkan::VulkanInfo};
|
||||
|
||||
#[derive(Clone, Serialize, Debug)]
|
||||
pub struct CpuStaticInfo {
|
||||
pub name: String,
|
||||
pub core_count: usize,
|
||||
pub arch: String,
|
||||
pub extensions: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Vendor {
|
||||
AMD,
|
||||
NVIDIA,
|
||||
Intel,
|
||||
Unknown(u32),
|
||||
}
|
||||
|
||||
impl Serialize for Vendor {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
match self {
|
||||
Vendor::AMD => "AMD".serialize(serializer),
|
||||
Vendor::NVIDIA => "NVIDIA".serialize(serializer),
|
||||
Vendor::Intel => "Intel".serialize(serializer),
|
||||
Vendor::Unknown(vendor_id) => {
|
||||
let formatted = format!("Unknown (vendor_id: {})", vendor_id);
|
||||
serializer.serialize_str(&formatted)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct GpuInfo {
|
||||
pub name: String,
|
||||
pub total_memory: u64,
|
||||
pub vendor: Vendor,
|
||||
pub uuid: String,
|
||||
pub driver_version: String,
|
||||
pub nvidia_info: Option<NvidiaInfo>,
|
||||
pub vulkan_info: Option<VulkanInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone, Debug)]
|
||||
pub struct SystemInfo {
|
||||
pub cpu: CpuStaticInfo,
|
||||
pub os_type: String,
|
||||
pub os_name: String,
|
||||
pub total_memory: u64,
|
||||
pub gpus: Vec<GpuInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone, Debug)]
|
||||
pub struct GpuUsage {
|
||||
pub uuid: String,
|
||||
pub used_memory: u64,
|
||||
pub total_memory: u64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone, Debug)]
|
||||
pub struct SystemUsage {
|
||||
pub cpu: f32,
|
||||
pub used_memory: u64,
|
||||
pub total_memory: u64,
|
||||
pub gpus: Vec<GpuUsage>,
|
||||
}
|
||||
@ -1,4 +1,4 @@
|
||||
use super::{GpuInfo, GpuUsage};
|
||||
use crate::types::{GpuInfo, GpuUsage};
|
||||
|
||||
impl GpuInfo {
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
6
src-tauri/plugins/tauri-plugin-hardware/src/vendor/mod.rs
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
pub mod amd;
|
||||
pub mod nvidia;
|
||||
pub mod vulkan;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
@ -1,4 +1,4 @@
|
||||
use super::{GpuInfo, GpuUsage, Vendor};
|
||||
use crate::types::{GpuInfo, GpuUsage, Vendor};
|
||||
use nvml_wrapper::{error::NvmlError, Nvml};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
@ -103,18 +103,3 @@ pub fn get_nvidia_gpus() -> Vec<GpuInfo> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_get_nvidia_gpus() {
|
||||
let gpus = get_nvidia_gpus();
|
||||
for (i, gpu) in gpus.iter().enumerate() {
|
||||
println!("GPU {}:", i);
|
||||
println!(" {:?}", gpu);
|
||||
println!(" {:?}", gpu.get_usage());
|
||||
}
|
||||
}
|
||||
}
|
||||
21
src-tauri/plugins/tauri-plugin-hardware/src/vendor/tests.rs
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
use crate::vendor::{nvidia, vulkan};
|
||||
|
||||
#[test]
|
||||
fn test_get_nvidia_gpus() {
|
||||
let gpus = nvidia::get_nvidia_gpus();
|
||||
for (i, gpu) in gpus.iter().enumerate() {
|
||||
println!("GPU {}:", i);
|
||||
println!(" {:?}", gpu);
|
||||
println!(" {:?}", gpu.get_usage());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_vulkan_gpus() {
|
||||
let gpus = vulkan::get_vulkan_gpus("");
|
||||
for (i, gpu) in gpus.iter().enumerate() {
|
||||
println!("GPU {}:", i);
|
||||
println!(" {:?}", gpu);
|
||||
println!(" {:?}", gpu.get_usage());
|
||||
}
|
||||
}
|
||||
@ -1,4 +1,4 @@
|
||||
use super::{GpuInfo, Vendor};
|
||||
use crate::types::{GpuInfo, Vendor};
|
||||
use ash::{vk, Entry};
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize)]
|
||||
@ -128,18 +128,3 @@ fn get_vulkan_gpus_internal(lib_path: &str) -> Result<Vec<GpuInfo>, Box<dyn std:
|
||||
|
||||
Ok(device_info_list)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_get_vulkan_gpus() {
|
||||
let gpus = get_vulkan_gpus("");
|
||||
for (i, gpu) in gpus.iter().enumerate() {
|
||||
println!("GPU {}:", i);
|
||||
println!(" {:?}", gpu);
|
||||
println!(" {:?}", gpu.get_usage());
|
||||
}
|
||||
}
|
||||
}
|
||||
14
src-tauri/plugins/tauri-plugin-hardware/tsconfig.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2021",
|
||||
"module": "esnext",
|
||||
"moduleResolution": "bundler",
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noImplicitAny": true,
|
||||
"noEmit": true
|
||||
},
|
||||
"include": ["guest-js/*.ts"],
|
||||
"exclude": ["dist-js", "node_modules"]
|
||||
}
|
||||
17
src-tauri/plugins/tauri-plugin-llamacpp/.gitignore
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
/.vs
|
||||
.DS_Store
|
||||
.Thumbs.db
|
||||
*.sublime*
|
||||
.idea/
|
||||
debug.log
|
||||
package-lock.json
|
||||
.vscode/settings.json
|
||||
yarn.lock
|
||||
|
||||
/.tauri
|
||||
/target
|
||||
Cargo.lock
|
||||
node_modules/
|
||||
|
||||
dist-js
|
||||
dist
|
||||
36
src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
Normal file
@ -0,0 +1,36 @@
|
||||
[package]
|
||||
name = "tauri-plugin-llamacpp"
|
||||
version = "0.6.599"
|
||||
authors = ["Jan <service@jan.ai>"]
|
||||
description = "Tauri plugin for managing Jan LlamaCpp server processes and model loading"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/menloresearch/jan"
|
||||
edition = "2021"
|
||||
rust-version = "1.77.2"
|
||||
exclude = ["/examples", "/dist-js", "/guest-js", "/node_modules"]
|
||||
links = "tauri-plugin-llamacpp"
|
||||
|
||||
[dependencies]
|
||||
base64 = "0.22.1"
|
||||
byteorder = "1.5.0"
|
||||
hmac = "0.12.1"
|
||||
jan-utils = { path = "../../utils" }
|
||||
log = "0.4"
|
||||
rand = "0.8"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
sha2 = "0.10.9"
|
||||
sysinfo = "0.34.2"
|
||||
tauri = { version = "2.5.0", default-features = false, features = [] }
|
||||
thiserror = "2.0.12"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
|
||||
# Windows-specific dependencies
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] }
|
||||
|
||||
# Unix-specific dependencies
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix = { version = "=0.30.1", features = ["signal", "process"] }
|
||||
|
||||
[build-dependencies]
|
||||
tauri-plugin = { version = "2.3.1", features = ["build"] }
|
||||
21
src-tauri/plugins/tauri-plugin-llamacpp/build.rs
Normal file
@ -0,0 +1,21 @@
|
||||
const COMMANDS: &[&str] = &[
|
||||
// Cleanup command
|
||||
"cleanup_llama_processes",
|
||||
// LlamaCpp server commands
|
||||
"load_llama_model",
|
||||
"unload_llama_model",
|
||||
"get_devices",
|
||||
"generate_api_key",
|
||||
"is_process_running",
|
||||
"get_random_port",
|
||||
"find_session_by_model",
|
||||
"get_loaded_models",
|
||||
"get_all_sessions",
|
||||
"get_session_by_model",
|
||||
// GGUF commands
|
||||
"read_gguf_metadata",
|
||||
];
|
||||
|
||||
fn main() {
|
||||
tauri_plugin::Builder::new(COMMANDS).build();
|
||||
}
|
||||
93
src-tauri/plugins/tauri-plugin-llamacpp/guest-js/index.ts
Normal file
@ -0,0 +1,93 @@
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
|
||||
// Types
|
||||
export interface SessionInfo {
|
||||
pid: number;
|
||||
port: number;
|
||||
model_id: string;
|
||||
model_path: string;
|
||||
api_key: string;
|
||||
}
|
||||
|
||||
export interface DeviceInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
memory: number;
|
||||
}
|
||||
|
||||
export interface GgufMetadata {
|
||||
version: number;
|
||||
tensor_count: number;
|
||||
metadata: Record<string, string>;
|
||||
}
|
||||
|
||||
// Cleanup commands
|
||||
export async function cleanupLlamaProcesses(): Promise<void> {
|
||||
return await invoke('plugin:llamacpp|cleanup_llama_processes');
|
||||
}
|
||||
|
||||
// LlamaCpp server commands
|
||||
export async function loadLlamaModel(
|
||||
backendPath: string,
|
||||
libraryPath?: string,
|
||||
args: string[] = []
|
||||
): Promise<SessionInfo> {
|
||||
return await invoke('plugin:llamacpp|load_llama_model', {
|
||||
backendPath,
|
||||
libraryPath,
|
||||
args
|
||||
});
|
||||
}
|
||||
|
||||
export async function unloadLlamaModel(pid: number): Promise<void> {
|
||||
return await invoke('plugin:llamacpp|unload_llama_model', { pid });
|
||||
}
|
||||
|
||||
export async function getDevices(
|
||||
backendPath: string,
|
||||
libraryPath?: string
|
||||
): Promise<DeviceInfo[]> {
|
||||
return await invoke('plugin:llamacpp|get_devices', {
|
||||
backendPath,
|
||||
libraryPath
|
||||
});
|
||||
}
|
||||
|
||||
export async function generateApiKey(
|
||||
modelId: string,
|
||||
apiSecret: string
|
||||
): Promise<string> {
|
||||
return await invoke('plugin:llamacpp|generate_api_key', {
|
||||
modelId,
|
||||
apiSecret
|
||||
});
|
||||
}
|
||||
|
||||
export async function isProcessRunning(pid: number): Promise<boolean> {
|
||||
return await invoke('plugin:llamacpp|is_process_running', { pid });
|
||||
}
|
||||
|
||||
export async function getRandomPort(): Promise<number> {
|
||||
return await invoke('plugin:llamacpp|get_random_port');
|
||||
}
|
||||
|
||||
export async function findSessionByModel(modelId: string): Promise<SessionInfo | null> {
|
||||
return await invoke('plugin:llamacpp|find_session_by_model', { modelId });
|
||||
}
|
||||
|
||||
export async function getLoadedModels(): Promise<string[]> {
|
||||
return await invoke('plugin:llamacpp|get_loaded_models');
|
||||
}
|
||||
|
||||
export async function getAllSessions(): Promise<SessionInfo[]> {
|
||||
return await invoke('plugin:llamacpp|get_all_sessions');
|
||||
}
|
||||
|
||||
export async function getSessionByModel(modelId: string): Promise<SessionInfo | null> {
|
||||
return await invoke('plugin:llamacpp|get_session_by_model', { modelId });
|
||||
}
|
||||
|
||||
// GGUF commands
|
||||
export async function readGgufMetadata(path: string): Promise<GgufMetadata> {
|
||||
return await invoke('plugin:llamacpp|read_gguf_metadata', { path });
|
||||
}
|
||||
33
src-tauri/plugins/tauri-plugin-llamacpp/package.json
Normal file
@ -0,0 +1,33 @@
|
||||
{
|
||||
"name": "@janhq/tauri-plugin-llamacpp-api",
|
||||
"version": "0.6.6",
|
||||
"private": true,
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"types": "./dist-js/index.d.ts",
|
||||
"main": "./dist-js/index.cjs",
|
||||
"module": "./dist-js/index.js",
|
||||
"exports": {
|
||||
"types": "./dist-js/index.d.ts",
|
||||
"import": "./dist-js/index.js",
|
||||
"require": "./dist-js/index.cjs"
|
||||
},
|
||||
"files": [
|
||||
"dist-js",
|
||||
"README.md"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rollup -c",
|
||||
"prepublishOnly": "yarn build",
|
||||
"pretest": "yarn build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": ">=2.0.0-beta.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "^12.0.0",
|
||||
"rollup": "^4.9.6",
|
||||
"tslib": "^2.6.2",
|
||||
"typescript": "^5.3.3"
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-cleanup-llama-processes"
|
||||
description = "Enables the cleanup_llama_processes command without any pre-configured scope."
|
||||
commands.allow = ["cleanup_llama_processes"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-cleanup-llama-processes"
|
||||
description = "Denies the cleanup_llama_processes command without any pre-configured scope."
|
||||
commands.deny = ["cleanup_llama_processes"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-find-session-by-model"
|
||||
description = "Enables the find_session_by_model command without any pre-configured scope."
|
||||
commands.allow = ["find_session_by_model"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-find-session-by-model"
|
||||
description = "Denies the find_session_by_model command without any pre-configured scope."
|
||||
commands.deny = ["find_session_by_model"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-generate-api-key"
|
||||
description = "Enables the generate_api_key command without any pre-configured scope."
|
||||
commands.allow = ["generate_api_key"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-generate-api-key"
|
||||
description = "Denies the generate_api_key command without any pre-configured scope."
|
||||
commands.deny = ["generate_api_key"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-all-sessions"
|
||||
description = "Enables the get_all_sessions command without any pre-configured scope."
|
||||
commands.allow = ["get_all_sessions"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-all-sessions"
|
||||
description = "Denies the get_all_sessions command without any pre-configured scope."
|
||||
commands.deny = ["get_all_sessions"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-devices"
|
||||
description = "Enables the get_devices command without any pre-configured scope."
|
||||
commands.allow = ["get_devices"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-devices"
|
||||
description = "Denies the get_devices command without any pre-configured scope."
|
||||
commands.deny = ["get_devices"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-loaded-models"
|
||||
description = "Enables the get_loaded_models command without any pre-configured scope."
|
||||
commands.allow = ["get_loaded_models"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-loaded-models"
|
||||
description = "Denies the get_loaded_models command without any pre-configured scope."
|
||||
commands.deny = ["get_loaded_models"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-random-port"
|
||||
description = "Enables the get_random_port command without any pre-configured scope."
|
||||
commands.allow = ["get_random_port"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-random-port"
|
||||
description = "Denies the get_random_port command without any pre-configured scope."
|
||||
commands.deny = ["get_random_port"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-get-session-by-model"
|
||||
description = "Enables the get_session_by_model command without any pre-configured scope."
|
||||
commands.allow = ["get_session_by_model"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-get-session-by-model"
|
||||
description = "Denies the get_session_by_model command without any pre-configured scope."
|
||||
commands.deny = ["get_session_by_model"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-is-process-running"
|
||||
description = "Enables the is_process_running command without any pre-configured scope."
|
||||
commands.allow = ["is_process_running"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-is-process-running"
|
||||
description = "Denies the is_process_running command without any pre-configured scope."
|
||||
commands.deny = ["is_process_running"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-load-llama-model"
|
||||
description = "Enables the load_llama_model command without any pre-configured scope."
|
||||
commands.allow = ["load_llama_model"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-load-llama-model"
|
||||
description = "Denies the load_llama_model command without any pre-configured scope."
|
||||
commands.deny = ["load_llama_model"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-read-gguf-metadata"
|
||||
description = "Enables the read_gguf_metadata command without any pre-configured scope."
|
||||
commands.allow = ["read_gguf_metadata"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-read-gguf-metadata"
|
||||
description = "Denies the read_gguf_metadata command without any pre-configured scope."
|
||||
commands.deny = ["read_gguf_metadata"]
|
||||
@ -0,0 +1,13 @@
|
||||
# Automatically generated - DO NOT EDIT!
|
||||
|
||||
"$schema" = "../../schemas/schema.json"
|
||||
|
||||
[[permission]]
|
||||
identifier = "allow-unload-llama-model"
|
||||
description = "Enables the unload_llama_model command without any pre-configured scope."
|
||||
commands.allow = ["unload_llama_model"]
|
||||
|
||||
[[permission]]
|
||||
identifier = "deny-unload-llama-model"
|
||||
description = "Denies the unload_llama_model command without any pre-configured scope."
|
||||
commands.deny = ["unload_llama_model"]
|
||||
@ -0,0 +1,340 @@
|
||||
## Default Permission
|
||||
|
||||
Default permissions for the llamacpp plugin
|
||||
|
||||
#### This default permission set includes the following:
|
||||
|
||||
- `allow-cleanup-llama-processes`
|
||||
- `allow-load-llama-model`
|
||||
- `allow-unload-llama-model`
|
||||
- `allow-get-devices`
|
||||
- `allow-generate-api-key`
|
||||
- `allow-is-process-running`
|
||||
- `allow-get-random-port`
|
||||
- `allow-find-session-by-model`
|
||||
- `allow-get-loaded-models`
|
||||
- `allow-get-all-sessions`
|
||||
- `allow-get-session-by-model`
|
||||
- `allow-read-gguf-metadata`
|
||||
|
||||
## Permission Table
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<th>Identifier</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-cleanup-llama-processes`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the cleanup_llama_processes command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-cleanup-llama-processes`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the cleanup_llama_processes command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-find-session-by-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the find_session_by_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-find-session-by-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the find_session_by_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-generate-api-key`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the generate_api_key command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-generate-api-key`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the generate_api_key command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-get-all-sessions`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_all_sessions command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-get-all-sessions`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_all_sessions command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-get-devices`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_devices command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-get-devices`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_devices command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-get-loaded-models`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_loaded_models command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-get-loaded-models`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_loaded_models command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-get-random-port`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_random_port command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-get-random-port`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_random_port command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-get-session-by-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the get_session_by_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-get-session-by-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the get_session_by_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-is-process-running`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the is_process_running command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-is-process-running`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the is_process_running command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-load-llama-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the load_llama_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-load-llama-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the load_llama_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-read-gguf-metadata`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the read_gguf_metadata command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-read-gguf-metadata`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the read_gguf_metadata command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:allow-unload-llama-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Enables the unload_llama_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td>
|
||||
|
||||
`llamacpp:deny-unload-llama-model`
|
||||
|
||||
</td>
|
||||
<td>
|
||||
|
||||
Denies the unload_llama_model command without any pre-configured scope.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
@ -0,0 +1,21 @@
|
||||
[default]
|
||||
description = "Default permissions for the llamacpp plugin"
|
||||
permissions = [
|
||||
# Cleanup commands
|
||||
"allow-cleanup-llama-processes",
|
||||
|
||||
# LlamaCpp server commands
|
||||
"allow-load-llama-model",
|
||||
"allow-unload-llama-model",
|
||||
"allow-get-devices",
|
||||
"allow-generate-api-key",
|
||||
"allow-is-process-running",
|
||||
"allow-get-random-port",
|
||||
"allow-find-session-by-model",
|
||||
"allow-get-loaded-models",
|
||||
"allow-get-all-sessions",
|
||||
"allow-get-session-by-model",
|
||||
|
||||
# GGUF commands
|
||||
"allow-read-gguf-metadata"
|
||||
]
|
||||
@ -0,0 +1,450 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "PermissionFile",
|
||||
"description": "Permission file that can define a default permission, a set of permissions or a list of inlined permissions.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"default": {
|
||||
"description": "The default permission set for the plugin",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/DefaultPermission"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
]
|
||||
},
|
||||
"set": {
|
||||
"description": "A list of permissions sets defined",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/PermissionSet"
|
||||
}
|
||||
},
|
||||
"permission": {
|
||||
"description": "A list of inlined permissions",
|
||||
"default": [],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/Permission"
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"DefaultPermission": {
|
||||
"description": "The default permission set of the plugin.\n\nWorks similarly to a permission with the \"default\" identifier.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"permissions"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"description": "The version of the permission.",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
],
|
||||
"format": "uint64",
|
||||
"minimum": 1.0
|
||||
},
|
||||
"description": {
|
||||
"description": "Human-readable description of what the permission does. Tauri convention is to use `<h4>` headings in markdown content for Tauri documentation generation purposes.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"permissions": {
|
||||
"description": "All permissions this set contains.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"PermissionSet": {
|
||||
"description": "A set of direct permissions grouped together under a new name.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"description",
|
||||
"identifier",
|
||||
"permissions"
|
||||
],
|
||||
"properties": {
|
||||
"identifier": {
|
||||
"description": "A unique identifier for the permission.",
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Human-readable description of what the permission does.",
|
||||
"type": "string"
|
||||
},
|
||||
"permissions": {
|
||||
"description": "All permissions this set contains.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/PermissionKind"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Permission": {
|
||||
"description": "Descriptions of explicit privileges of commands.\n\nIt can enable commands to be accessible in the frontend of the application.\n\nIf the scope is defined it can be used to fine grain control the access of individual or multiple commands.",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"identifier"
|
||||
],
|
||||
"properties": {
|
||||
"version": {
|
||||
"description": "The version of the permission.",
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
],
|
||||
"format": "uint64",
|
||||
"minimum": 1.0
|
||||
},
|
||||
"identifier": {
|
||||
"description": "A unique identifier for the permission.",
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Human-readable description of what the permission does. Tauri internal convention is to use `<h4>` headings in markdown content for Tauri documentation generation purposes.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"commands": {
|
||||
"description": "Allowed or denied commands when using this permission.",
|
||||
"default": {
|
||||
"allow": [],
|
||||
"deny": []
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Commands"
|
||||
}
|
||||
]
|
||||
},
|
||||
"scope": {
|
||||
"description": "Allowed or denied scoped when using this permission.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Scopes"
|
||||
}
|
||||
]
|
||||
},
|
||||
"platforms": {
|
||||
"description": "Target platforms this permission applies. By default all platforms are affected by this permission.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/Target"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Commands": {
|
||||
"description": "Allowed and denied commands inside a permission.\n\nIf two commands clash inside of `allow` and `deny`, it should be denied by default.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allow": {
|
||||
"description": "Allowed command.",
|
||||
"default": [],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"deny": {
|
||||
"description": "Denied command, which takes priority.",
|
||||
"default": [],
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Scopes": {
|
||||
"description": "An argument for fine grained behavior control of Tauri commands.\n\nIt can be of any serde serializable type and is used to allow or prevent certain actions inside a Tauri command. The configured scope is passed to the command and will be enforced by the command implementation.\n\n## Example\n\n```json { \"allow\": [{ \"path\": \"$HOME/**\" }], \"deny\": [{ \"path\": \"$HOME/secret.txt\" }] } ```",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allow": {
|
||||
"description": "Data that defines what is allowed by the scope.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
},
|
||||
"deny": {
|
||||
"description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"Value": {
|
||||
"description": "All supported ACL values.",
|
||||
"anyOf": [
|
||||
{
|
||||
"description": "Represents a null JSON value.",
|
||||
"type": "null"
|
||||
},
|
||||
{
|
||||
"description": "Represents a [`bool`].",
|
||||
"type": "boolean"
|
||||
},
|
||||
{
|
||||
"description": "Represents a valid ACL [`Number`].",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/Number"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Represents a [`String`].",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Represents a list of other [`Value`]s.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Represents a map of [`String`] keys to [`Value`]s.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/definitions/Value"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"Number": {
|
||||
"description": "A valid ACL number.",
|
||||
"anyOf": [
|
||||
{
|
||||
"description": "Represents an [`i64`].",
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
{
|
||||
"description": "Represents a [`f64`].",
|
||||
"type": "number",
|
||||
"format": "double"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Target": {
|
||||
"description": "Platform target.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "MacOS.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"macOS"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Windows.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"windows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Linux.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"linux"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "Android.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"android"
|
||||
]
|
||||
},
|
||||
{
|
||||
"description": "iOS.",
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"iOS"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"PermissionKind": {
|
||||
"type": "string",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Enables the cleanup_llama_processes command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-cleanup-llama-processes",
|
||||
"markdownDescription": "Enables the cleanup_llama_processes command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the cleanup_llama_processes command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-cleanup-llama-processes",
|
||||
"markdownDescription": "Denies the cleanup_llama_processes command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the find_session_by_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-find-session-by-model",
|
||||
"markdownDescription": "Enables the find_session_by_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the find_session_by_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-find-session-by-model",
|
||||
"markdownDescription": "Denies the find_session_by_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the generate_api_key command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-generate-api-key",
|
||||
"markdownDescription": "Enables the generate_api_key command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the generate_api_key command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-generate-api-key",
|
||||
"markdownDescription": "Denies the generate_api_key command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_all_sessions command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-all-sessions",
|
||||
"markdownDescription": "Enables the get_all_sessions command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_all_sessions command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-all-sessions",
|
||||
"markdownDescription": "Denies the get_all_sessions command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_devices command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-devices",
|
||||
"markdownDescription": "Enables the get_devices command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_devices command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-devices",
|
||||
"markdownDescription": "Denies the get_devices command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_loaded_models command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-loaded-models",
|
||||
"markdownDescription": "Enables the get_loaded_models command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_loaded_models command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-loaded-models",
|
||||
"markdownDescription": "Denies the get_loaded_models command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_random_port command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-random-port",
|
||||
"markdownDescription": "Enables the get_random_port command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_random_port command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-random-port",
|
||||
"markdownDescription": "Denies the get_random_port command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_session_by_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-get-session-by-model",
|
||||
"markdownDescription": "Enables the get_session_by_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_session_by_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-get-session-by-model",
|
||||
"markdownDescription": "Denies the get_session_by_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the is_process_running command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-is-process-running",
|
||||
"markdownDescription": "Enables the is_process_running command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the is_process_running command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-is-process-running",
|
||||
"markdownDescription": "Denies the is_process_running command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the load_llama_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-load-llama-model",
|
||||
"markdownDescription": "Enables the load_llama_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the load_llama_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-load-llama-model",
|
||||
"markdownDescription": "Denies the load_llama_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the read_gguf_metadata command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-read-gguf-metadata",
|
||||
"markdownDescription": "Enables the read_gguf_metadata command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the read_gguf_metadata command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-read-gguf-metadata",
|
||||
"markdownDescription": "Denies the read_gguf_metadata command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the unload_llama_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "allow-unload-llama-model",
|
||||
"markdownDescription": "Enables the unload_llama_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the unload_llama_model command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deny-unload-llama-model",
|
||||
"markdownDescription": "Denies the unload_llama_model command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Default permissions for the llamacpp plugin\n#### This default permission set includes:\n\n- `allow-cleanup-llama-processes`\n- `allow-load-llama-model`\n- `allow-unload-llama-model`\n- `allow-get-devices`\n- `allow-generate-api-key`\n- `allow-is-process-running`\n- `allow-get-random-port`\n- `allow-find-session-by-model`\n- `allow-get-loaded-models`\n- `allow-get-all-sessions`\n- `allow-get-session-by-model`\n- `allow-read-gguf-metadata`",
|
||||
"type": "string",
|
||||
"const": "default",
|
||||
"markdownDescription": "Default permissions for the llamacpp plugin\n#### This default permission set includes:\n\n- `allow-cleanup-llama-processes`\n- `allow-load-llama-model`\n- `allow-unload-llama-model`\n- `allow-get-devices`\n- `allow-generate-api-key`\n- `allow-is-process-running`\n- `allow-get-random-port`\n- `allow-find-session-by-model`\n- `allow-get-loaded-models`\n- `allow-get-all-sessions`\n- `allow-get-session-by-model`\n- `allow-read-gguf-metadata`"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
31
src-tauri/plugins/tauri-plugin-llamacpp/rollup.config.js
Normal file
@ -0,0 +1,31 @@
|
||||
// Rollup build configuration for the tauri-plugin-llamacpp guest bindings.
// The plugin's package.json "exports" map is the single source of truth for
// where the bundled artifacts are written.
import { readFileSync } from 'node:fs'
import { dirname, join } from 'node:path'
import { cwd } from 'node:process'
import typescript from '@rollup/plugin-typescript'

// Load the plugin manifest from the working directory so output paths stay
// in sync with what the package actually publishes.
const manifest = JSON.parse(readFileSync(join(cwd(), 'package.json'), 'utf8'))

// One bundle per export condition: ESM for `import`, CommonJS for `require`.
const bundles = [
  { file: manifest.exports.import, format: 'esm' },
  { file: manifest.exports.require, format: 'cjs' }
]

export default {
  input: 'guest-js/index.ts',
  output: bundles,
  plugins: [
    typescript({
      // Emit .d.ts declarations next to the ESM bundle.
      declaration: true,
      declarationDir: dirname(manifest.exports.import)
    })
  ],
  // Keep Tauri APIs and all declared (peer) dependencies out of the bundle;
  // consumers resolve them at install time.
  external: [
    /^@tauri-apps\/api/,
    ...Object.keys(manifest.dependencies || {}),
    ...Object.keys(manifest.peerDependencies || {})
  ]
}
|
||||