Compare commits

..

No commits in common. "dev" and "v0.5.17" have entirely different histories.
dev ... v0.5.17

2379 changed files with 89460 additions and 158481 deletions

View File

@ -1,31 +0,0 @@
---
allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
description: Find duplicate GitHub issues
---
Find up to 3 likely duplicate issues for a given GitHub issue.
To do this, follow these steps precisely:
1. Use an agent to check if the Github issue (a) is closed, (b) does not need to be deduped (eg. because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicates comment that you made earlier. If so, do not proceed.
2. Use an agent to view a Github issue, and ask the agent to return a summary of the issue
3. Then, launch 5 parallel agents to search Github for duplicates of this issue, using diverse keywords and search approaches, using the summary from #1
4. Next, feed the results from #1 and #2 into another agent, so that it can filter out false positives, that are likely not actually duplicates of the original issue. If there are no duplicates remaining, do not proceed.
5. Finally, comment back on the issue with a list of up to three duplicate issues (or zero, if there are no likely duplicates)
Notes (be sure to tell this to your agents, too):
- Use `gh` to interact with Github, rather than web fetch
- Do not use other tools, beyond `gh` (eg. don't use other MCP servers, file edit, etc.)
- Make a todo list first
- For your comment, follow the following format precisely (assuming for this example that you found 3 suspected duplicates):
---
Found 3 possible duplicate issues:
1. <link to issue>
2. <link to issue>
3. <link to issue>
---

View File

@ -1,20 +1,4 @@
{ {
"name": "Jan", "name": "jan",
"image": "mcr.microsoft.com/devcontainers/base:jammy", "image": "node:20"
"features": {
"ghcr.io/devcontainers/features/node:1": {
"version": "20"
},
"ghcr.io/devcontainers/features/rust:1": {},
"ghcr.io/devcontainers-extra/features/corepack:1": {}
},
"postCreateCommand": "./.devcontainer/postCreateCommand.sh",
// appimagekit requires fuse to package appimage, to use fuse in the container you need to enable it on the host
"runArgs": [
"--device", "/dev/fuse",
"--cap-add=SYS_ADMIN",
"--security-opt", "apparmor:unconfined"
]
} }

View File

@ -1,16 +0,0 @@
#!/usr/bin/env bash
# install tauri prerequisites + xdg-utils for xdg-open + libfuse2 for using appimagekit
sudo apt update
sudo apt install -yqq libwebkit2gtk-4.1-dev \
build-essential \
curl \
wget \
file \
libxdo-dev \
libssl-dev \
libayatana-appindicator3-dev \
librsvg2-dev \
xdg-utils \
libfuse2

View File

@ -1,24 +0,0 @@
---
name: 🐛 Bug Report
about: If something isn't working as expected 🤔
title: 'bug: '
type: Bug
---
**Version:** e.g. 0.5.x-xxx
## Describe the Bug
<!-- A clear & concise description of the bug -->
## Steps to Reproduce
1.
## Screenshots / Logs
<!-- You can find logs in: Settings -> General -> Data Folder -> App Logs -->
## Operating System
- [ ] MacOS
- [ ] Windows
- [ ] Linux

View File

@ -1,12 +0,0 @@
---
name: 🚀 Feature Request
about: Suggest an idea for this project 😻!
title: 'idea: '
type: Idea
---
## Problem Statement
<!-- Describe the problem you're facing -->
## Feature Idea
<!-- Describe what you want instead. Examples are welcome! -->

View File

@ -1,27 +0,0 @@
---
name: 🌟 Epic
about: User stories and specs
title: 'epic: '
type: Epic
---
## User Stories
- As a [user type], I can [do something] so that [outcome]
## Not in scope
-
## User Flows & Designs
- Key user flows
- Figma link
- Edge cases
- Error states
## Engineering Decisions
- **Technical Approach:** Brief outline of the solution.
- **Key Trade-offs:** What's been considered/rejected and why.
- **Dependencies:** APIs, services, libraries, teams.

View File

@ -1,24 +0,0 @@
---
name: 🎯 Goal
about: Roadmap goals for our users
title: 'goal: '
type: Goal
---
## 🎯 Goal
<!-- Short description of our goal -->
## 📖 Context
<!-- Give a description of our current context -->
## ✅ Scope
<!-- High level description of what we are going to deliver -->
## ❌ Out of Scope
<!-- What we are not targeting / delivering / discussing in this goal -->
## 🛠 Deliverables
<!-- What are the tangible deliverables for this goal -->
## ❓ Open questions
<!-- What are we not sure about and need to discuss more -->

43
.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file
View File

@ -0,0 +1,43 @@
name: "\U0001F41B Bug Report"
description: "If something isn't working as expected \U0001F914"
labels: [ "type: bug" ]
title: 'bug: [DESCRIPTION]'
body:
- type: input
validations:
required: true
attributes:
label: "Jan version"
description: "**Tip:** The version is in the app's bottom right corner"
placeholder: "e.g. 0.5.x-xxx"
- type: textarea
validations:
required: true
attributes:
label: "Describe the Bug"
description: "A clear & concise description of the bug"
- type: textarea
attributes:
label: "Steps to Reproduce"
description: |
Please list out steps to reproduce the issue
placeholder: |
1. Go to '...'
2. Click on '...'
- type: textarea
attributes:
label: "Screenshots / Logs"
description: |
You can find logs in: ~/jan/logs/app.logs
- type: checkboxes
attributes:
label: "What is your OS?"
options:
- label: MacOS
- label: Windows
- label: Linux

View File

@ -1,5 +1,7 @@
## Blank issues are currently allowed (blank_issues_enabled: true), though contributors are encouraged to use the issue templates
blank_issues_enabled: true blank_issues_enabled: true
contact_links: contact_links:
- name: Jan Discussions - name: "\U0001F4AC Jan Discussions"
url: https://github.com/orgs/janhq/discussions/categories/q-a url: "https://github.com/orgs/menloresearch/discussions/categories/q-a"
about: Get help, discuss features & roadmap, and share your projects about: "Get help, discuss features & roadmap, and share your projects"

View File

@ -0,0 +1,20 @@
name: "\U0001F680 Feature Request"
description: "Suggest an idea for this project \U0001F63B!"
title: 'idea: [DESCRIPTION]'
labels: 'type: feature request'
body:
- type: textarea
validations:
required: true
attributes:
label: "Problem Statement"
description: "Describe the problem you're facing"
placeholder: |
I'm always frustrated when ...
- type: textarea
validations:
required: true
attributes:
label: "Feature Idea"
description: "Describe what you want instead. Examples are welcome!"

View File

@ -0,0 +1,21 @@
name: "\U0001F929 Model Request"
description: "Request a new model to be compiled"
title: 'feat: [DESCRIPTION]'
labels: 'type: model request'
body:
- type: markdown
attributes:
value: "**Tip:** Download any HuggingFace model in app ([see guides](https://jan.ai/docs/models/manage-models#add-models)). Use this form for unsupported models only."
- type: textarea
validations:
required: true
attributes:
label: "Model Requests"
description: "If applicable, include the source URL, licenses, and any other relevant information"
- type: checkboxes
attributes:
label: "Which formats?"
options:
- label: GGUF (llama.cpp)
- label: TensorRT (TensorRT-LLM)
- label: ONNX (Onnx Runtime)

35
.github/ISSUE_TEMPLATE/roadmap.md vendored Normal file
View File

@ -0,0 +1,35 @@
---
name: Roadmap
about: Plan Roadmap items with subtasks
title: 'roadmap: '
labels: 'type: planning'
assignees: ''
---
## Goal
## Tasklist
### Frontend
- [ ] link to janhq/jan epics
**Bugs**
- [ ] link to bugs
### Backend
- [ ] link to janhq/cortex.cpp epics
**Bugs**
- [ ] link to bug issues
### Infra
- [ ] link to infra issues
### Administrative / Management
- [ ] link to infra issues
### Marketing
-------
## Resources

View File

@ -1,19 +0,0 @@
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/dependabot-options-reference#package-ecosystem-
version: 2
updates:
- package-ecosystem: 'cargo'
directory: 'src-tauri'
schedule:
interval: 'weekly'
open-pull-requests-limit: 0
- package-ecosystem: 'npm'
directories:
- '/'
- 'core'
- 'docs'
- 'extensions'
- 'extensions/*'
- 'web-app'
schedule:
interval: 'weekly'
open-pull-requests-limit: 0

View File

@ -1,63 +0,0 @@
#!/bin/bash
# Check if the correct number of arguments is provided
if [ "$#" -ne 2 ]; then
echo "Usage: $0 <path_to_json_input_file> <channel>"
exit 1
fi
INPUT_JSON_FILE="$1"
CHANNEL="$2"
if [ "$CHANNEL" == "nightly" ]; then
UPDATER="latest"
else
UPDATER="beta"
fi
# Check if the input file exists
if [ ! -f "$INPUT_JSON_FILE" ]; then
echo "Input file not found: $INPUT_JSON_FILE"
exit 1
fi
# Use jq to transform the content
jq --arg channel "$CHANNEL" --arg updater "$UPDATER" '
.productName = "Jan-\($channel)" |
.identifier = "jan-\($channel).ai.app"
' "$INPUT_JSON_FILE" > ./tauri.conf.json.tmp
cat ./tauri.conf.json.tmp
rm $INPUT_JSON_FILE
mv ./tauri.conf.json.tmp $INPUT_JSON_FILE
# Update Info.plist if it exists
INFO_PLIST_PATH="./src-tauri/Info.plist"
if [ -f "$INFO_PLIST_PATH" ]; then
echo "Updating Info.plist..."
# Replace jan.ai.app with jan-{channel}.ai.app
sed -i '' "s|jan\.ai\.app|jan-${CHANNEL}.ai.app|g" "$INFO_PLIST_PATH"
# Replace <string>jan</string> with <string>jan-{channel}</string>
sed -i '' "s|<string>jan</string>|<string>jan-${CHANNEL}</string>|g" "$INFO_PLIST_PATH"
echo "Info.plist updated"
cat ./src-tauri/Info.plist
fi
# Update the layout file
# LAYOUT_FILE_PATH="web/app/layout.tsx"
# if [ ! -f "$LAYOUT_FILE_PATH" ]; then
# echo "File does not exist: $LAYOUT_FILE_PATH"
# exit 1
# fi
# Perform the replacements
# sed -i -e "s#Jan#Jan-$CHANNEL#g" "$LAYOUT_FILE_PATH"
# Notify completion
# echo "File has been updated: $LAYOUT_FILE_PATH"

View File

@ -7,7 +7,6 @@ on:
jobs: jobs:
assign_milestone: assign_milestone:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
permissions: permissions:
pull-requests: write pull-requests: write
issues: write issues: write

View File

@ -1,37 +0,0 @@
name: Manual trigger AutoQA Test Runner
on:
workflow_dispatch:
inputs:
jan_app_url_windows:
description: 'URL to download Jan app for Windows (.exe)'
required: true
type: string
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.5-758_x64-setup.exe'
jan_app_url_ubuntu:
description: 'URL to download Jan app for Ubuntu (.deb)'
required: true
type: string
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.5-758_amd64.deb'
jan_app_url_macos:
description: 'URL to download Jan app for macOS (.dmg)'
required: true
type: string
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.5-758_universal.dmg'
is_nightly:
description: 'Is this a nightly build?'
required: true
type: boolean
default: true
jobs:
call-autoqa-template:
uses: ./.github/workflows/autoqa-template.yml
with:
jan_app_windows_source: ${{ inputs.jan_app_url_windows }}
jan_app_ubuntu_source: ${{ inputs.jan_app_url_ubuntu }}
jan_app_macos_source: ${{ inputs.jan_app_url_macos }}
is_nightly: ${{ inputs.is_nightly }}
source_type: 'url'
secrets:
RP_TOKEN: ${{ secrets.RP_TOKEN }}

View File

@ -1,330 +0,0 @@
name: AutoQA Migration (Manual)
on:
workflow_dispatch:
inputs:
old_windows_installer:
description: 'Windows OLD installer URL or path (.exe)'
required: true
type: string
new_windows_installer:
description: 'Windows NEW installer URL or path (.exe)'
required: true
type: string
old_ubuntu_installer:
description: 'Ubuntu OLD installer URL or path (.deb)'
required: false
type: string
default: ''
new_ubuntu_installer:
description: 'Ubuntu NEW installer URL or path (.deb)'
required: false
type: string
default: ''
old_macos_installer:
description: 'macOS OLD installer URL or path (.dmg)'
required: false
type: string
default: ''
new_macos_installer:
description: 'macOS NEW installer URL or path (.dmg)'
required: false
type: string
default: ''
migration_test_case:
description: 'Specific migration test case key (leave empty to run all)'
required: false
type: string
default: ''
max_turns:
description: 'Maximum turns per test phase'
required: false
type: number
default: 65
jobs:
migration-windows:
runs-on: windows-11-nvidia-gpu
timeout-minutes: 60
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Clean existing Jan installations
shell: powershell
run: |
.\autoqa\scripts\windows_cleanup.ps1 -IsNightly $false
- name: Download OLD and NEW installers
shell: powershell
run: |
# Download OLD installer using existing script
.\autoqa\scripts\windows_download.ps1 `
-WorkflowInputUrl "${{ inputs.old_windows_installer }}" `
-WorkflowInputIsNightly "false" `
-RepoVariableUrl "" `
-RepoVariableIsNightly "" `
-DefaultUrl "" `
-DefaultIsNightly ""
$oldSrc = Join-Path $env:TEMP 'jan-installer.exe'
$oldOut = Join-Path $env:TEMP 'jan-old.exe'
Copy-Item -Path $oldSrc -Destination $oldOut -Force
# Download NEW installer using existing script
.\autoqa\scripts\windows_download.ps1 `
-WorkflowInputUrl "${{ inputs.new_windows_installer }}" `
-WorkflowInputIsNightly "false" `
-RepoVariableUrl "" `
-RepoVariableIsNightly "" `
-DefaultUrl "" `
-DefaultIsNightly ""
$newSrc = Join-Path $env:TEMP 'jan-installer.exe'
$newOut = Join-Path $env:TEMP 'jan-new.exe'
Copy-Item -Path $newSrc -Destination $newOut -Force
Write-Host "OLD installer: $oldOut"
Write-Host "NEW installer: $newOut"
echo "OLD_VERSION=$oldOut" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "NEW_VERSION=$newOut" | Out-File -FilePath $env:GITHUB_ENV -Append
- name: Install Python dependencies
working-directory: autoqa
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Run migration tests (Windows)
working-directory: autoqa
shell: powershell
env:
RP_TOKEN: ${{ secrets.RP_TOKEN }}
ENABLE_REPORTPORTAL: 'true'
RP_ENDPOINT: 'https://reportportal.menlo.ai'
RP_PROJECT: 'default_personal'
run: |
$case = "${{ inputs.migration_test_case }}"
$caseArg = ""
if ($case -and $case.Trim() -ne "") { $caseArg = "--migration-test-case `"$case`"" }
python main.py --enable-migration-test --old-version "$env:OLD_VERSION" --new-version "$env:NEW_VERSION" --max-turns ${{ inputs.max_turns }} $caseArg
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: migration-recordings-${{ github.run_number }}-windows
path: autoqa/recordings/
- name: Upload trajectories
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: migration-trajectories-${{ github.run_number }}-windows
path: autoqa/trajectories/
- name: Cleanup after tests
if: always()
shell: powershell
run: |
.\autoqa\scripts\windows_post_cleanup.ps1 -IsNightly $false
migration-ubuntu:
if: inputs.old_ubuntu_installer != '' && inputs.new_ubuntu_installer != ''
runs-on: ubuntu-22-04-nvidia-gpu
timeout-minutes: 60
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y \
x11-utils \
python3-tk \
python3-dev \
wmctrl \
xdotool \
libnss3-dev \
libgconf-2-4 \
libxss1 \
libasound2 \
libxtst6 \
libgtk-3-0 \
libgbm-dev \
libxshmfence1 \
libxrandr2 \
libpangocairo-1.0-0 \
libatk1.0-0 \
libcairo-gobject2 \
libgdk-pixbuf2.0-0 \
gnome-screenshot \
xvfb
- name: Setup script permissions
run: |
chmod +x autoqa/scripts/setup_permissions.sh || true
./autoqa/scripts/setup_permissions.sh || true
- name: Clean existing Jan installations
run: |
./autoqa/scripts/ubuntu_cleanup.sh
- name: Download OLD and NEW installers
run: |
set -e
# Download OLD installer using existing script
./autoqa/scripts/ubuntu_download.sh \
"${{ inputs.old_ubuntu_installer }}" \
"false" \
"" \
"" \
"" \
""
cp /tmp/jan-installer.deb /tmp/jan-old.deb
# Download NEW installer using existing script
./autoqa/scripts/ubuntu_download.sh \
"${{ inputs.new_ubuntu_installer }}" \
"false" \
"" \
"" \
"" \
""
cp /tmp/jan-installer.deb /tmp/jan-new.deb
echo "OLD_VERSION=/tmp/jan-old.deb" >> $GITHUB_ENV
echo "NEW_VERSION=/tmp/jan-new.deb" >> $GITHUB_ENV
- name: Install Python dependencies
working-directory: autoqa
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Run migration tests (Ubuntu)
working-directory: autoqa
run: |
case="${{ inputs.migration_test_case }}"
caseArg=""
if [ -n "${case}" ]; then caseArg="--migration-test-case \"${case}\""; fi
xvfb-run -a python main.py --enable-migration-test --old-version "${OLD_VERSION}" --new-version "${NEW_VERSION}" --max-turns ${{ inputs.max_turns }} ${caseArg}
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: migration-recordings-${{ github.run_number }}-ubuntu
path: autoqa/recordings/
- name: Upload trajectories
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: migration-trajectories-${{ github.run_number }}-ubuntu
path: autoqa/trajectories/
- name: Cleanup after tests
if: always()
run: |
./autoqa/scripts/ubuntu_post_cleanup.sh "false"
migration-macos:
if: inputs.old_macos_installer != '' && inputs.new_macos_installer != ''
runs-on: macos-selfhosted-15-arm64-cua
timeout-minutes: 60
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Setup script permissions
run: |
chmod +x autoqa/scripts/setup_permissions.sh || true
./autoqa/scripts/setup_permissions.sh || true
- name: Clean existing Jan installations
run: |
./autoqa/scripts/macos_cleanup.sh
- name: Download OLD and NEW installers
run: |
set -e
# Download OLD installer using existing script
./autoqa/scripts/macos_download.sh \
"${{ inputs.old_macos_installer }}" \
"false" \
"" \
"" \
"" \
""
cp /tmp/jan-installer.dmg /tmp/jan-old.dmg
# Download NEW installer using existing script
./autoqa/scripts/macos_download.sh \
"${{ inputs.new_macos_installer }}" \
"false" \
"" \
"" \
"" \
""
cp /tmp/jan-installer.dmg /tmp/jan-new.dmg
echo "OLD_VERSION=/tmp/jan-old.dmg" >> $GITHUB_ENV
echo "NEW_VERSION=/tmp/jan-new.dmg" >> $GITHUB_ENV
- name: Install Python dependencies
working-directory: autoqa
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Run migration tests (macOS)
working-directory: autoqa
run: |
case="${{ inputs.migration_test_case }}"
caseArg=""
if [ -n "${case}" ]; then caseArg="--migration-test-case \"${case}\""; fi
python main.py --enable-migration-test --old-version "${OLD_VERSION}" --new-version "${NEW_VERSION}" --max-turns ${{ inputs.max_turns }} ${caseArg}
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: migration-recordings-${{ github.run_number }}-macos
path: autoqa/recordings/
- name: Upload trajectories
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: migration-trajectories-${{ github.run_number }}-macos
path: autoqa/trajectories/
- name: Cleanup after tests
if: always()
run: |
./autoqa/scripts/macos_post_cleanup.sh

View File

@ -1,121 +0,0 @@
name: AutoQA Reliability (Manual)
on:
workflow_dispatch:
inputs:
source_type:
description: 'App source type (url)'
required: true
type: choice
options: [url]
default: url
jan_app_windows_source:
description: 'Windows installer URL path (used when source_type=url or to select artifact)'
required: true
type: string
default: 'https://catalog.jan.ai/windows/Jan_0.6.8_x64-setup.exe'
jan_app_ubuntu_source:
description: 'Ubuntu .deb URL path'
required: true
type: string
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_amd64.deb'
jan_app_macos_source:
description: 'macOS .dmg URL path'
required: true
type: string
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_universal.dmg'
is_nightly:
description: 'Is the app a nightly build?'
required: true
type: boolean
default: true
reliability_phase:
description: 'Reliability phase'
required: true
type: choice
options: [development, deployment]
default: development
reliability_runs:
description: 'Custom runs (0 uses phase default)'
required: true
type: number
default: 0
reliability_test_path:
description: 'Test file path (relative to autoqa working directory)'
required: true
type: string
default: 'tests/base/settings/app-data.txt'
jobs:
reliability-windows:
runs-on: windows-11-nvidia-gpu
timeout-minutes: 60
env:
DEFAULT_JAN_APP_URL: 'https://catalog.jan.ai/windows/Jan_0.6.8_x64-setup.exe'
DEFAULT_IS_NIGHTLY: 'false'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Clean existing Jan installations
shell: powershell
run: |
.\autoqa\scripts\windows_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
- name: Download/Prepare Jan app
shell: powershell
run: |
.\autoqa\scripts\windows_download.ps1 `
-WorkflowInputUrl "${{ inputs.jan_app_windows_source }}" `
-WorkflowInputIsNightly "${{ inputs.is_nightly }}" `
-RepoVariableUrl "${{ vars.JAN_APP_URL }}" `
-RepoVariableIsNightly "${{ vars.IS_NIGHTLY }}" `
-DefaultUrl "$env:DEFAULT_JAN_APP_URL" `
-DefaultIsNightly "$env:DEFAULT_IS_NIGHTLY"
- name: Install Jan app
shell: powershell
run: |
.\autoqa\scripts\windows_install.ps1 -IsNightly "$env:IS_NIGHTLY"
- name: Install Python dependencies
working-directory: autoqa
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Run reliability tests
working-directory: autoqa
shell: powershell
run: |
$runs = "${{ inputs.reliability_runs }}"
$runsArg = ""
if ([int]$runs -gt 0) { $runsArg = "--reliability-runs $runs" }
python main.py --enable-reliability-test --reliability-phase "${{ inputs.reliability_phase }}" --reliability-test-path "${{ inputs.reliability_test_path }}" $runsArg
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: reliability-recordings-${{ github.run_number }}-${{ runner.os }}
path: autoqa/recordings/
- name: Upload trajectories
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: reliability-trajectories-${{ github.run_number }}-${{ runner.os }}
path: autoqa/trajectories/
- name: Cleanup after tests
if: always()
shell: powershell
run: |
.\autoqa\scripts\windows_post_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"

View File

@ -1,471 +0,0 @@
name: Auto QA Test Runner Template
on:
workflow_call:
inputs:
jan_app_windows_source:
description: 'Windows app source - can be URL or local path'
required: true
type: string
jan_app_ubuntu_source:
description: 'Ubuntu app source - can be URL or local path'
required: true
type: string
jan_app_macos_source:
description: 'macOS app source - can be URL or local path'
required: true
type: string
is_nightly:
description: 'Is this a nightly build?'
required: true
type: boolean
default: true
source_type:
description: 'Source type: url or local'
required: true
type: string
default: 'url'
artifact_name_windows:
description: 'Windows artifact name (only needed for local)'
required: false
type: string
default: ''
artifact_name_ubuntu:
description: 'Ubuntu artifact name (only needed for local)'
required: false
type: string
default: ''
artifact_name_macos:
description: 'macOS artifact name (only needed for local)'
required: false
type: string
default: ''
secrets:
RP_TOKEN:
description: 'ReportPortal API token'
required: true
jobs:
windows:
runs-on: windows-11-nvidia-gpu
timeout-minutes: 60
env:
DEFAULT_JAN_APP_URL: 'https://catalog.jan.ai/windows/Jan-nightly_0.6.5-758_x64-setup.exe'
DEFAULT_IS_NIGHTLY: 'true'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Download artifact (if source_type is local)
if: inputs.source_type == 'local'
uses: actions/download-artifact@v4
with:
name: ${{ inputs.artifact_name_windows }}
path: ${{ runner.temp }}/windows-artifact
- name: Clean existing Jan installations
shell: powershell
run: |
.\autoqa\scripts\windows_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
- name: Download/Prepare Jan app
shell: powershell
run: |
if ("${{ inputs.source_type }}" -eq "local") {
# Find the exe file in the artifact
$exeFile = Get-ChildItem -Path "${{ runner.temp }}/windows-artifact" -Recurse -Filter "*.exe" | Select-Object -First 1
if ($exeFile) {
Write-Host "[SUCCESS] Found local installer: $($exeFile.FullName)"
Copy-Item -Path $exeFile.FullName -Destination "$env:TEMP\jan-installer.exe" -Force
Write-Host "[SUCCESS] Installer copied to: $env:TEMP\jan-installer.exe"
# Don't set JAN_APP_PATH here - let the install script set it to the correct installed app path
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $env:GITHUB_ENV
} else {
Write-Error "[FAILED] No .exe file found in artifact"
exit 1
}
} else {
# Use the existing download script for URLs
.\autoqa\scripts\windows_download.ps1 `
-WorkflowInputUrl "${{ inputs.jan_app_windows_source }}" `
-WorkflowInputIsNightly "${{ inputs.is_nightly }}" `
-RepoVariableUrl "${{ vars.JAN_APP_URL }}" `
-RepoVariableIsNightly "${{ vars.IS_NIGHTLY }}" `
-DefaultUrl "$env:DEFAULT_JAN_APP_URL" `
-DefaultIsNightly "$env:DEFAULT_IS_NIGHTLY"
}
- name: Install Jan app
shell: powershell
run: |
.\autoqa\scripts\windows_install.ps1 -IsNightly "$env:IS_NIGHTLY"
- name: Install Python dependencies
working-directory: autoqa
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Run Auto QA Tests
working-directory: autoqa
shell: powershell
env:
RP_TOKEN: ${{ secrets.RP_TOKEN }}
ENABLE_REPORTPORTAL: 'true'
RP_ENDPOINT: 'https://reportportal.menlo.ai'
RP_PROJECT: 'default_personal'
MAX_TURNS: '50'
DELAY_BETWEEN_TESTS: '3'
LAUNCH_NAME: 'CI AutoQA Run Windows - ${{ github.run_number }} - ${{ github.ref_name }}'
run: |
.\scripts\run_tests.ps1 -JanAppPath "$env:JAN_APP_PATH" -ProcessName "$env:JAN_PROCESS_NAME" -RpToken "$env:RP_TOKEN"
- name: Collect Jan logs for artifact upload
if: always()
shell: powershell
run: |
$logDirs = @(
"$env:APPDATA\Jan-nightly\data\logs",
"$env:APPDATA\Jan\data\logs"
)
$dest = "autoqa\jan-logs"
mkdir $dest -Force | Out-Null
foreach ($dir in $logDirs) {
if (Test-Path $dir) {
Copy-Item "$dir\*.log" $dest -Force -ErrorAction SilentlyContinue
}
}
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
path: autoqa/recordings/
- name: Upload Jan logs
if: always()
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
path: autoqa/jan-logs/
- name: Cleanup after tests
if: always()
shell: powershell
run: |
.\autoqa\scripts\windows_post_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
ubuntu:
runs-on: ubuntu-22-04-nvidia-gpu
timeout-minutes: 60
env:
DEFAULT_JAN_APP_URL: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_amd64.deb'
DEFAULT_IS_NIGHTLY: 'true'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Download artifact (if source_type is local)
if: inputs.source_type == 'local'
uses: actions/download-artifact@v4
with:
name: ${{ inputs.artifact_name_ubuntu }}
path: ${{ runner.temp }}/ubuntu-artifact
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y \
x11-utils \
python3-tk \
python3-dev \
wmctrl \
xdotool \
libnss3-dev \
libgconf-2-4 \
libxss1 \
libasound2 \
libxtst6 \
libgtk-3-0 \
libgbm-dev \
libxshmfence1 \
libxrandr2 \
libpangocairo-1.0-0 \
libatk1.0-0 \
libcairo-gobject2 \
libgdk-pixbuf2.0-0 \
gnome-screenshot
- name: Setup script permissions
run: |
chmod +x autoqa/scripts/setup_permissions.sh
./autoqa/scripts/setup_permissions.sh
- name: Clean existing Jan installations
run: |
./autoqa/scripts/ubuntu_cleanup.sh
- name: Download/Prepare Jan app
run: |
if [ "${{ inputs.source_type }}" = "local" ]; then
# Find the deb file in the artifact
DEB_FILE=$(find "${{ runner.temp }}/ubuntu-artifact" -name "*.deb" -type f | head -1)
if [ -n "$DEB_FILE" ]; then
echo "[SUCCESS] Found local installer: $DEB_FILE"
cp "$DEB_FILE" "/tmp/jan-installer.deb"
echo "[SUCCESS] Installer copied to: /tmp/jan-installer.deb"
echo "JAN_APP_PATH=/tmp/jan-installer.deb" >> $GITHUB_ENV
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
if [ "${{ inputs.is_nightly }}" = "true" ]; then
echo "JAN_PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
else
echo "JAN_PROCESS_NAME=Jan" >> $GITHUB_ENV
fi
else
echo "[FAILED] No .deb file found in artifact"
exit 1
fi
else
# Use the existing download script for URLs
./autoqa/scripts/ubuntu_download.sh \
"${{ inputs.jan_app_ubuntu_source }}" \
"${{ inputs.is_nightly }}" \
"${{ vars.JAN_APP_URL_LINUX }}" \
"${{ vars.IS_NIGHTLY }}" \
"$DEFAULT_JAN_APP_URL" \
"$DEFAULT_IS_NIGHTLY"
# Set the correct environment variables for the test runner
echo "JAN_APP_PATH=/tmp/jan-installer.deb" >> $GITHUB_ENV
if [ "${{ inputs.is_nightly }}" = "true" ]; then
echo "JAN_PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
else
echo "JAN_PROCESS_NAME=Jan" >> $GITHUB_ENV
fi
fi
- name: Install Jan app
run: |
./autoqa/scripts/ubuntu_install.sh "$IS_NIGHTLY"
- name: Install Python dependencies
working-directory: autoqa
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: Run Auto QA Tests
working-directory: autoqa
env:
RP_TOKEN: ${{ secrets.RP_TOKEN }}
ENABLE_REPORTPORTAL: 'true'
RP_ENDPOINT: 'https://reportportal.menlo.ai'
RP_PROJECT: 'default_personal'
MAX_TURNS: '50'
DELAY_BETWEEN_TESTS: '3'
LAUNCH_NAME: 'CI AutoQA Run Ubuntu - ${{ github.run_number }} - ${{ github.ref_name }}'
run: |
./scripts/run_tests.sh "$JAN_APP_PATH" "$JAN_PROCESS_NAME" "$RP_TOKEN" "ubuntu"
- name: Collect Jan logs for artifact upload
if: always()
run: |
mkdir -p autoqa/jan-logs
cp ~/.local/share/Jan-nightly/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
cp ~/.local/share/Jan/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
path: autoqa/recordings/
- name: Upload Jan logs
if: always()
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
path: autoqa/jan-logs/
- name: Cleanup after tests
if: always()
run: |
./autoqa/scripts/ubuntu_post_cleanup.sh "$IS_NIGHTLY"
macos:
runs-on: macos-selfhosted-15-arm64-cua
timeout-minutes: 60
env:
DEFAULT_JAN_APP_URL: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_universal.dmg'
DEFAULT_IS_NIGHTLY: 'true'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Python 3.13
uses: actions/setup-python@v4
with:
python-version: '3.13'
- name: Download artifact (if source_type is local)
if: inputs.source_type == 'local'
uses: actions/download-artifact@v4
with:
name: ${{ inputs.artifact_name_macos }}
path: ${{ runner.temp }}/macos-artifact
- name: Setup script permissions
run: |
chmod +x autoqa/scripts/setup_permissions.sh
./autoqa/scripts/setup_permissions.sh
- name: Clean existing Jan installations
run: |
./autoqa/scripts/macos_cleanup.sh
- name: Download/Prepare Jan app
run: |
if [ "${{ inputs.source_type }}" = "local" ]; then
# Find the dmg file in the artifact
DMG_FILE=$(find "${{ runner.temp }}/macos-artifact" -name "*.dmg" -type f | head -1)
if [ -n "$DMG_FILE" ]; then
echo "[SUCCESS] Found local installer: $DMG_FILE"
cp "$DMG_FILE" "/tmp/jan-installer.dmg"
echo "[SUCCESS] Installer copied to: /tmp/jan-installer.dmg"
echo "JAN_APP_PATH=/tmp/jan-installer.dmg" >> $GITHUB_ENV
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
if [ "${{ inputs.is_nightly }}" = "true" ]; then
echo "PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
else
echo "PROCESS_NAME=Jan" >> $GITHUB_ENV
fi
else
echo "[FAILED] No .dmg file found in artifact"
exit 1
fi
else
# Use the existing download script for URLs
./autoqa/scripts/macos_download.sh \
"${{ inputs.jan_app_macos_source }}" \
"${{ inputs.is_nightly }}" \
"${{ vars.JAN_APP_URL }}" \
"${{ vars.IS_NIGHTLY }}" \
"$DEFAULT_JAN_APP_URL" \
"$DEFAULT_IS_NIGHTLY"
# Set the correct environment variables for the test runner
echo "JAN_APP_PATH=/tmp/jan-installer.dmg" >> $GITHUB_ENV
if [ "${{ inputs.is_nightly }}" = "true" ]; then
echo "PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
else
echo "PROCESS_NAME=Jan" >> $GITHUB_ENV
fi
fi
- name: Install Jan app
run: |
./autoqa/scripts/macos_install.sh
- name: Install system dependencies
run: |
echo "Installing system dependencies for macOS..."
# Check if Homebrew is available
if command -v brew >/dev/null 2>&1; then
echo "Homebrew is available"
# Install python-tk if not available
python3 -c "import tkinter" 2>/dev/null || {
echo "Installing python-tk via Homebrew..."
brew install python-tk || true
}
else
echo "Homebrew not available, checking if tkinter works..."
python3 -c "import tkinter" || {
echo "[WARNING] tkinter not available and Homebrew not found"
echo "This may cause issues with mouse control"
}
fi
echo "System dependencies check completed"
- name: Install Python dependencies
run: |
cd autoqa
echo "Installing Python dependencies..."
pip install --upgrade pip
pip install -r requirements.txt
echo "[SUCCESS] Python dependencies installed"
- name: Setup ReportPortal environment
run: |
echo "Setting up ReportPortal environment..."
echo "RP_TOKEN=${{ secrets.RP_TOKEN }}" >> $GITHUB_ENV
echo "ReportPortal environment configured"
- name: Run E2E tests
env:
RP_TOKEN: ${{ secrets.RP_TOKEN }}
ENABLE_REPORTPORTAL: 'true'
RP_ENDPOINT: 'https://reportportal.menlo.ai'
RP_PROJECT: 'default_personal'
MAX_TURNS: '50'
DELAY_BETWEEN_TESTS: '3'
LAUNCH_NAME: 'CI AutoQA Run Macos - ${{ github.run_number }} - ${{ github.ref_name }}'
run: |
cd autoqa
echo "Starting E2E test execution..."
echo "Environment variables:"
echo "JAN_APP_PATH: $JAN_APP_PATH"
echo "PROCESS_NAME: $PROCESS_NAME"
echo "IS_NIGHTLY: $IS_NIGHTLY"
./scripts/run_tests.sh "$JAN_APP_PATH" "$PROCESS_NAME" "$RP_TOKEN" "macos"
- name: Collect Jan logs for artifact upload
if: always()
run: |
mkdir -p autoqa/jan-logs
cp ~/Library/Application\ Support/Jan-nightly/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
cp ~/Library/Application\ Support/Jan/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
- name: Upload screen recordings
if: always()
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
path: autoqa/recordings/
- name: Upload Jan logs
if: always()
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
path: autoqa/jan-logs/
- name: Cleanup after tests
if: always()
run: |
./autoqa/scripts/macos_post_cleanup.sh

View File

@ -1,31 +0,0 @@
# Workflow: runs Claude Code's /dedupe slash command against a GitHub issue to
# find and comment likely duplicates. Triggers on every newly opened issue, or
# manually via workflow_dispatch with an explicit issue number.
name: Claude Issue Dedupe
description: Automatically dedupe GitHub issues using Claude Code
on:
issues:
types: [opened]
workflow_dispatch:
inputs:
issue_number:
description: 'Issue number to process for duplicate detection'
required: true
type: string
jobs:
claude-dedupe-issues:
runs-on: ubuntu-latest
# Bounded at 10 minutes so a stuck agent run cannot hold the runner.
timeout-minutes: 10
permissions:
contents: read
issues: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Run Claude Code dedupe
uses: anthropics/claude-code-base-action@beta
with:
# Issue number falls back to the workflow_dispatch input when this run
# was not triggered by an `issues: opened` event.
prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
claude_env: |
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,16 +0,0 @@
# Workflow: adds every newly opened issue to the org-level GitHub project
# board identified by the ORG_NAME / JAN_PROJECT_NUMBER repository variables.
name: Adds all issues to project board
on:
issues:
types:
- opened
jobs:
add-to-project:
name: Add issue to project
runs-on: ubuntu-latest
steps:
# NOTE(review): a PAT (AUTO_ADD_TICKET_PAT) is supplied instead of the
# default GITHUB_TOKEN — presumably required for org-project write access;
# confirm the PAT has the `project` scope.
- uses: actions/add-to-project@v1.0.2
with:
project-url: https://github.com/orgs/${{ vars.ORG_NAME }}/projects/${{ vars.JAN_PROJECT_NUMBER }}
github-token: ${{ secrets.AUTO_ADD_TICKET_PAT }}

View File

@ -1,145 +0,0 @@
name: Jan Astro Docs
on:
push:
branches:
- dev
paths:
- 'website/**'
- '.github/workflows/jan-astro-docs.yml'
pull_request:
paths:
- 'website/**'
- '.github/workflows/jan-astro-docs.yml'
# Review gh actions docs if you want to further define triggers, paths, etc
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
workflow_dispatch:
inputs:
update_cloud_spec:
description: 'Update Jan Server API specification'
required: false
default: 'false'
type: choice
options:
- 'true'
- 'false'
schedule:
# Run daily at 2 AM UTC to sync with Jan Server updates
- cron: '0 2 * * *'
jobs:
deploy:
name: Deploy to CloudFlare Pages
env:
CLOUDFLARE_PROJECT_NAME: astro-docs # docs.jan.ai
runs-on: ubuntu-latest
permissions:
contents: write
deployments: write
pull-requests: write
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 20
- uses: oven-sh/setup-bun@v2
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Fill env vars
continue-on-error: true
working-directory: website
run: |
env_example_file=".env.example"
touch .env
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ "$line" == *"="* ]]; then
var_name=$(echo $line | cut -d '=' -f 1)
echo $var_name
var_value="$(jq -r --arg key "$var_name" '.[$key]' <<< "$SECRETS")"
echo "$var_name=$var_value" >> .env
fi
done < "$env_example_file"
env:
SECRETS: '${{ toJson(secrets) }}'
- name: Install dependencies
working-directory: website
run: bun install
- name: Update Jan Server API Spec (Scheduled/Manual)
if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.update_cloud_spec == 'true')
working-directory: website
continue-on-error: true
run: |
echo "📡 Updating Jan Server API specification..."
bun run generate:cloud-spec
# Check if the spec file was updated
if git diff --quiet public/openapi/cloud-openapi.json; then
echo "✅ No changes to API specification"
else
echo "📝 API specification updated"
# Commit the changes if this is a scheduled run on main branch
if [ "${{ github.event_name }}" = "schedule" ] && [ "${{ github.ref }}" = "refs/heads/dev" ]; then
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git add public/openapi/cloud-openapi.json
git commit -m "chore: update Jan Server API specification [skip ci]"
git push
fi
fi
env:
JAN_SERVER_SPEC_URL: ${{ secrets.JAN_SERVER_SPEC_URL || 'https://api.jan.ai/api/swagger/doc.json' }}
JAN_SERVER_PROD_URL: ${{ secrets.JAN_SERVER_PROD_URL || 'https://api.jan.ai/v1' }}
- name: Build website
working-directory: website
run: |
# For PR and regular pushes, skip cloud spec generation in prebuild
# It will use the existing committed spec or fallback
if [ "${{ github.event_name }}" = "pull_request" ] || [ "${{ github.event_name }}" = "push" ]; then
echo "Using existing cloud spec for build"
export SKIP_CLOUD_SPEC_UPDATE=true
fi
bun run build
env:
SKIP_CLOUD_SPEC_UPDATE: ${{ github.event_name == 'pull_request' || github.event_name == 'push' }}
- name: copy redirects and headers
continue-on-error: true
working-directory: website
run: |
cp _redirects dist/_redirects
cp _headers dist/_headers
- name: Publish to Cloudflare Pages PR Preview and Staging
if: github.event_name == 'pull_request'
uses: cloudflare/pages-action@v1
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
directory: ./website/dist
# Optional: Enable this if you want to have GitHub Deployments triggered
gitHubToken: ${{ secrets.GITHUB_TOKEN }}
id: deployCloudflarePages
- uses: mshick/add-pr-comment@v2
if: github.event_name == 'pull_request'
with:
message: |
Preview URL Astro Docs: ${{ steps.deployCloudflarePages.outputs.url }}
- name: Publish to Cloudflare Pages Production
if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev')
uses: cloudflare/pages-action@v1
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
directory: ./website/dist
branch: main
# Optional: Enable this if you want to have GitHub Deployments triggered
gitHubToken: ${{ secrets.GITHUB_TOKEN }}

View File

@ -0,0 +1,63 @@
name: Deploy Docs on new release
on:
release:
types:
- published
- edited
- released
jobs:
deploy:
name: Deploy to CloudFlare Pages
env:
CLOUDFLARE_PROJECT_NAME: docs
runs-on: ubuntu-latest
permissions:
contents: write
deployments: write
pull-requests: write
steps:
- uses: actions/checkout@v4
with:
ref: dev
- uses: actions/setup-node@v3
with:
node-version: 18
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Fill env vars
working-directory: docs
run: |
env_example_file=".env.example"
touch .env
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ "$line" == *"="* ]]; then
var_name=$(echo $line | cut -d '=' -f 1)
echo $var_name
var_value="$(jq -r --arg key "$var_name" '.[$key]' <<< "$SECRETS")"
echo "$var_name=$var_value" >> .env
fi
done < "$env_example_file"
env:
SECRETS: '${{ toJson(secrets) }}'
- name: Install dependencies
working-directory: docs
run: yarn install
- name: Build website
working-directory: docs
run: export NODE_ENV=production && yarn build && cp _redirects out/_redirects && cp _headers out/_headers
- name: Publish to Cloudflare Pages Production
uses: cloudflare/pages-action@v1
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
directory: ./docs/out
branch: main
# Optional: Enable this if you want to have GitHub Deployments triggered
gitHubToken: ${{ secrets.GITHUB_TOKEN }}

View File

@ -26,10 +26,10 @@ jobs:
deployments: write deployments: write
pull-requests: write pull-requests: write
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3
with: with:
node-version: 20 node-version: 18
- name: Install jq - name: Install jq
uses: dcarbone/install-jq-action@v2.0.1 uses: dcarbone/install-jq-action@v2.0.1
@ -53,9 +53,6 @@ jobs:
- name: Install dependencies - name: Install dependencies
working-directory: docs working-directory: docs
run: yarn install run: yarn install
- name: Clean output directory
working-directory: docs
run: rm -rf out/* .next/*
- name: Build website - name: Build website
working-directory: docs working-directory: docs
run: export NODE_ENV=production && yarn build && cp _redirects out/_redirects && cp _headers out/_headers run: export NODE_ENV=production && yarn build && cp _redirects out/_redirects && cp _headers out/_headers
@ -79,7 +76,7 @@ jobs:
Preview URL: ${{ steps.deployCloudflarePages.outputs.url }} Preview URL: ${{ steps.deployCloudflarePages.outputs.url }}
- name: Publish to Cloudflare Pages Production - name: Publish to Cloudflare Pages Production
if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && startsWith(github.ref, 'refs/heads/release/')) if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev')
uses: cloudflare/pages-action@v1 uses: cloudflare/pages-action@v1
with: with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}

View File

@ -0,0 +1,86 @@
name: Electron Builder - Beta Build
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
jobs:
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-build-macos.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
sync-temp-to-latest:
needs: [build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: Sync temp to latest
run: |
# sync temp-beta to beta by copy files that are different or new
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
noti-discord-and-update-url-readme:
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
runs-on: ubuntu-latest
steps:
- name: Set version to environment variable
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
VERSION="${VERSION#v}"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Notify Discord
uses: Ilshidur/action-discord@master
with:
args: |
Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information:
- Windows: https://delta.jan.ai/beta/jan-beta-win-x64-{{ VERSION }}.exe
- macOS Universal: https://delta.jan.ai/beta/jan-beta-mac-universal-{{ VERSION }}.dmg
- Linux Deb: https://delta.jan.ai/beta/jan-beta-linux-amd64-{{ VERSION }}.deb
- Linux AppImage: https://delta.jan.ai/beta/jan-beta-linux-x86_64-{{ VERSION }}.AppImage
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}

View File

@ -0,0 +1,150 @@
name: Electron Builder - Nightly / Manual
on:
schedule:
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
workflow_dispatch:
inputs:
public_provider:
type: choice
description: 'Public Provider'
options:
- none
- aws-s3
default: none
pull_request_review:
types: [submitted]
jobs:
# Decides which public provider (aws-s3 or none) and which git ref the
# downstream build jobs should use, based on how this workflow was triggered:
#   workflow_dispatch -> the user-selected provider, current ref
#   schedule          -> aws-s3, refs/heads/dev (nightly)
#   push              -> aws-s3, current ref
#   anything else     -> none, current ref
set-public-provider:
  runs-on: ubuntu-latest
  outputs:
    public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
    ref: ${{ steps.set-public-provider.outputs.ref }}
  steps:
    - name: Set public provider
      id: set-public-provider
      # FIX: the deprecated `::set-output` workflow command (disabled on
      # current GitHub-hosted runners) is replaced with the $GITHUB_OUTPUT
      # environment file. Output names and values are unchanged.
      run: |
        if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
          echo "public_provider=${{ github.event.inputs.public_provider }}" >> $GITHUB_OUTPUT
          echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
        else
          if [ "${{ github.event_name }}" == "schedule" ]; then
            echo "public_provider=aws-s3" >> $GITHUB_OUTPUT
            echo "ref=refs/heads/dev" >> $GITHUB_OUTPUT
          elif [ "${{ github.event_name }}" == "push" ]; then
            echo "public_provider=aws-s3" >> $GITHUB_OUTPUT
            echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
          elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
            echo "public_provider=none" >> $GITHUB_OUTPUT
            echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
          else
            echo "public_provider=none" >> $GITHUB_OUTPUT
            echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
          fi
        fi
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-build-macos.yml
needs: [get-update-version, set-public-provider]
secrets: inherit
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
nightly: true
beta: false
cortex_api_port: "39261"
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
nightly: true
beta: false
cortex_api_port: "39261"
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
nightly: true
beta: false
cortex_api_port: "39261"
sync-temp-to-latest:
needs: [set-public-provider, build-windows-x64, build-linux-x64, build-macos]
runs-on: ubuntu-latest
steps:
- name: Sync temp to latest
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
run: |
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
noti-discord-nightly-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
secrets: inherit
if: github.event_name == 'schedule'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Nightly
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-pre-release-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
secrets: inherit
if: github.event_name == 'push'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Pre-release
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-manual-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
secrets: inherit
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Manual
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
comment-pr-build-url:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
runs-on: ubuntu-latest
if: github.event_name == 'pull_request_review'
steps:
- name: Set up GitHub CLI
run: |
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
- name: Comment build URL on PR
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_URL=${{ github.event.pull_request.html_url }}
RUN_ID=${{ github.run_id }}
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
gh pr comment $PR_URL --body "$COMMENT"

View File

@ -0,0 +1,91 @@
name: Electron Builder - Tag
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+"]
jobs:
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
# Creates a draft GitHub release for the pushed version tag and exposes
# upload_url plus the version string (tag without the leading "v") as job
# outputs for downstream jobs.
create-draft-release:
  runs-on: ubuntu-latest
  if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
  outputs:
    upload_url: ${{ steps.create_release.outputs.upload_url }}
    version: ${{ steps.get_version.outputs.version }}
  permissions:
    contents: write
  steps:
    - name: Extract tag name without v prefix
      id: get_version
      # FIX: replaced the deprecated `::set-output` workflow command with the
      # $GITHUB_OUTPUT environment file. VERSION is still exported via
      # $GITHUB_ENV because the release-name field below reads env.VERSION.
      run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
      env:
        GITHUB_REF: ${{ github.ref }}
    - name: Create Draft Release
      id: create_release
      uses: softprops/action-gh-release@v2
      with:
        tag_name: ${{ github.ref_name }}
        token: ${{ secrets.GITHUB_TOKEN }}
        name: "${{ env.VERSION }}"
        draft: true
        prerelease: false
build-macos:
uses: ./.github/workflows/template-build-macos.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
beta: false
nightly: false
new_version: ${{ needs.get-update-version.outputs.new_version }}
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
beta: false
nightly: false
new_version: ${{ needs.get-update-version.outputs.new_version }}
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
beta: false
nightly: false
new_version: ${{ needs.get-update-version.outputs.new_version }}
update_release_draft:
needs: [build-macos, build-windows-x64, build-linux-x64]
permissions:
# write permission is required to create a github release
contents: write
# write permission is required for autolabeler
# otherwise, read permission is required at least
pull-requests: write
runs-on: ubuntu-latest
steps:
# (Optional) GitHub Enterprise requires GHE_HOST variable set
#- name: Set GHE_HOST
# run: |
# echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
# Drafts your next Release notes as Pull Requests are merged into "master"
- uses: release-drafter/release-drafter@v5
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
# with:
# config-name: my-config.yml
# disable-autolabeler: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -0,0 +1,379 @@
name: Test - Linter & Playwright
on:
workflow_dispatch:
push:
branches:
- main
- dev
paths:
- 'electron/**'
- .github/workflows/jan-electron-linter-and-test.yml
- 'web/**'
- 'joi/**'
- 'package.json'
- 'node_modules/**'
- 'yarn.lock'
- 'core/**'
- 'extensions/**'
- '!README.md'
- 'Makefile'
pull_request:
branches:
- main
- dev
- release/**
paths:
- 'electron/**'
- .github/workflows/jan-electron-linter-and-test.yml
- 'web/**'
- 'joi/**'
- 'package.json'
- 'node_modules/**'
- 'yarn.lock'
- 'Makefile'
- 'extensions/**'
- 'core/**'
- '!README.md'
jobs:
base_branch_cov:
runs-on: ubuntu-latest
continue-on-error: true
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.base_ref }}
- name: Use Node.js 20.x
uses: actions/setup-node@v3
with:
node-version: 20
- name: Install dependencies
run: |
make config-yarn
yarn
yarn build:joi
yarn build:core
- name: Run test coverage
run: yarn test:coverage
- name: Upload code coverage for ref branch
uses: actions/upload-artifact@v4
with:
name: ref-lcov.info
path: ./coverage/lcov.info
# Runs linter + Playwright tests on macOS for same-repo pull requests, pushes,
# and manual dispatches (forked PRs are covered by test-on-macos-pr-target).
test-on-macos:
  if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
  runs-on: macos-latest
  steps:
    - name: Getting the repo
      uses: actions/checkout@v3
      with:
        fetch-depth: 0
    - name: Installing node
      uses: actions/setup-node@v3
      with:
        node-version: 20
    - name: Set IS_TEST environment variable
      run: |
        echo "IS_TEST=true" >> $GITHUB_ENV
    - name: 'Cleanup cache'
      continue-on-error: true
      run: |
        rm -rf ~/jan
        make clean
    # FIX: both exported values previously ended with a stray ')' —
    # e.g. "…=${{github.event.after}})" — which polluted the ReportPortal
    # description string. The sibling Linux/Windows jobs export the value
    # without the paren; these now match.
    - name: Get Commit Message for PR
      if: github.event_name == 'pull_request'
      run: |
        echo "REPORT_PORTAL_DESCRIPTION=${{github.event.after}}" >> $GITHUB_ENV
    - name: Get Commit Message for push event
      if: github.event_name == 'push'
      run: |
        echo "REPORT_PORTAL_DESCRIPTION=${{github.sha}}" >> $GITHUB_ENV
    # - name: 'Config report portal'
    #   run: |
    #     make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App macos" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
    - name: Linter and test
      run: |
        make test
      env:
        # Skip macOS code-signing identity discovery — not needed for tests.
        CSC_IDENTITY_AUTO_DISCOVERY: 'false'
test-on-macos-pr-target:
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository
runs-on: macos-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
- name: Linter and test
run: |
make test
env:
CSC_IDENTITY_AUTO_DISCOVERY: 'false'
test-on-windows:
if: github.event_name == 'push'
strategy:
fail-fast: false
matrix:
antivirus-tools: ['mcafee', 'default-windows-security', 'bit-defender']
runs-on: windows-desktop-${{ matrix.antivirus-tools }}
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
# Clean cache, continue on error
- name: 'Cleanup cache'
shell: powershell
continue-on-error: true
run: |
$path = "$Env:APPDATA\jan"
if (Test-Path $path) {
Remove-Item "\\?\$path" -Recurse -Force
} else {
Write-Output "Folder does not exist."
}
make clean
- name: Get Commit Message for push event
if: github.event_name == 'push'
shell: bash
run: |
echo "REPORT_PORTAL_DESCRIPTION=${{github.sha}}" >> $GITHUB_ENV
# - name: 'Config report portal'
# shell: bash
# run: |
# make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App Windows ${{ matrix.antivirus-tools }}" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
- name: Linter and test
shell: powershell
run: |
make test
test-on-windows-pr:
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'workflow_dispatch'
runs-on: windows-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
# Clean cache, continue on error
- name: 'Cleanup cache'
shell: powershell
continue-on-error: true
run: |
$path = "$Env:APPDATA\jan"
if (Test-Path $path) {
Remove-Item "\\?\$path" -Recurse -Force
} else {
Write-Output "Folder does not exist."
}
make clean
- name: Get Commit Message for PR
if: github.event_name == 'pull_request'
shell: bash
run: |
echo "REPORT_PORTAL_DESCRIPTION=${{github.event.after}}" >> $GITHUB_ENV
# - name: 'Config report portal'
# shell: bash
# run: |
# make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App Windows" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
- name: Linter and test
shell: powershell
run: |
make test
# Lint + test job for pull requests coming from forks
# (head repo != base repo), run on a GitHub-hosted Windows runner.
test-on-windows-pr-target:
  if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository
  runs-on: windows-latest
  steps:
    - name: Getting the repo
      uses: actions/checkout@v3
      with:
        fetch-depth: 0
    - name: Installing node
      # FIX: was actions/setup-node@v1 (long-deprecated) — inconsistent with
      # every other job in this workflow, which uses @v3.
      uses: actions/setup-node@v3
      with:
        node-version: 20
    # Clean cache, continue on error
    - name: 'Cleanup cache'
      shell: powershell
      continue-on-error: true
      run: |
        $path = "$Env:APPDATA\jan"
        if (Test-Path $path) {
          Remove-Item "\\?\$path" -Recurse -Force
        } else {
          Write-Output "Folder does not exist."
        }
        make clean
    - name: Linter and test
      shell: powershell
      run: |
        make test
test-on-ubuntu:
runs-on: ubuntu-latest
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
- name: Get Commit Message for PR
if: github.event_name == 'pull_request'
run: |
echo "REPORT_PORTAL_DESCRIPTION=${{github.event.after}}" >> $GITHUB_ENV
- name: Get Commit Message for push event
if: github.event_name == 'push'
run: |
echo "REPORT_PORTAL_DESCRIPTION=${{github.sha}}" >> $GITHUB_ENV
# - name: 'Config report portal'
# shell: bash
# run: |
# make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App Linux" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
- name: Linter and test
run: |
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
echo -e "Display ID: $DISPLAY"
make test
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report
path: electron/playwright-report/
retention-days: 2
coverage-check:
runs-on: ubuntu-latest
needs: base_branch_cov
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: Install yarn
run: npm install -g yarn
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
- name: Download code coverage report from base branch
uses: actions/download-artifact@v4
with:
name: ref-lcov.info
- name: Linter and test coverage
run: |
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
echo -e "Display ID: $DISPLAY"
make lint
yarn build:test
yarn test:coverage
- name: Generate Code Coverage report
id: code-coverage
uses: barecheck/code-coverage-action@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
lcov-file: './coverage/lcov.info'
base-lcov-file: './lcov.info'
send-summary-comment: true
show-annotations: 'warning'
test-on-ubuntu-pr-target:
runs-on: ubuntu-latest
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
- name: Linter and test
run: |
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
echo -e "Display ID: $DISPLAY"
make test

View File

@ -1,257 +0,0 @@
name: Linter & Test
on:
workflow_dispatch:
push:
branches:
- main
- dev
paths:
- .github/workflows/jan-linter-and-test.yml
- 'web/**'
- 'joi/**'
- 'package.json'
- 'node_modules/**'
- 'yarn.lock'
- 'core/**'
- 'extensions/**'
- '!README.md'
- 'Makefile'
pull_request:
branches:
- main
- dev
- release/**
paths:
- .github/workflows/jan-linter-and-test.yml
- 'web/**'
- 'joi/**'
- 'package.json'
- 'node_modules/**'
- 'yarn.lock'
- 'Makefile'
- 'extensions/**'
- 'core/**'
- 'src-tauri/**'
- 'web-app/**'
- '!README.md'
jobs:
base_branch_cov:
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
runs-on: ubuntu-latest
continue-on-error: true
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.base_ref }}
- name: Use Node.js 20.x
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
- name: Install dependencies
run: |
make lint
- name: Run test coverage
run: |
yarn test:coverage
- name: Upload code coverage for ref branch
uses: actions/upload-artifact@v4
with:
name: ref-lcov.info
path: coverage/lcov.info
test-on-macos:
runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'macos-latest' || 'macos-selfhosted-15-arm64' }}
if: github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
- name: Linter and test
run: |
make test
env:
CSC_IDENTITY_AUTO_DISCOVERY: 'false'
test-on-windows:
if: github.event_name == 'push'
strategy:
fail-fast: false
matrix:
antivirus-tools: ['mcafee', 'default-windows-security', 'bit-defender']
runs-on: windows-desktop-${{ matrix.antivirus-tools }}
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
# Clean cache, continue on error
- name: 'Cleanup cache'
shell: powershell
continue-on-error: true
run: |
$path = "$Env:APPDATA\jan"
if (Test-Path $path) {
Remove-Item "\\?\$path" -Recurse -Force
} else {
Write-Output "Folder does not exist."
}
make clean
- name: Linter and test
shell: powershell
run: |
make test
test-on-windows-pr:
if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch'
runs-on: 'windows-latest'
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: install dependencies
run: |
choco install --yes --no-progress make
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
shell: powershell
continue-on-error: true
run: |
$path = "$Env:APPDATA\jan"
if (Test-Path $path) {
Remove-Item "\\?\$path" -Recurse -Force
} else {
Write-Output "Folder does not exist."
}
make clean
- name: Install WebView2 Runtime (Bootstrapper)
shell: powershell
run: |
Invoke-WebRequest -Uri 'https://go.microsoft.com/fwlink/p/?LinkId=2124703' -OutFile 'setup.exe'
Start-Process -FilePath setup.exe -Verb RunAs -Wait
- name: Linter and test
shell: powershell
run: |
make test
env:
NODE_OPTIONS: '--max-old-space-size=2048'
# Runs linter + Playwright tests on Ubuntu with the Tauri system dependencies
# installed; uploads the Playwright HTML report as an artifact.
test-on-ubuntu:
  # FIX: runs-on previously used a conditional expression whose two branches
  # were both 'ubuntu-latest' — dead logic replaced with the plain label
  # (behavior unchanged).
  runs-on: ubuntu-latest
  if: github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
  steps:
    - name: Getting the repo
      uses: actions/checkout@v3
      with:
        fetch-depth: 0
    - name: Installing node
      uses: actions/setup-node@v3
      with:
        node-version: 20
    - name: Install Tauri dependencies
      run: |
        sudo apt update
        sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 webkit2gtk-driver
    - name: 'Cleanup cache'
      continue-on-error: true
      run: |
        rm -rf ~/jan
        make clean
    - name: Linter and test
      run: |
        # Resolve the active X display for the WebDriver/Playwright session.
        export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
        echo -e "Display ID: $DISPLAY"
        make test
    # Always upload the Playwright report, even when the test step fails.
    - uses: actions/upload-artifact@v4
      if: always()
      with:
        name: playwright-report
        path: electron/playwright-report/
        retention-days: 2
# Compares PR test coverage against the base branch and comments a summary.
# Only runs for same-repo PRs (forks lack the secrets/artifacts needed).
coverage-check:
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
runs-on: ubuntu-latest
# Depends on a job that produced the base-branch lcov artifact (ref-lcov.info).
needs: base_branch_cov
# Coverage regression is advisory only; it must not block the PR.
continue-on-error: true
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
# NOTE(review): this step is named "Install dependencies" but actually runs
# `make lint` — presumably the lint target installs dependencies as a side
# effect; confirm and rename the step accordingly.
- name: Install dependencies
run: |
make lint
- name: Run test coverage
run: |
yarn test:coverage
- name: Download code coverage report from base branch
uses: actions/download-artifact@v4
with:
name: ref-lcov.info
- name: Generate Code Coverage report
id: code-coverage
uses: barecheck/code-coverage-action@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
# Coverage produced by this PR run vs. the base-branch artifact above.
lcov-file: './coverage/lcov.info'
base-lcov-file: './lcov.info'
send-summary-comment: true
show-annotations: 'warning'

View File

@ -1,60 +0,0 @@
# Builds the Jan web Docker image and, on pushes to dev-web, publishes it to
# the private Harbor registry. PR builds are tagged "preview-<sha>" but only
# built, never pushed.
name: Jan Web Server build image and push to Harbor Registry
on:
push:
branches:
- dev-web
pull_request:
branches:
- dev-web
jobs:
build-and-preview:
# Self-hosted runner label with Docker available.
runs-on: [ubuntu-24-04-docker]
env:
MENLO_PLATFORM_BASE_URL: "https://api-dev.jan.ai/v1"
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout source repo
uses: actions/checkout@v4
- name: Login to Harbor Registry
uses: docker/login-action@v3
with:
registry: registry.menlo.ai
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_PASSWORD }}
# NOTE(review): this step configures the GitHub CLI apt repository but then
# only installs jq and gettext — gh itself is never apt-installed here.
# Confirm whether gh is actually needed by later steps or pre-baked into
# the runner image; if not, the apt-repo setup can be dropped.
- name: Install dependencies
run: |
(type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \
&& sudo mkdir -p -m 755 /etc/apt/keyrings \
&& out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
&& cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
&& sudo mkdir -p -m 755 /etc/apt/sources.list.d \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& sudo apt update
sudo apt-get install -y jq gettext
# Tag PR builds as preview-<sha>, push builds as dev-<sha>.
- name: Set image tag
id: vars
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
IMAGE_TAG="web:preview-${{ github.sha }}"
else
IMAGE_TAG="web:dev-${{ github.sha }}"
fi
echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_OUTPUT
echo "FULL_IMAGE=registry.menlo.ai/jan-server/${IMAGE_TAG}" >> $GITHUB_OUTPUT
- name: Build docker image
run: |
docker build --build-arg MENLO_PLATFORM_BASE_URL=${{ env.MENLO_PLATFORM_BASE_URL }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
# Only pushes (merges to dev-web) publish the image; PR builds stop here.
- name: Push docker image
if: github.event_name == 'push'
run: |
docker push ${{ steps.vars.outputs.FULL_IMAGE }}

View File

@ -1,58 +0,0 @@
# Builds the web app and deploys it to Cloudflare Pages production on every
# push to the prod-web branch.
name: Jan Web Server deploy to production
on:
push:
branches:
- prod-web
jobs:
build-and-deploy:
runs-on: ubuntu-latest
permissions:
contents: write
deployments: write
pull-requests: write
env:
MENLO_PLATFORM_BASE_URL: "https://api.jan.ai/v1"
GA_MEASUREMENT_ID: "G-YK53MX8M8M"
CLOUDFLARE_PROJECT_NAME: "jan-server-web"
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
# The block below (generating .env from .env.example + repo secrets) is
# intentionally disabled; env vars are passed directly to the build step
# instead. Kept for reference.
# - name: Fill env vars
# run: |
# env_example_file=".env.example"
# touch .env
# while IFS= read -r line || [[ -n "$line" ]]; do
# if [[ "$line" == *"="* ]]; then
# var_name=$(echo $line | cut -d '=' -f 1)
# echo $var_name
# var_value="$(jq -r --arg key "$var_name" '.[$key]' <<< "$SECRETS")"
# echo "$var_name=$var_value" >> .env
# fi
# done < "$env_example_file"
# env:
# SECRETS: '${{ toJson(secrets) }}'
- name: Install dependencies
run: make config-yarn && yarn install && yarn build:core && make build-web-app
env:
MENLO_PLATFORM_BASE_URL: ${{ env.MENLO_PLATFORM_BASE_URL }}
GA_MEASUREMENT_ID: ${{ env.GA_MEASUREMENT_ID }}
# NOTE(review): cloudflare/pages-action is in maintenance mode upstream;
# consider migrating to cloudflare/wrangler-action when convenient.
- name: Publish to Cloudflare Pages Production
uses: cloudflare/pages-action@v1
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
directory: ./web-app/dist-web
branch: main
# Optional: Enable this if you want to have GitHub Deployments triggered
gitHubToken: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,60 +0,0 @@
name: Jan Web Server build image and push to Harbor Registry
on:
push:
branches:
- stag-web
pull_request:
branches:
- stag-web
jobs:
build-and-preview:
runs-on: [ubuntu-24-04-docker]
env:
MENLO_PLATFORM_BASE_URL: "https://api-stag.jan.ai/v1"
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout source repo
uses: actions/checkout@v4
- name: Login to Harbor Registry
uses: docker/login-action@v3
with:
registry: registry.menlo.ai
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_PASSWORD }}
- name: Install dependencies
run: |
(type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \
&& sudo mkdir -p -m 755 /etc/apt/keyrings \
&& out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
&& cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
&& sudo mkdir -p -m 755 /etc/apt/sources.list.d \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& sudo apt update
sudo apt-get install -y jq gettext
- name: Set image tag
id: vars
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
IMAGE_TAG="web:preview-${{ github.sha }}"
else
IMAGE_TAG="web:stag-${{ github.sha }}"
fi
echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_OUTPUT
echo "FULL_IMAGE=registry.menlo.ai/jan-server/${IMAGE_TAG}" >> $GITHUB_OUTPUT
- name: Build docker image
run: |
docker build --build-arg MENLO_PLATFORM_BASE_URL=${{ env.MENLO_PLATFORM_BASE_URL }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
- name: Push docker image
if: github.event_name == 'push'
run: |
docker push ${{ steps.vars.outputs.FULL_IMAGE }}

View File

@ -1,156 +0,0 @@
name: Tauri Builder - Beta Build
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
jobs:
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
# Creates a draft GitHub release for the pushed beta tag and exposes its
# upload_url/version for the downstream build jobs.
create-draft-release:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }}
version: ${{ steps.get_version.outputs.version }}
permissions:
contents: write
steps:
- name: Extract tag name without v prefix
id: get_version
# ::set-output was deprecated and disabled by GitHub Actions; step outputs
# are written as key=value lines appended to $GITHUB_OUTPUT instead.
# Output name ("version") and consumers are unchanged.
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
env:
GITHUB_REF: ${{ github.ref }}
- name: Create Draft Release
id: create_release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ github.ref_name }}
token: ${{ secrets.GITHUB_TOKEN }}
name: "${{ env.VERSION }}"
draft: true
prerelease: false
generate_release_notes: true
# Platform build jobs: all three delegate to reusable workflow templates and
# share the same version/channel/port inputs; artifacts are attached to the
# draft release created above.
build-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: beta
# Fixed port for the bundled cortex API in beta builds.
cortex_api_port: "39271"
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: beta
cortex_api_port: "39271"
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: beta
cortex_api_port: "39271"
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
# Assembles latest.json (Tauri updater manifest) from the per-platform build
# outputs, publishes it to the beta S3 prefix, and attaches it to the draft
# release.
sync-temp-to-latest:
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://delta.jan.ai/beta/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://delta.jan.ai/beta/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://delta.jan.ai/beta/Jan-beta_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
# A single universal macOS artifact serves both darwin-aarch64 and
# darwin-x86_64 updater entries.
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
- name: Sync temp to latest
run: |
# sync temp-beta to beta by copy files that are different or new
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/latest.json
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
# Fixed "assert" -> "asset" in the step name, and use the correct JSON MIME
# type (application/json; "text/json" is not a registered media type).
- name: Upload release asset if public provider is github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
asset_path: ./latest.json
asset_name: latest.json
asset_content_type: application/json
# Announces the finished beta release on Discord with direct download links.
noti-discord-and-update-url-readme:
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
runs-on: ubuntu-latest
steps:
- name: Set version to environment variable
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
# Strip a leading "v" if present so URLs use the bare semver.
VERSION="${VERSION#v}"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Notify Discord
uses: Ilshidur/action-discord@master
with:
# The {{ VERSION }} placeholders below use the Discord action's own
# templating (env interpolation), not GitHub expressions.
args: |
Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information:
- Windows: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_x64-setup.exe
- macOS Universal: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_universal.dmg
- Linux Deb: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.deb
- Linux AppImage: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.AppImage
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}

View File

@ -1,20 +0,0 @@
# Manually-triggered Flatpak build for Linux x64. The auto-updater is
# disabled because Flatpak distribution handles updates itself.
name: Tauri Builder Flatpak
on:
workflow_dispatch:
inputs:
version:
description: 'Version to build. For example: 0.6.8'
required: false
jobs:
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64-flatpak.yml
secrets: inherit
with:
ref: ${{ github.ref }}
# No artifact upload destination; the build is retrieved from the run.
public_provider: none
channel: stable
new_version: ${{ inputs.version }}
disable_updater: true

View File

@ -1,46 +0,0 @@
# Secret-free nightly build pipeline for external (fork) PRs against dev.
# Uses the *-external workflow templates, which must not require secrets.
name: Tauri Builder - Nightly / External PRs
on:
pull_request:
branches:
- dev
# Only build when app code or the build pipeline itself changes.
paths:
- '.github/workflows/jan-tauri-build-nightly-external.yaml'
- '.github/workflows/template-tauri-build-*-external.yml'
- 'src-tauri/**'
- 'core/**'
- 'web-app/**'
- 'extensions/**'
- 'scripts/**'
- 'pre-install/**'
- 'Makefile'
- 'package.json'
jobs:
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-tauri-build-macos-external.yml
needs: [get-update-version]
with:
ref: ${{ github.ref }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64-external.yml
needs: [get-update-version]
with:
ref: ${{ github.ref }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64-external.yml
needs: [get-update-version]
with:
ref: ${{ github.ref }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
disable_updater: false

View File

@ -1,253 +0,0 @@
name: Tauri Builder - Nightly / Manual
on:
schedule:
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
workflow_dispatch:
inputs:
public_provider:
type: choice
description: 'Public Provider'
options:
- none
- aws-s3
default: none
disable_updater:
type: boolean
description: 'If true, builds both .deb and .appimage but disables auto-updater'
default: false
pull_request:
branches:
- release/**
- dev
paths:
- '.github/workflows/jan-tauri-build-nightly.yaml'
- '.github/workflows/template-get-update-version.yml'
- '.github/workflows/template-tauri-build-macos.yml'
- '.github/workflows/template-tauri-build-windows-x64.yml'
- '.github/workflows/template-tauri-build-linux-x64.yml'
- '.github/workflows/template-noti-discord-and-update-url-readme.yml'
- 'src-tauri/**'
- 'core/**'
- 'web-app/**'
- 'extensions/**'
- 'scripts/**'
- 'pre-install/**'
- 'Makefile'
- 'package.json'
jobs:
# Decides where build artifacts get published (aws-s3 or none) and which ref
# to build, based on how the workflow was triggered.
set-public-provider:
runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
outputs:
public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
ref: ${{ steps.set-public-provider.outputs.ref }}
steps:
- name: Set public provider
id: set-public-provider
# ::set-output was deprecated and disabled by GitHub Actions; outputs are
# written as key=value lines appended to $GITHUB_OUTPUT. Branch structure
# and values are unchanged.
# NOTE(review): the pull_request_review branch is identical to the final
# else and this workflow has no pull_request_review trigger — presumably
# left over from an earlier trigger set; confirm before removing.
run: |
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo "public_provider=${{ github.event.inputs.public_provider }}" >> $GITHUB_OUTPUT
echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
else
if [ "${{ github.event_name }}" == "schedule" ]; then
echo "public_provider=aws-s3" >> $GITHUB_OUTPUT
echo "ref=refs/heads/dev" >> $GITHUB_OUTPUT
elif [ "${{ github.event_name }}" == "push" ]; then
echo "public_provider=aws-s3" >> $GITHUB_OUTPUT
echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
echo "public_provider=none" >> $GITHUB_OUTPUT
echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
else
echo "public_provider=none" >> $GITHUB_OUTPUT
echo "ref=${{ github.ref }}" >> $GITHUB_OUTPUT
fi
fi
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
needs: [get-update-version, set-public-provider]
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
secrets: inherit
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
cortex_api_port: '39261'
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
cortex_api_port: '39261'
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
cortex_api_port: '39261'
disable_updater: ${{ github.event.inputs.disable_updater == 'true' }}
sync-temp-to-latest:
needs:
[
get-update-version,
set-public-provider,
build-windows-x64,
build-linux-x64,
build-macos,
]
runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
- name: Sync temp to latest
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
run: |
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
# noti-discord-nightly-and-update-url-readme:
# needs:
# [
# build-macos,
# build-windows-x64,
# build-linux-x64,
# get-update-version,
# set-public-provider,
# sync-temp-to-latest,
# ]
# secrets: inherit
# if: github.event_name == 'schedule'
# uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
# with:
# ref: refs/heads/dev
# build_reason: Nightly
# push_to_branch: dev
# new_version: ${{ needs.get-update-version.outputs.new_version }}
# noti-discord-pre-release-and-update-url-readme:
# needs:
# [
# build-macos,
# build-windows-x64,
# build-linux-x64,
# get-update-version,
# set-public-provider,
# sync-temp-to-latest,
# ]
# secrets: inherit
# if: github.event_name == 'push'
# uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
# with:
# ref: refs/heads/dev
# build_reason: Pre-release
# push_to_branch: dev
# new_version: ${{ needs.get-update-version.outputs.new_version }}
# noti-discord-manual-and-update-url-readme:
# needs:
# [
# build-macos,
# build-windows-x64,
# build-linux-x64,
# get-update-version,
# set-public-provider,
# sync-temp-to-latest,
# ]
# secrets: inherit
# if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
# uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
# with:
# ref: refs/heads/dev
# build_reason: Manual
# push_to_branch: dev
# new_version: ${{ needs.get-update-version.outputs.new_version }}
# Comments a link to the workflow-run artifacts on the PR.
# NOTE(review): gated on pull_request_review, but this workflow's triggers are
# schedule/workflow_dispatch/pull_request — as written this job never runs;
# confirm whether the condition should be pull_request instead.
comment-pr-build-url:
needs:
[
build-macos,
build-windows-x64,
build-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest,
]
runs-on: ubuntu-latest
if: github.event_name == 'pull_request_review'
steps:
# NOTE(review): ubuntu-latest hosted runners ship with gh preinstalled;
# this pinned manual install may be redundant — confirm for the runner in use.
- name: Set up GitHub CLI
run: |
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
- name: Comment build URL on PR
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_URL=${{ github.event.pull_request.html_url }}
RUN_ID=${{ github.run_id }}
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
gh pr comment $PR_URL --body "$COMMENT"

View File

@ -1,122 +0,0 @@
name: Tauri Builder - Tag
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+"]
jobs:
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
# Creates a draft GitHub release for the pushed stable tag and exposes its
# upload_url/version for the downstream build jobs.
create-draft-release:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }}
version: ${{ steps.get_version.outputs.version }}
permissions:
contents: write
steps:
- name: Extract tag name without v prefix
id: get_version
# ::set-output was deprecated and disabled by GitHub Actions; step outputs
# are written as key=value lines appended to $GITHUB_OUTPUT instead.
# Output name ("version") and consumers are unchanged.
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
env:
GITHUB_REF: ${{ github.ref }}
- name: Create Draft Release
id: create_release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ github.ref_name }}
token: ${{ secrets.GITHUB_TOKEN }}
name: "${{ env.VERSION }}"
draft: true
prerelease: false
generate_release_notes: true
build-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
channel: stable
new_version: ${{ needs.get-update-version.outputs.new_version }}
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
channel: stable
new_version: ${{ needs.get-update-version.outputs.new_version }}
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
channel: stable
new_version: ${{ needs.get-update-version.outputs.new_version }}
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
# Assembles latest.json (Tauri updater manifest) pointing at the GitHub
# release download URLs and attaches it to the draft release.
sync-temp-to-latest:
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://github.com/janhq/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://github.com/janhq/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://github.com/janhq/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-macos.outputs.TAR_NAME }}"
# A single universal macOS artifact serves both darwin-aarch64 and
# darwin-x86_64 updater entries.
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
# Fixed "assert" -> "asset" in the step name, and use the correct JSON MIME
# type (application/json; "text/json" is not a registered media type).
- name: Upload release asset if public provider is github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
asset_path: ./latest.json
asset_name: latest.json
asset_content_type: application/json

View File

@ -0,0 +1,127 @@
name: Nightly Update cortex cpp
on:
schedule:
- cron: '30 19 * * 1-5' # At 01:30 on every day-of-week from Monday through Friday UTC +7
workflow_dispatch:
jobs:
# Checks the cortex releases feed for a newer prerelease, bumps the pinned
# version file, and opens a PR against dev. Exposes pr_number/pr_created so
# the merge job below can act on the result.
update-submodule:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
actions: write
outputs:
pr_number: ${{ steps.check-update.outputs.pr_number }}
pr_created: ${{ steps.check-update.outputs.pr_created }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
ref: dev
fetch-depth: 0
# PAT (not GITHUB_TOKEN) so the resulting PR triggers CI workflows.
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Configure Git
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
# ::set-output was deprecated and disabled by GitHub Actions; outputs are
# written as key=value lines appended to $GITHUB_OUTPUT. Also fixed the
# "somethink" typo and the doubled wording in the PR body.
- name: Update submodule to latest release
id: check-update
env:
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
run: |
curl -s https://api.github.com/repos/menloresearch/cortex/releases > /tmp/github_api_releases.json
latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1)
get_asset_count() {
local version_name=$1
cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length'
}
cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt"
current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1)
current_version_asset_count=$(get_asset_count "$current_version_name")
latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name")
if [ "$current_version_name" = "$latest_prerelease_name" ]; then
echo "cortex cpp remote repo doesn't have update today, skip update cortex.cpp for today nightly build"
echo "pr_created=false" >> $GITHUB_OUTPUT
exit 0
fi
# Asset-count mismatch suggests an incomplete upstream release; bail out.
if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then
echo "Latest prerelease version has different number of assets, something went wrong, skip update cortex.cpp for today nightly build"
echo "pr_created=false" >> $GITHUB_OUTPUT
exit 1
fi
echo $latest_prerelease_name > $cortex_cpp_version_file_path
echo "Updated version from $current_version_name to $latest_prerelease_name."
echo "pr_created=true" >> $GITHUB_OUTPUT
git add -f $cortex_cpp_version_file_path
git commit -m "Update cortex cpp nightly to version $latest_prerelease_name"
branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')"
git checkout -b $branch_name
git push origin $branch_name
pr_title="Update cortex cpp nightly to version $latest_prerelease_name"
pr_body="This PR updates cortex cpp nightly to version $latest_prerelease_name"
gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA
pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number')
echo "pr_number=$pr_number" >> $GITHUB_OUTPUT
# Waits for CI on the auto-created version-bump PR and merges it when all
# checks pass. Only runs when the previous job actually opened a PR.
check-and-merge-pr:
needs: update-submodule
if: needs.update-submodule.outputs.pr_created == 'true'
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
fetch-depth: 0
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
# Polls `gh pr checks` once a minute until every check reports a
# completion time, then requires all states to be SUCCESS or SKIPPED.
# NOTE(review): the zero-value timestamp sentinel ("0001-01-01T00:00:00Z")
# relies on gh's JSON encoding of incomplete checks; there is no overall
# timeout, so a stuck check will hold this job until the runner limit.
- name: Wait for CI to pass
env:
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
run: |
pr_number=${{ needs.update-submodule.outputs.pr_number }}
while true; do
ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt')
if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then
echo "CI is still running, waiting..."
sleep 60
else
echo "CI has completed, checking states..."
ci_states=$(gh pr checks $pr_number --json state --jq '.[].state')
if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then
echo "CI failed, exiting..."
exit 1
else
echo "CI passed, merging PR..."
break
fi
fi
done
# --admin bypasses branch protection; requires admin rights on the PAT.
- name: Merge the PR
env:
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
run: |
pr_number=${{ needs.update-submodule.outputs.pr_number }}
gh pr merge $pr_number --merge --admin

View File

@ -1,16 +1,18 @@
name: Publish core Package to npmjs name: Publish core Package to npmjs
on: on:
push: push:
tags: ['v[0-9]+.[0-9]+.[0-9]+-core'] tags: ["v[0-9]+.[0-9]+.[0-9]+-core"]
paths: ['core/**', '.github/workflows/publish-npm-core.yml'] paths: ["core/**", ".github/workflows/publish-npm-core.yml"]
workflow_dispatch: pull_request:
paths: ["core/**", ".github/workflows/publish-npm-core.yml"]
jobs: jobs:
build-and-publish-plugins: build-and-publish-plugins:
environment: production
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: '0' fetch-depth: "0"
token: ${{ secrets.PAT_SERVICE_ACCOUNT }} token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Install jq - name: Install jq
@ -22,7 +24,7 @@ jobs:
env: env:
GITHUB_REF: ${{ github.ref }} GITHUB_REF: ${{ github.ref }}
- name: 'Get Semantic Version from tag' - name: "Get Semantic Version from tag"
if: github.event_name == 'push' if: github.event_name == 'push'
run: | run: |
# Get the tag from the event # Get the tag from the event
@ -40,8 +42,8 @@ jobs:
# Setup .npmrc file to publish to npm # Setup .npmrc file to publish to npm
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3
with: with:
node-version: '20.x' node-version: "20.x"
registry-url: 'https://registry.npmjs.org' registry-url: "https://registry.npmjs.org"
- run: cd core && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build - run: cd core && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build

53
.github/workflows/publish-npm-joi.yml vendored Normal file
View File

@ -0,0 +1,53 @@
name: Publish joi Package to npmjs
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-joi"]
paths: ["joi/**", ".github/workflows/publish-npm-joi.yml"]
pull_request:
paths: ["joi/**", ".github/workflows/publish-npm-joi.yml"]
jobs:
build-and-publish-plugins:
environment: production
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: "0"
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Extract tag name without v prefix
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
env:
GITHUB_REF: ${{ github.ref }}
- name: "Get Semantic Version from tag"
if: github.event_name == 'push'
run: |
# Get the tag from the event
tag=${GITHUB_REF#refs/tags/v}
# remove the -joi suffix
new_version=$(echo $tag | sed -n 's/-joi//p')
echo $new_version
# Replace the old version with the new version in package.json
jq --arg version "$new_version" '.version = $version' joi/package.json > /tmp/package.json && mv /tmp/package.json joi/package.json
# Print the new version
echo "Updated package.json version to: $new_version"
cat joi/package.json
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v3
with:
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- run: cd joi && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build
- run: cd joi && yarn publish --access public
if: github.event_name == 'push'
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@ -0,0 +1,187 @@
name: build-linux-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
aws_s3_prefix:
required: false
type: string
default: '/latest/'
beta:
required: false
type: boolean
default: false
nightly:
required: false
type: boolean
default: false
cortex_api_port:
required: false
type: string
default: null
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
jobs:
build-linux-x64:
runs-on: ubuntu-latest
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.beta == true && inputs.nightly != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-beta-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-beta.ico electron/icons/icon.ico
cp electron/icons_dev/jan-beta.png electron/icons/icon.png
cp electron/icons_dev/jan-beta-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-beta-tray.png electron/icons/icon-tray.png
- name: Replace Icons for Nightly Build
if: inputs.nightly == true && inputs.beta != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-nightly-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-nightly.ico electron/icons/icon.ico
cp electron/icons_dev/jan-nightly.png electron/icons/icon.png
cp electron/icons_dev/jan-nightly-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-nightly-tray.png electron/icons/icon-tray.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Update app version base public_provider
if: inputs.public_provider != 'github'
run: |
echo "Version: ${{ inputs.new_version }}"
# Update the version in electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/nightly", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-nightly", "channel": "latest"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json nightly
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json nightly
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
- name: Change App Name for beta version
if: inputs.beta == true
shell: bash
run: |
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json beta
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json beta
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/beta", "channel": "beta"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-beta", "channel": "beta"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
- name: Update app version base on tag
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
run: |
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
env:
VERSION_TAG: ${{ inputs.new_version }}
- name: Build and publish app to aws s3 r2 or github artifactory
if: inputs.public_provider != 'github'
run: |
# check public_provider is true or not
echo "public_provider is ${{ inputs.public_provider }}"
if [ "${{ inputs.public_provider }}" == "none" ]; then
make build
else
make build-and-publish
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
- name: Build and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == false
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Build and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == true
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Upload Artifact .deb file
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
path: ./electron/dist/*.deb
- name: Upload Artifact .AppImage file
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./electron/dist/*.AppImage

View File

@ -0,0 +1,233 @@
name: build-macos
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
aws_s3_prefix:
required: false
type: string
default: '/latest/'
beta:
required: false
type: boolean
default: false
nightly:
required: false
type: boolean
default: false
cortex_api_port:
required: false
type: string
default: null
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
CODE_SIGN_P12_BASE64:
required: false
CODE_SIGN_P12_PASSWORD:
required: false
APPLE_ID:
required: false
APPLE_APP_SPECIFIC_PASSWORD:
required: false
DEVELOPER_ID:
required: false
jobs:
build-macos:
runs-on: macos-latest
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.beta == true && inputs.nightly != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-beta-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-beta.ico electron/icons/icon.ico
cp electron/icons_dev/jan-beta.png electron/icons/icon.png
cp electron/icons_dev/jan-beta-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-beta-tray.png electron/icons/icon-tray.png
- name: Replace Icons for Nightly Build
if: inputs.nightly == true && inputs.beta != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-nightly-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-nightly.ico electron/icons/icon.ico
cp electron/icons_dev/jan-nightly.png electron/icons/icon.png
cp electron/icons_dev/jan-nightly-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-nightly-tray.png electron/icons/icon-tray.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Update app version based on latest release tag with build number
if: inputs.public_provider != 'github'
run: |
echo "Version: ${{ inputs.new_version }}"
# Update the version in electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/nightly", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-nightly", "channel": "latest"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
# cat electron/package.json
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json nightly
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json nightly
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
- name: Change App Name for beta version
if: inputs.beta == true
shell: bash
run: |
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json beta
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json beta
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/beta", "channel": "beta"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-beta", "channel": "beta"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
- name: Update app version base on tag
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
run: |
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
env:
VERSION_TAG: ${{ inputs.new_version }}
- name: Get Cer for code signing
run: base64 -d <<< "$CODE_SIGN_P12_BASE64" > /tmp/codesign.p12
shell: bash
env:
CODE_SIGN_P12_BASE64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
- uses: apple-actions/import-codesign-certs@v2
continue-on-error: true
with:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build and publish app to aws s3 r2 or github artifactory
if: inputs.public_provider != 'github'
run: |
# check public_provider is true or not
echo "public_provider is ${{ inputs.public_provider }}"
if [ "${{ inputs.public_provider }}" == "none" ]; then
make build
else
make build-and-publish
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: '/tmp/codesign.p12'
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
CSC_IDENTITY_AUTO_DISCOVERY: 'true'
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: '.'
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
- name: Build and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == false
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: '/tmp/codesign.p12'
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
CSC_IDENTITY_AUTO_DISCOVERY: 'true'
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: '.'
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Build and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == true
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: '/tmp/codesign.p12'
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
CSC_IDENTITY_AUTO_DISCOVERY: 'true'
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: '.'
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-mac-universal-${{ inputs.new_version }}
path: ./electron/dist/*.dmg

View File

@ -0,0 +1,229 @@
name: build-windows-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
aws_s3_prefix:
required: false
type: string
default: '/latest/'
beta:
required: false
type: boolean
default: false
nightly:
required: false
type: boolean
default: false
cortex_api_port:
required: false
type: string
default: null
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
AZURE_KEY_VAULT_URI:
required: false
AZURE_CLIENT_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_SECRET:
required: false
AZURE_CERT_NAME:
required: false
jobs:
build-windows-x64:
runs-on: windows-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.beta == true && inputs.nightly != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-beta-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-beta.ico electron/icons/icon.ico
cp electron/icons_dev/jan-beta.png electron/icons/icon.png
cp electron/icons_dev/jan-beta-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-beta-tray.png electron/icons/icon-tray.png
- name: Replace Icons for Nightly Build
if: inputs.nightly == true && inputs.beta != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-nightly-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-nightly.ico electron/icons/icon.ico
cp electron/icons_dev/jan-nightly.png electron/icons/icon.png
cp electron/icons_dev/jan-nightly-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-nightly-tray.png electron/icons/icon-tray.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Update app version base on tag
if: inputs.public_provider != 'github'
id: version_update
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update the version in electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/nightly", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-nightly", "channel": "latest"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq '.build.win.sign = "./sign.js"' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json nightly
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json nightly
chmod +x .github/scripts/rename-uninstaller.sh
.github/scripts/rename-uninstaller.sh nightly
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
echo "------------------------"
- name: Change App Name for beta version
if: inputs.beta == true
shell: bash
run: |
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json beta
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json beta
chmod +x .github/scripts/rename-uninstaller.sh
.github/scripts/rename-uninstaller.sh beta
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
echo "------------------------"
cat ./electron/scripts/uninstaller.nsh
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/beta", "channel": "beta"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-beta", "channel": "beta"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
- name: Update app version base on tag
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
shell: bash
run: |
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.win.sign = "./sign.js"' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
env:
VERSION_TAG: ${{ inputs.new_version }}
- name: Install AzureSignTool
run: |
dotnet tool install --global AzureSignTool
- name: Build and publish app to aws s3 r2 or github artifactory
shell: bash
if: inputs.public_provider != 'github'
run: |
# check public_provider is true or not
echo "public_provider is ${{ inputs.public_provider }}"
if [ "${{ inputs.public_provider }}" == "none" ]; then
make build
else
make build-and-publish
fi
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: homebrewltd
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
- name: Build app and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == false
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: homebrewltd
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Build app and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == true
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
# AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AZURE_CERT_NAME: homebrewltd
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-win-x64-${{ inputs.new_version }}
path: ./electron/dist/*.exe

View File

@ -9,6 +9,7 @@ on:
jobs: jobs:
get-update-version: get-update-version:
runs-on: ubuntu-latest runs-on: ubuntu-latest
environment: production
outputs: outputs:
new_version: ${{ steps.version_update.outputs.new_version }} new_version: ${{ steps.version_update.outputs.new_version }}
steps: steps:
@ -29,7 +30,7 @@ jobs:
local max_retries=3 local max_retries=3
local tag local tag
while [ $retries -lt $max_retries ]; do while [ $retries -lt $max_retries ]; do
tag=$(curl -s https://api.github.com/repos/janhq/jan/releases/latest | jq -r .tag_name) tag=$(curl -s https://api.github.com/repos/menloresearch/jan/releases/latest | jq -r .tag_name)
if [ -n "$tag" ] && [ "$tag" != "null" ]; then if [ -n "$tag" ] && [ "$tag" != "null" ]; then
echo $tag echo $tag
return return
@ -45,10 +46,7 @@ jobs:
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
echo "Tag detected, set output follow tag" echo "Tag detected, set output follow tag"
sanitized_tag="${{ steps.tag.outputs.tag }}" echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}"
# Remove the 'v' prefix if it exists
sanitized_tag="${sanitized_tag#v}"
echo "::set-output name=new_version::$sanitized_tag"
else else
# Get the latest release tag from GitHub API # Get the latest release tag from GitHub API
LATEST_TAG=$(get_latest_tag) LATEST_TAG=$(get_latest_tag)

View File

@ -26,6 +26,7 @@ on:
jobs: jobs:
noti-discord-and-update-url-readme: noti-discord-and-update-url-readme:
environment: production
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
contents: write contents: write
@ -46,10 +47,10 @@ jobs:
with: with:
args: | args: |
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}: Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
- Windows: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_x64-setup.exe - Windows: https://delta.jan.ai/nightly/jan-nightly-win-x64-{{ VERSION }}.exe
- macOS Universal: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_universal.dmg - macOS Universal: https://delta.jan.ai/nightly/jan-nightly-mac-universal-{{ VERSION }}.dmg
- Linux Deb: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.deb - Linux Deb: https://delta.jan.ai/nightly/jan-nightly-linux-amd64-{{ VERSION }}.deb
- Linux AppImage: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.AppImage - Linux AppImage: https://delta.jan.ai/nightly/jan-nightly-linux-x86_64-{{ VERSION }}.AppImage
- Github action run: https://github.com/janhq/jan/actions/runs/{{ GITHUB_RUN_ID }} - Github action run: https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }}
env: env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}

View File

@ -1,138 +0,0 @@
name: tauri-build-linux-x64-external
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
new_version:
required: true
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
disable_updater:
required: false
type: boolean
default: false
description: 'If true, builds both .deb and .appimage but disables auto-updater'
jobs:
build-linux-x64-external:
runs-on: ubuntu-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android/sdk/ndk
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo apt-get clean
echo "Disk space after cleanup:"
df -h
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Install Tauri dependencies
run: |
sudo apt update
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 libayatana-appindicator3-dev
- name: Update app version
run: |
echo "Version: ${{ inputs.new_version }}"
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
fi
- name: Build app
run: |
make build
env:
RELEASE_CHANNEL: '${{ inputs.channel }}'
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
path: ./src-tauri/target/release/bundle/deb/*.deb
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage

View File

@ -1,185 +0,0 @@
name: tauri-build-linux-x64-flatpak
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
disable_updater:
required: false
type: boolean
default: false
description: 'If true, builds both .deb and .appimage but disables auto-updater'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
jobs:
build-linux-x64:
runs-on: ubuntu-22.04
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android/sdk/ndk
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo apt-get clean
echo "Disk space after cleanup:"
df -h
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Install Tauri dependencies
run: |
sudo apt update
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
- name: Update app version based on public_provider
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
# Temporarily enable devtool on prod build
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Build app
run: |
make build
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
yarn tauri signer sign \
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
"$APP_IMAGE"
env:
RELEASE_CHANNEL: '${{ inputs.channel }}'
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-deb
path: ./src-tauri/target/release/bundle/deb/*.deb
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage

View File

@ -1,264 +0,0 @@
name: tauri-build-linux-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
disable_updater:
required: false
type: boolean
default: false
description: 'If true, builds both .deb and .appimage but disables auto-updater'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
outputs:
DEB_SIG:
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
APPIMAGE_SIG:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
jobs:
build-linux-x64:
runs-on: ubuntu-latest
outputs:
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android/sdk/ndk
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo apt-get clean
echo "Disk space after cleanup:"
df -h
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Install Tauri dependencies
run: |
sudo apt update
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 libayatana-appindicator3-dev
- name: Update app version based on public_provider
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
fi
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
# Temporarily enable devtool on prod build
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Build app
run: |
make build
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
yarn tauri signer sign \
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
"$APP_IMAGE"
env:
RELEASE_CHANNEL: '${{ inputs.channel }}'
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
path: ./src-tauri/target/release/bundle/deb/*.deb
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
## Set output filename for linux
- name: Set output filename for linux
id: packageinfo
run: |
cd ./src-tauri/target/release/bundle
if [ "${{ inputs.channel }}" != "stable" ]; then
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
else
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
fi
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle
# Upload for tauri updater
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_content_type: application/octet-stream

View File

@ -1,103 +0,0 @@
name: tauri-build-macos-external
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
new_version:
required: true
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
jobs:
build-macos-external:
runs-on: macos-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version
run: |
echo "Version: ${{ inputs.new_version }}"
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
fi
- name: Build app
run: |
make build
env:
APP_PATH: '.'
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
path: |
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg

View File

@ -1,264 +0,0 @@
name: tauri-build-macos
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
CODE_SIGN_P12_BASE64:
required: false
CODE_SIGN_P12_PASSWORD:
required: false
APPLE_ID:
required: false
APPLE_APP_SPECIFIC_PASSWORD:
required: false
DEVELOPER_ID:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
outputs:
MAC_UNIVERSAL_SIG:
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
FILE_NAME:
value: ${{ jobs.build-macos.outputs.FILE_NAME }}
DMG_NAME:
value: ${{ jobs.build-macos.outputs.DMG_NAME }}
TAR_NAME:
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
jobs:
build-macos:
runs-on: macos-latest
outputs:
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
DMG_NAME: ${{ steps.metadata.outputs.DMG_NAME }}
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version based on latest release tag with build number
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
# Temporarily enable devtool on prod build
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Get key for notarize
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
shell: bash
env:
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
- uses: apple-actions/import-codesign-certs@v2
continue-on-error: true
with:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build app
run: |
make build
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
APP_PATH: '.'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
APPLE_API_KEY_PATH: /tmp/notary-key.p8
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
path: |
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
## Set output filename for mac
- name: Set output filename for mac
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
if [ "${{ inputs.channel }}" != "stable" ]; then
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
else
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
TAR_NAME=Jan.app.tar.gz
fi
echo "::set-output name=MAC_UNIVERSAL_SIG::$MAC_UNIVERSAL_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=DMG_NAME::$DMG_NAME"
echo "::set-output name=TAR_NAME::$TAR_NAME"
id: metadata
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle
# Upload for tauri updater
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/gzip
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
asset_content_type: application/gzip

View File

@ -1,156 +0,0 @@
name: tauri-build-windows-x64-external
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
new_version:
required: true
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
jobs:
build-windows-x64-external:
runs-on: windows-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
# Update product name
jq --arg name "Jan-${{ inputs.channel }}" '.productName = $name' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
echo "---------tauri.conf.json---------"
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
else
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
echo "---------nsis.template---------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Build app
shell: bash
run: |
make build
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe

View File

@ -1,291 +0,0 @@
name: tauri-build-windows-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
AZURE_KEY_VAULT_URI:
required: false
AZURE_CLIENT_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_SECRET:
required: false
AZURE_CERT_NAME:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
outputs:
WIN_SIG:
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
MSI_FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }}
jobs:
build-windows-x64:
runs-on: windows-latest
outputs:
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version base on tag
id: version_update
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Add sign commands to tauri.windows.conf.json
jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
echo "---------tauri.windows.conf.json---------"
cat ./src-tauri/tauri.windows.conf.json
# Temporarily enable devtool on prod build
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
# Update updater endpoint
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
# Update product name
jq --arg name "Jan-${{ inputs.channel }}" '.productName = $name' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
echo "---------tauri.conf.json---------"
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
else
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
echo "---------nsis.template---------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Install AzureSignTool
run: |
dotnet tool install --global --version 6.0.0 AzureSignTool
- name: Build app
shell: bash
run: |
make build
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-exe-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-msi-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/msi/*.msi
## Set output filename for windows
- name: Set output filename for windows
shell: bash
run: |
cd ./src-tauri/target/release/bundle/nsis
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi"
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi"
fi
echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=MSI_FILE_NAME::$MSI_FILE"
id: metadata
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
shell: bash
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle/nsis
# Upload for tauri updater
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
- name: Upload release assert if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream

84
.gitignore vendored
View File

@ -1,65 +1,53 @@
.vscode
.idea
.env .env
.idea
# Jan inference
error.log error.log
node_modules node_modules
*.tgz *.tgz
!charts/server/charts/*.tgz
dist dist
build build
.DS_Store .DS_Store
electron/renderer
electron/models
electron/docs
electron/engines
electron/themes
electron/playwright-report
server/pre-install
package-lock.json package-lock.json
coverage coverage
*.log *.log
core/lib/** core/lib/**
# Nitro binary files
extensions/*-extension/bin/*/nitro
extensions/*-extension/bin/*/*.metal
extensions/*-extension/bin/*/*.exe
extensions/*-extension/bin/*/*.dll
extensions/*-extension/bin/*/*.exp
extensions/*-extension/bin/*/*.lib
extensions/*-extension/bin/saved-*
extensions/*-extension/bin/*.tar.gz
extensions/*-extension/bin/vulkaninfoSDK.exe
extensions/*-extension/bin/vulkaninfo
# Turborepo
.turbo
electron/test-data
electron/test-results
core/test_results.html
coverage
.yarn .yarn
.yarnrc .yarnrc
*.tsbuildinfo
test_results.html test_results.html
pre-install *.tsbuildinfo
electron/shared/**
# docs # docs
docs/yarn.lock docs/yarn.lock
src-tauri/resources/lib electron/.version.bak
src-tauri/icons
!src-tauri/icons/icon.png
src-tauri/gen/apple
src-tauri/gen/android
src-tauri/resources/bin
# Helper tools
.opencode
OpenCode.md
Claude.md
archive/
.cache/
# auto qa
autoqa/trajectories
autoqa/recordings
autoqa/__pycache__
# Astro / Starlight specific
website/dist/
website/.astro/
website/src/content/config.ts.timestamp-*
# Nextra specific
docs/out/
docs/.next/
# General Node.js
**/node_modules
**/.env
**/.env.*
**/npm-debug.log*
**/yarn-debug.log*
**/yarn-error.log*
**/pnpm-debug.log*
## cargo
target
Cargo.lock
src-tauri/resources/
## test
test-data
llm-docs
.claude/agents

View File

@ -1,252 +1,32 @@
# Contributing to Jan # Contributing to jan
First off, thank you for considering contributing to Jan. It's people like you that make Jan such an amazing project. First off, thank you for considering contributing to jan. It's people like you that make jan such an amazing project.
Jan is an AI assistant that can run 100% offline on your device. Think ChatGPT, but private, local, and under your complete control. If you're thinking about contributing, you're already awesome - let's make AI accessible to everyone, one commit at a time.
## Quick Links to Component Guides
- **[Web App](./web-app/CONTRIBUTING.md)** - React UI and logic
- **[Core SDK](./core/CONTRIBUTING.md)** - TypeScript SDK and extension system
- **[Extensions](./extensions/CONTRIBUTING.md)** - Supportive modules for the frontend
- **[Tauri Backend](./src-tauri/CONTRIBUTING.md)** - Rust native integration
- **[Tauri Plugins](./src-tauri/plugins/CONTRIBUTING.md)** - Hardware and system plugins
## How Jan Actually Works
Jan is a desktop app that runs local AI models. Here's how the components actually connect:
```
┌──────────────────────────────────────────────────────────┐
│ Web App (Frontend) │
│ (web-app/) │
│ • React UI │
│ • Chat Interface │
│ • Settings Pages │
│ • Model Hub │
└────────────┬─────────────────────────────┬───────────────┘
│ │
│ imports │ imports
▼ ▼
┌──────────────────────┐ ┌──────────────────────┐
│ Core SDK │ │ Extensions │
│ (core/) │ │ (extensions/) │
│ │ │ │
│ • TypeScript APIs │◄─────│ • Assistant Mgmt │
│ • Extension System │ uses │ • Conversations │
│ • Event Bus │ │ • Downloads │
│ • Type Definitions │ │ • LlamaCPP │
└──────────┬───────────┘ └───────────┬──────────┘
│ │
│ ┌──────────────────────┐ │
│ │ Web App │ │
│ └──────────┬───────────┘ │
│ │ │
└──────────────┼───────────────┘
Tauri IPC
(invoke commands)
┌───────────────────────────────────────────────────────────┐
│ Tauri Backend (Rust) │
│ (src-tauri/) │
│ │
│ • Window Management • File System Access │
│ • Process Control • System Integration │
│ • IPC Command Handler • Security & Permissions │
└───────────────────────────┬───────────────────────────────┘
┌───────────────────────────────────────────────────────────┐
│ Tauri Plugins (Rust) │
│ (src-tauri/plugins/) │
│ │
│ ┌──────────────────┐ ┌──────────────────┐ │
│ │ Hardware Plugin │ │ LlamaCPP Plugin │ │
│ │ │ │ │ │
│ │ • CPU/GPU Info │ │ • Process Mgmt │ │
│ │ • Memory Stats │ │ • Model Loading │ │
│ │ • System Info │ │ • Inference │ │
│ └──────────────────┘ └──────────────────┘ │
└───────────────────────────────────────────────────────────┘
```
### The Communication Flow
1. **JavaScript Layer Relationships**:
- Web App imports Core SDK and Extensions as JavaScript modules
- Extensions use Core SDK for shared functionality
- All run in the browser/webview context
2. **All Three → Backend**: Through Tauri IPC
- **Web App** → Backend: `await invoke('app_command', data)`
- **Core SDK** → Backend: `await invoke('core_command', data)`
- **Extensions** → Backend: `await invoke('ext_command', data)`
- Each component can independently call backend commands
3. **Backend → Plugins**: Native Rust integration
- Backend loads plugins as Rust libraries
- Direct function calls, no IPC overhead
4. **Response Flow**:
- Plugin → Backend → IPC → Requester (Web App/Core/Extension) → UI updates
### Real-World Example: Loading a Model
Here's what actually happens when you click "Download Llama 3":
1. **Web App** (`web-app/`) - User clicks download button
2. **Extension** (`extensions/download-extension`) - Handles the download logic
3. **Tauri Backend** (`src-tauri/`) - Actually downloads the file to disk
4. **Extension** (`extensions/llamacpp-extension`) - Prepares model for loading
5. **Tauri Plugin** (`src-tauri/plugins/llamacpp`) - Starts llama.cpp process
6. **Hardware Plugin** (`src-tauri/plugins/hardware`) - Detects GPU, optimizes settings
7. **Model ready!** - User can start chatting
## Project Structure
```
jan/
├── web-app/ # React frontend (what users see)
├── src-tauri/ # Rust backend (system integration)
│ ├── src/core/ # Core Tauri commands
│ └── plugins/ # Tauri plugins (hardware, llamacpp)
├── core/ # TypeScript SDK (API layer)
├── extensions/ # JavaScript extensions
│ ├── assistant-extension/
│ ├── conversational-extension/
│ ├── download-extension/
│ └── llamacpp-extension/
├── docs/ # Documentation website
├── website/ # Marketing website
├── autoqa/ # Automated testing
├── scripts/ # Build utilities
├── package.json # Root workspace configuration
├── Makefile # Build automation commands
├── LICENSE # Apache 2.0 license
└── README.md # Project overview
```
## Development Setup
### The Scenic Route (Build from Source)
**Prerequisites:**
- Node.js ≥ 20.0.0
- Yarn ≥ 1.22.0
- Rust (for Tauri)
- Make ≥ 3.81
**Option 1: The Easy Way (Make)**
```bash
git clone https://github.com/janhq/jan
cd jan
make dev
```
## How Can I Contribute? ## How Can I Contribute?
### Reporting Bugs ### Reporting Bugs
- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/janhq/jan/issues) - **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/menloresearch/jan/issues).
- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/janhq/jan/issues/new) - If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/menloresearch/jan/issues/new).
- Include your system specs and error logs - it helps a ton
### Suggesting Enhancements ### Suggesting Enhancements
- Open a new issue with a clear title and description - Open a new issue with a clear title and description.
- Explain why this enhancement would be useful
- Include mockups or examples if you can
### Your First Code Contribution ### Your First Code Contribution
**Choose Your Adventure:** - Fork the repo.
- **Frontend UI and logic**`web-app/` - Create a new branch (`git checkout -b feature-name`).
- **Shared API declarations**`core/` - Commit your changes (`git commit -am 'Add some feature'`).
- **Backend system integration**`src-tauri/` - Push to the branch (`git push origin feature-name`).
- **Business logic features**`extensions/` - Open a new Pull Request.
- **Dedicated backend handler**`src-tauri/plugins/`
**The Process:** ## Styleguides
1. Fork the repo
2. Create a new branch (`git checkout -b feature-name`)
3. Make your changes (and write tests!)
4. Commit your changes (`git commit -am 'Add some feature'`)
5. Push to the branch (`git push origin feature-name`)
6. Open a new Pull Request against `dev` branch
## Testing ### Git Commit Messages
```bash - Use the present tense ("Add feature" not "Added feature").
yarn test # All tests
cd src-tauri && cargo test # Rust tests
cd autoqa && python main.py # End-to-end tests
```
## Code Standards
### TypeScript/JavaScript
- TypeScript required (we're not animals)
- ESLint + Prettier
- Functional React components
- Proper typing (no `any` - seriously!)
### Rust
- `cargo fmt` + `cargo clippy`
- `Result<T, E>` for error handling
- Document public APIs
## Git Conventions
### Branches
- `main` - stable releases
- `dev` - development (target this for PRs)
- `feature/*` - new features
- `fix/*` - bug fixes
### Commit Messages
- Use the present tense ("Add feature" not "Added feature")
- Be descriptive but concise
- Reference issues when applicable
Examples:
```
feat: add support for Qwen models
fix: resolve memory leak in model loading
docs: update installation instructions
```
## Troubleshooting
If things go sideways:
1. **Check our [troubleshooting docs](https://jan.ai/docs/troubleshooting)**
2. **Clear everything and start fresh:** `make clean` then `make dev`
3. **Copy your error logs and system specs**
4. **Ask for help in our [Discord](https://discord.gg/FTk2MvZwJH)** `#🆘|jan-help` channel
Common issues:
- **Build failures**: Check Node.js and Rust versions
- **Extension not loading**: Verify it's properly registered
- **Model not working**: Check hardware requirements and GPU drivers
## Getting Help
- [Documentation](https://jan.ai/docs) - The manual you should read
- [Discord Community](https://discord.gg/jan) - Where the community lives
- [GitHub Issues](https://github.com/janhq/jan/issues) - Report bugs here
- [GitHub Discussions](https://github.com/janhq/jan/discussions) - Ask questions
## License
Apache 2.0 - Because sharing is caring. See [LICENSE](./LICENSE) for the legal stuff.
## Additional Notes ## Additional Notes
We're building something pretty cool here - an AI assistant that respects your privacy and runs entirely on your machine. Every contribution, no matter how small, helps make AI more accessible to everyone. Thank you for contributing to jan!
Thanks for being part of the journey. Let's build the future of local AI together! 🚀

View File

@ -1,50 +0,0 @@
# Stage 1: Build stage with Node.js and Yarn v4
FROM node:20-alpine AS builder

# Platform API endpoint baked into the web build (override with --build-arg)
ARG MENLO_PLATFORM_BASE_URL=https://api-dev.menlo.ai/v1
ENV MENLO_PLATFORM_BASE_URL=$MENLO_PLATFORM_BASE_URL

# Install build dependencies (native addons need make/g++/python)
RUN apk add --no-cache \
    make \
    g++ \
    python3 \
    py3-pip \
    git

# Enable corepack and install Yarn 4
RUN corepack enable && corepack prepare yarn@4.5.3 --activate

# Verify Yarn version
RUN yarn --version

# Set working directory
WORKDIR /app

# Copy source code
COPY ./extensions-web ./extensions-web
COPY ./web-app ./web-app
COPY ./Makefile ./Makefile
# Copy repo dotfiles (e.g. .yarnrc.yml) into the build directory, not the image
# root — the original destination '/' left them outside /app where the
# subsequent yarn/make invocations cannot see them
COPY ./.* ./
COPY ./package.json ./package.json
COPY ./yarn.lock ./yarn.lock
COPY ./pre-install ./pre-install
COPY ./core ./core

# Build web application
RUN yarn install && yarn build:core && make build-web-app

# Stage 2: Production stage with Nginx
FROM nginx:alpine

# Copy static files from build stage
COPY --from=builder /app/web-app/dist-web /usr/share/nginx/html

# Copy custom nginx config
COPY nginx.conf /etc/nginx/conf.d/default.conf

# Expose port 80
EXPOSE 80

# Start nginx (foreground so the container stays alive)
CMD ["nginx", "-g", "daemon off;"]

667
LICENSE
View File

@ -1,19 +1,660 @@
Jan # GNU AFFERO GENERAL PUBLIC LICENSE
Copyright 2025 Menlo Research Version 3, 19 November 2007
This product includes software developed by Menlo Research (https://menlo.ai). Copyright (C) 2007 Free Software Foundation, Inc.
<https://fsf.org/>
Licensed under the Apache License, Version 2.0 (the "License"); Everyone is permitted to copy and distribute verbatim copies of this
You may not use this file except in compliance with the License. license document, but changing it is not allowed.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0 ## Preamble
Unless required by applicable law or agreed to in writing, software The GNU Affero General Public License is a free, copyleft license for
distributed under the License is distributed on an "AS IS" BASIS, software and other kinds of works, specifically designed to ensure
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. cooperation with the community in the case of network server software.
See the License for the specific language governing permissions and
limitations under the License.
Attribution is requested in user-facing documentation and materials, where appropriate. The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains
free software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing
under this license.
The precise terms and conditions for copying, distribution and
modification follow.
## TERMS AND CONDITIONS
### 0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public
License.
"Copyright" also means copyright-like laws that apply to other kinds
of works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of
an exact copy. The resulting work is called a "modified version" of
the earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user
through a computer network, with no transfer of a copy, is not
conveying.
An interactive user interface displays "Appropriate Legal Notices" to
the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
### 1. Source Code.
The "source code" for a work means the preferred form of the work for
making modifications to it. "Object code" means any non-source form of
a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users can
regenerate automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same
work.
### 2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey,
without conditions so long as your license otherwise remains in force.
You may convey covered works to others for the sole purpose of having
them make modifications exclusively for you, or provide you with
facilities for running those works, provided that you comply with the
terms of this License in conveying all material for which you do not
control copyright. Those thus making or running the covered works for
you must do so exclusively on your behalf, under your direction and
control, on terms that prohibit them from making any copies of your
copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under the
conditions stated below. Sublicensing is not allowed; section 10 makes
it unnecessary.
### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such
circumvention is effected by exercising rights under this License with
respect to the covered work, and you disclaim any intention to limit
operation or modification of the work as a means of enforcing, against
the work's users, your or third parties' legal rights to forbid
circumvention of technological measures.
### 4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
### 5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these
conditions:
- a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
- b) The work must carry prominent notices stating that it is
released under this License and any conditions added under
section 7. This requirement modifies the requirement in section 4
to "keep intact all notices".
- c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
- d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
### 6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of
sections 4 and 5, provided that you also convey the machine-readable
Corresponding Source under the terms of this License, in one of these
ways:
- a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
- b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the Corresponding
Source from a network server at no charge.
- c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
- d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
- e) Convey the object code using peer-to-peer transmission,
provided you inform other peers where the object code and
Corresponding Source of the work are being offered to the general
public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal,
family, or household purposes, or (2) anything designed or sold for
incorporation into a dwelling. In determining whether a product is a
consumer product, doubtful cases shall be resolved in favor of
coverage. For a particular product received by a particular user,
"normally used" refers to a typical or common use of that class of
product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected
to use, the product. A product is a consumer product regardless of
whether the product has substantial commercial, industrial or
non-consumer uses, unless such uses represent the only significant
mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to
install and execute modified versions of a covered work in that User
Product from a modified version of its Corresponding Source. The
information must suffice to ensure that the continued functioning of
the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or
updates for a work that has been modified or installed by the
recipient, or for the User Product in which it has been modified or
installed. Access to a network may be denied when the modification
itself materially and adversely affects the operation of the network
or violates the rules and protocols for communication across the
network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
### 7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders
of that material) supplement the terms of this License with terms:
- a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
- b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
- c) Prohibiting misrepresentation of the origin of that material,
or requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
- d) Limiting the use for publicity purposes of names of licensors
or authors of the material; or
- e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
- f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions
of it) with contractual assumptions of liability to the recipient,
for any liability that these contractual assumptions directly
impose on those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions; the
above requirements apply either way.
### 8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your license
from a particular copyright holder is reinstated (a) provisionally,
unless and until the copyright holder explicitly and finally
terminates your license, and (b) permanently, if the copyright holder
fails to notify you of the violation by some reasonable means prior to
60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
### 9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run
a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
### 10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
### 11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned
or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on
the non-exercise of one or more of the rights that are specifically
granted under this License. You may not convey a covered work if you
are a party to an arrangement with a third party that is in the
business of distributing software, under which you make payment to the
third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties
who would receive the covered work from you, a discriminatory patent
license (a) in connection with copies of the covered work conveyed by
you (or copies made from those copies), or (b) primarily for and in
connection with specific products or compilations that contain the
covered work, unless you entered into that arrangement, or that patent
license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
### 12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under
this License and any other pertinent obligations, then as a
consequence you may not convey it at all. For example, if you agree to
terms that obligate you to collect a royalty for further conveying
from those to whom you convey the Program, the only way you could
satisfy both those terms and this License would be to refrain entirely
from conveying the Program.
### 13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your
version supports such interaction) an opportunity to receive the
Corresponding Source of your version by providing access to the
Corresponding Source from a network server at no charge, through some
standard or customary means of facilitating copying of software. This
Corresponding Source shall include the Corresponding Source for any
work covered by version 3 of the GNU General Public License that is
incorporated pursuant to the following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
### 14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Affero General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever
published by the Free Software Foundation.
If the Program specifies that a proxy can decide which future versions
of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
### 15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
### 16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
### 17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
## How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these
terms.
To do so, attach the following notices to the program. It is safest to
attach them to the start of each source file to most effectively state
the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper
mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for
the specific requirements.
You should also get your employer (if you work as a programmer) or
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. For more information on this, and how to apply and follow
the GNU AGPL, see <https://www.gnu.org/licenses/>.

196
Makefile
View File

@ -22,112 +22,97 @@ config-yarn:
install-and-build: config-yarn install-and-build: config-yarn
ifeq ($(OS),Windows_NT) ifeq ($(OS),Windows_NT)
echo "skip" echo "skip"
else ifeq ($(shell uname -s),Linux)
chmod +x src-tauri/build-utils/*
endif endif
yarn install yarn install
yarn build:tauri:plugin:api yarn build:joi
yarn build:core yarn build:core
yarn build:extensions && yarn build:extensions-web yarn build:server
yarn build:extensions
# Install required Rust targets for macOS universal builds check-file-counts: install-and-build
install-rust-targets: ifeq ($(OS),Windows_NT)
ifeq ($(shell uname -s),Darwin) powershell -Command "if ((Get-ChildItem -Path pre-install -Filter *.tgz | Measure-Object | Select-Object -ExpandProperty Count) -ne (Get-ChildItem -Path extensions -Directory | Where-Object Name -like *-extension* | Measure-Object | Select-Object -ExpandProperty Count)) { Write-Host 'Number of .tgz files in pre-install does not match the number of subdirectories in extensions with package.json'; exit 1 } else { Write-Host 'Extension build successful' }"
@echo "Detected macOS, installing universal build targets..."
rustup target add x86_64-apple-darwin
rustup target add aarch64-apple-darwin
@echo "Rust targets installed successfully!"
else else
@echo "Not macOS; skipping Rust target installation." @tgz_count=$$(find pre-install -type f -name "*.tgz" | wc -l); dir_count=$$(find extensions -mindepth 1 -maxdepth 1 -type d -exec test -e '{}/package.json' \; -print | wc -l); if [ $$tgz_count -ne $$dir_count ]; then echo "Number of .tgz files in pre-install ($$tgz_count) does not match the number of subdirectories in extension ($$dir_count)"; exit 1; else echo "Extension build successful"; fi
endif endif
# Install required Rust targets for Android builds dev: check-file-counts
install-android-rust-targets:
@echo "Checking and installing Android Rust targets..."
@rustup target list --installed | grep -q "aarch64-linux-android" || rustup target add aarch64-linux-android
@rustup target list --installed | grep -q "armv7-linux-androideabi" || rustup target add armv7-linux-androideabi
@rustup target list --installed | grep -q "i686-linux-android" || rustup target add i686-linux-android
@rustup target list --installed | grep -q "x86_64-linux-android" || rustup target add x86_64-linux-android
@echo "Android Rust targets ready!"
# Install required Rust targets for iOS builds
install-ios-rust-targets:
@echo "Checking and installing iOS Rust targets..."
@rustup target list --installed | grep -q "aarch64-apple-ios" || rustup target add aarch64-apple-ios
@rustup target list --installed | grep -q "aarch64-apple-ios-sim" || rustup target add aarch64-apple-ios-sim
@rustup target list --installed | grep -q "x86_64-apple-ios" || rustup target add x86_64-apple-ios
@echo "iOS Rust targets ready!"
dev: install-and-build
yarn download:bin
yarn dev yarn dev
# Web application targets
install-web-app: config-yarn
yarn install
dev-web-app: install-web-app
yarn build:core
yarn dev:web-app
build-web-app: install-web-app
yarn build:core
yarn build:web-app
serve-web-app:
yarn serve:web-app
build-serve-web-app: build-web-app
yarn serve:web-app
# Mobile
dev-android: install-and-build install-android-rust-targets
@echo "Setting up Android development environment..."
@if [ ! -d "src-tauri/gen/android" ]; then \
echo "Android app not initialized. Initializing..."; \
yarn tauri android init; \
fi
@echo "Sourcing Android environment setup..."
@bash autoqa/scripts/setup-android-env.sh echo "Android environment ready"
@echo "Starting Android development server..."
yarn dev:android
dev-ios: install-and-build install-ios-rust-targets
@echo "Setting up iOS development environment..."
ifeq ($(shell uname -s),Darwin)
@if [ ! -d "src-tauri/gen/ios" ]; then \
echo "iOS app not initialized. Initializing..."; \
yarn tauri ios init; \
fi
@echo "Checking iOS development requirements..."
@xcrun --version > /dev/null 2>&1 || (echo "❌ Xcode command line tools not found. Install with: xcode-select --install" && exit 1)
@xcrun simctl list devices available | grep -q "iPhone\|iPad" || (echo "❌ No iOS simulators found. Install simulators through Xcode." && exit 1)
@echo "Starting iOS development server..."
yarn dev:ios
else
@echo "❌ iOS development is only supported on macOS"
@exit 1
endif
# Linting # Linting
lint: install-and-build lint: check-file-counts
yarn lint yarn lint
update-playwright-config:
ifeq ($(OS),Windows_NT)
echo -e "const RPconfig = {\n\
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
endpoint: '$(REPORT_PORTAL_URL)',\n\
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
attributes: [\n\
{\n\
key: 'key',\n\
value: 'value',\n\
},\n\
{\n\
value: 'value',\n\
},\n\
],\n\
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
else ifeq ($(shell uname -s),Linux)
echo "const RPconfig = {\n\
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
endpoint: '$(REPORT_PORTAL_URL)',\n\
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
attributes: [\n\
{\n\
key: 'key',\n\
value: 'value',\n\
},\n\
{\n\
value: 'value',\n\
},\n\
],\n\
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
else
echo "const RPconfig = {\n\
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
endpoint: '$(REPORT_PORTAL_URL)',\n\
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
attributes: [\n\
{\n\
key: 'key',\n\
value: 'value',\n\
},\n\
{\n\
value: 'value',\n\
},\n\
],\n\
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
sed -i '' "s|^ reporter: .*| reporter: [['@reportportal\/agent-js-playwright', RPconfig]],|" electron/playwright.config.ts
endif
# Testing # Testing
test: lint test: lint
yarn download:bin yarn build:test
ifeq ($(OS),Windows_NT) yarn test:coverage
endif
yarn test yarn test
yarn copy:assets:tauri
yarn build:icon # Builds and publishes the app
cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features test-tauri -- --test-threads=1 build-and-publish: check-file-counts
cargo test --manifest-path src-tauri/plugins/tauri-plugin-hardware/Cargo.toml yarn build:publish
cargo test --manifest-path src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
cargo test --manifest-path src-tauri/utils/Cargo.toml
# Build # Build
build: install-and-build install-rust-targets build: check-file-counts
yarn build yarn build
clean: clean:
@ -137,8 +122,6 @@ ifeq ($(OS),Windows_NT)
-powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz" -powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz"
-powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz" -powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz"
-powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz" -powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz"
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/resources"
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/target"
-powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }" -powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }"
else ifeq ($(shell uname -s),Linux) else ifeq ($(shell uname -s),Linux)
find . -name "node_modules" -type d -prune -exec rm -rf '{}' + find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
@ -153,25 +136,20 @@ else ifeq ($(shell uname -s),Linux)
rm -rf ./pre-install/*.tgz rm -rf ./pre-install/*.tgz
rm -rf ./extensions/*/*.tgz rm -rf ./extensions/*/*.tgz
rm -rf ./electron/pre-install/*.tgz rm -rf ./electron/pre-install/*.tgz
rm -rf ./src-tauri/resources
rm -rf ./src-tauri/target
rm -rf "~/jan/extensions" rm -rf "~/jan/extensions"
rm -rf "~/.cache/jan*" rm -rf "~/.cache/jan*"
rm -rf "./.cache"
else else
find . -name "node_modules" -type d -prune -exec rm -rfv '{}' + find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name ".next" -type d -exec rm -rfv '{}' + find . -name ".next" -type d -exec rm -rf '{}' +
find . -name "dist" -type d -exec rm -rfv '{}' + find . -name "dist" -type d -exec rm -rf '{}' +
find . -name "build" -type d -exec rm -rfv '{}' + find . -name "build" -type d -exec rm -rf '{}' +
find . -name "out" -type d -exec rm -rfv '{}' + find . -name "out" -type d -exec rm -rf '{}' +
find . -name ".turbo" -type d -exec rm -rfv '{}' + find . -name ".turbo" -type d -exec rm -rf '{}' +
find . -name ".yarn" -type d -exec rm -rfv '{}' + find . -name ".yarn" -type d -exec rm -rf '{}' +
find . -name "package-lock.json" -type f -exec rm -rfv '{}' + find . -name "package-lock.json" -type f -exec rm -rf '{}' +
rm -rfv ./pre-install/*.tgz rm -rf ./pre-install/*.tgz
rm -rfv ./extensions/*/*.tgz rm -rf ./extensions/*/*.tgz
rm -rfv ./electron/pre-install/*.tgz rm -rf ./electron/pre-install/*.tgz
rm -rfv ./src-tauri/resources rm -rf ~/jan/extensions
rm -rfv ./src-tauri/target rm -rf ~/Library/Caches/jan*
rm -rfv ~/jan/extensions
rm -rfv ~/Library/Caches/jan*
endif endif

307
README.md
View File

@ -1,146 +1,259 @@
# Jan - Open-source ChatGPT replacement # Jan - Local AI Assistant
<img width="2048" height="280" alt="github jan banner" src="https://github.com/user-attachments/assets/f3f87889-c133-433b-b250-236218150d3f" /> ![Jan banner](./JanBanner.png)
<p align="center"> <p align="center">
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section --> <!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
<img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/janhq/jan"/> <img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/menloresearch/jan"/>
<img alt="Github Last Commit" src="https://img.shields.io/github/last-commit/janhq/jan"/> <img alt="Github Last Commit" src="https://img.shields.io/github/last-commit/menloresearch/jan"/>
<img alt="Github Contributors" src="https://img.shields.io/github/contributors/janhq/jan"/> <img alt="Github Contributors" src="https://img.shields.io/github/contributors/menloresearch/jan"/>
<img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/janhq/jan"/> <img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/menloresearch/jan"/>
<img alt="Discord" src="https://img.shields.io/discord/1107178041848909847?label=discord"/> <img alt="Discord" src="https://img.shields.io/discord/1107178041848909847?label=discord"/>
</p> </p>
<p align="center"> <p align="center">
<a href="https://www.jan.ai/docs/desktop">Getting Started</a> <a href="https://jan.ai/docs/quickstart">Getting Started</a>
- <a href="https://discord.gg/Exe46xPMbK">Community</a> - <a href="https://jan.ai/docs">Docs</a>
- <a href="https://jan.ai/changelog">Changelog</a> - <a href="https://github.com/menloresearch/jan/releases">Changelog</a>
- <a href="https://github.com/janhq/jan/issues">Bug reports</a> - <a href="https://github.com/menloresearch/jan/issues">Bug reports</a>
- <a href="https://discord.gg/AsJ8krTT3N">Discord</a>
</p> </p>
Jan is bringing the best of open-source AI in an easy-to-use product. Download and run LLMs with **full control** and **privacy**. <p align="center">
⚠️ <b> Jan is currently in Development</b>: Expect breaking changes and bugs!
</p>
## Installation Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for a layperson to download and run LLMs and use AI with **full control** and **privacy**.
The easiest way to get started is by downloading one of the following versions for your respective operating system: Jan is powered by [Cortex](https://github.com/menloresearch/cortex.cpp), our embeddable local AI engine that runs on any hardware.
From PCs to multi-GPU clusters, Jan & Cortex supports universal architectures:
- [x] NVIDIA GPUs (fast)
- [x] Apple M-series (fast)
- [x] Apple Intel
- [x] Linux Debian
- [x] Windows x64
#### Features:
- [Model Library](https://jan.ai/docs/models/manage-models#add-models) with popular LLMs like Llama, Gemma, Mistral, or Qwen
- Connect to [Remote AI APIs](https://jan.ai/docs/remote-models/openai) like Groq and OpenRouter
- Local API Server with OpenAI-equivalent API
- [Extensions](https://jan.ai/docs/extensions) for customizing Jan
## Download
<table> <table>
<tr> <tr style="text-align:center">
<td><b>Platform</b></td> <td style="text-align:center"><b>Version Type</b></td>
<td><b>Download</b></td> <td style="text-align:center"><b>Windows</b></td>
<td style="text-align:center"><b>MacOS Universal</b></td>
<td colspan="2" style="text-align:center"><b>Linux</b></td>
</tr> </tr>
<tr> <tr style="text-align:center">
<td><b>Windows</b></td> <td style="text-align:center"><b>Stable (Recommended)</b></td>
<td><a href='https://app.jan.ai/download/latest/win-x64'>jan.exe</a></td> <td style="text-align:center">
<a href='https://app.jan.ai/download/latest/win-x64'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
<b>jan.exe</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/latest/mac-universal'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>jan.dmg</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/latest/linux-amd64-deb'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.deb</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/latest/linux-amd64-appimage'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.AppImage</b>
</a>
</td>
</tr> </tr>
<tr> <tr style="text-align:center">
<td><b>macOS</b></td> <td style="text-align:center"><b>Beta (Preview)</b></td>
<td><a href='https://app.jan.ai/download/latest/mac-universal'>jan.dmg</a></td> <td style="text-align:center">
<a href='https://app.jan.ai/download/beta/win-x64'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
<b>jan.exe</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/beta/mac-universal'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>jan.dmg</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/beta/linux-amd64-deb'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.deb</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/beta/linux-amd64-appimage'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.AppImage</b>
</a>
</td>
</tr> </tr>
<tr> <tr style="text-align:center">
<td><b>Linux (deb)</b></td> <td style="text-align:center"><b>Nightly Build (Experimental)</b></td>
<td><a href='https://app.jan.ai/download/latest/linux-amd64-deb'>jan.deb</a></td> <td style="text-align:center">
</tr> <a href='https://app.jan.ai/download/nightly/win-x64'>
<tr> <img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/windows.png' style="height:14px; width: 14px" />
<td><b>Linux (AppImage)</b></td> <b>jan.exe</b>
<td><a href='https://app.jan.ai/download/latest/linux-amd64-appimage'>jan.AppImage</a></td> </a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/nightly/mac-universal'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>jan.dmg</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/nightly/linux-amd64-deb'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.deb</b>
</a>
</td>
<td style="text-align:center">
<a href='https://app.jan.ai/download/nightly/linux-amd64-appimage'>
<img src='https://github.com/menloresearch/jan/blob/dev/docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.AppImage</b>
</a>
</td>
</tr> </tr>
</table> </table>
Download the latest version of Jan at https://jan.ai/ or visit the [GitHub Releases](https://github.com/menloresearch/jan/releases) to download any previous release.
Download from [jan.ai](https://jan.ai/) or [GitHub Releases](https://github.com/janhq/jan/releases). ## Demo
## Features https://github.com/user-attachments/assets/c3592fa2-c504-4d9d-a885-7e00122a50f3
- **Local AI Models**: Download and run LLMs (Llama, Gemma, Qwen, GPT-oss etc.) from HuggingFace _Real-time Video: Jan v0.5.7 on a Mac M2, 16GB Sonoma 14.2_
- **Cloud Integration**: Connect to GPT models via OpenAI, Claude models via Anthropic, Mistral, Groq, and others
- **Custom Assistants**: Create specialized AI assistants for your tasks
- **OpenAI-Compatible API**: Local server at `localhost:1337` for other applications
- **Model Context Protocol**: MCP integration for agentic capabilities
- **Privacy First**: Everything runs locally when you want it to
## Build from Source ## Quicklinks
For those who enjoy the scenic route: ### Jan
### Prerequisites - [Jan Website](https://jan.ai/)
- [Jan GitHub](https://github.com/menloresearch/jan)
- [Documentation](https://jan.ai/docs)
- [Jan Changelog](https://jan.ai/changelog)
- [Jan Blog](https://jan.ai/blog)
- Node.js ≥ 20.0.0 ### Cortex.cpp
- Yarn ≥ 1.22.0
- Make ≥ 3.81
- Rust (for Tauri)
### Run with Make Jan is powered by **Cortex.cpp**. It is a C++ command-line interface (CLI) designed as an alternative to [Ollama](https://ollama.com/). By default, it runs on the llama.cpp engine but also supports other engines, including ONNX and TensorRT-LLM, making it a multi-engine platform.
```bash - [Cortex Website](https://cortex.so/)
git clone https://github.com/janhq/jan - [Cortex GitHub](https://github.com/menloresearch/cortex.cpp)
cd jan - [Documentation](https://cortex.so/docs/)
make dev - [Models Library](https://cortex.so/models)
``` - API Reference: _Under development_
This handles everything: installs dependencies, builds core components, and launches the app. ## Requirements for running Jan
**Available make targets:** - **MacOS**: 13 or higher
- `make dev` - Full development setup and launch - **Windows**:
- `make build` - Production build - Windows 10 or higher
- `make test` - Run tests and linting - To enable GPU support:
- `make clean` - Delete everything and start fresh - Nvidia GPU with CUDA Toolkit 11.7 or higher
- Nvidia driver 470.63.01 or higher
### Manual Commands - **Linux**:
- glibc 2.27 or higher (check with `ldd --version`)
```bash - gcc 11, g++ 11, cpp 11 or higher, refer to this [link](https://jan.ai/guides/troubleshooting/gpu-not-used/#specific-requirements-for-linux) for more information
yarn install - To enable GPU support:
yarn build:tauri:plugin:api - Nvidia GPU with CUDA Toolkit 11.7 or higher
yarn build:core - Nvidia driver 470.63.01 or higher
yarn build:extensions
yarn dev
```
## System Requirements
**Minimum specs for a decent experience:**
- **macOS**: 13.6+ (8GB RAM for 3B models, 16GB for 7B, 32GB for 13B)
- **Windows**: 10+ with GPU support for NVIDIA/AMD/Intel Arc
- **Linux**: Most distributions work, GPU acceleration available
For detailed compatibility, check our [installation guides](https://jan.ai/docs/desktop/mac).
## Troubleshooting ## Troubleshooting
If things go sideways: As Jan is in development mode, you might get stuck on a some common issues:
1. Check our [troubleshooting docs](https://jan.ai/docs/troubleshooting) - [Troubleshooting a broken build](https://jan.ai/docs/troubleshooting#broken-build)
2. Copy your error logs and system specs - [Troubleshooting NVIDIA GPU](https://jan.ai/docs/troubleshooting#troubleshooting-nvidia-gpu)
3. Ask for help in our [Discord](https://discord.gg/FTk2MvZwJH) `#🆘|jan-help` channel - [Troubleshooting Something's Amiss](https://jan.ai/docs/troubleshooting#somethings-amiss)
If you can't find what you need in our troubleshooting guide, feel free reach out to us for extra help:
1. Copy your [error logs & device specifications](https://jan.ai/docs/troubleshooting#how-to-get-error-logs).
2. Go to our [Discord](https://discord.com/invite/FTk2MvZwJH) & send it to **#🆘|get-help** channel for further support.
_Check the logs to ensure the information is what you intend to send. Note that we retain your logs for only 24 hours, so report any issues promptly._
## Contributing ## Contributing
Contributions welcome. See [CONTRIBUTING.md](CONTRIBUTING.md) for the full spiel. Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) file
## Links ### Pre-requisites
- [Documentation](https://jan.ai/docs) - The manual you should read - node >= 20.0.0
- [API Reference](https://jan.ai/api-reference) - For the technically inclined - yarn >= 1.22.0
- [Changelog](https://jan.ai/changelog) - What we broke and fixed - make >= 3.81
- [Discord](https://discord.gg/FTk2MvZwJH) - Where the community lives
## Contact ### Instructions
- **Bugs**: [GitHub Issues](https://github.com/janhq/jan/issues) 1. **Clone the repository and prepare:**
- **Business**: hello@jan.ai
- **Jobs**: hr@jan.ai
- **General Discussion**: [Discord](https://discord.gg/FTk2MvZwJH)
## License ```bash
git clone https://github.com/menloresearch/jan
cd jan
git checkout -b DESIRED_BRANCH
```
Apache 2.0 - Because sharing is caring. 2. **Run development and use Jan Desktop**
```bash
make dev
```
This will start the development server and open the desktop app.
### For production build
```bash
# Do steps 1 and 2 in the previous section
# Build the app
make build
```
This will build the app MacOS m1/m2 for production (with code signing already done) and put the result in `dist` folder.
## Acknowledgements ## Acknowledgements
Built on the shoulders of giants: Jan builds on top of other open-source projects:
- [Llama.cpp](https://github.com/ggerganov/llama.cpp) - [llama.cpp](https://github.com/ggml-org/llama.cpp)
- [Tauri](https://tauri.app/) - [LangChain](https://github.com/langchain-ai)
- [Scalar](https://github.com/scalar/scalar) - [TensorRT](https://github.com/NVIDIA/TensorRT)
- [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM)
## Contact
- Bugs & requests: file a GitHub ticket
- For discussion: join our Discord [here](https://discord.gg/FTk2MvZwJH)
- For business inquiries: email hello@jan.ai
- For jobs: please email hr@jan.ai
## Trust & Safety
Beware of scams!
- We will never request your personal information.
- Our product is completely free; no paid version exists.
- We do not have a token or ICO.
- We are a [bootstrapped company](https://en.wikipedia.org/wiki/Bootstrapping), and don't have any external investors (_yet_). We're open to exploring opportunities with strategic partners want to tackle [our mission](https://jan.ai/about#mission) together.
## License
Jan is free and open source, under the **AGPLv3** license.

9
ai.menlo.jan.desktop Normal file
View File

@ -0,0 +1,9 @@
[Desktop Entry]
Name=Jan
Comment=Local AI Assistant that runs 100% offline
Exec=run.sh
Icon=ai.menlo.jan
Type=Application
Categories=Development;
Keywords=AI;Assistant;LLM;ChatGPT;Local;Offline;
StartupNotify=true

42
ai.menlo.jan.metainfo.xml Normal file
View File

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop-application">
<id>ai.menlo.jan</id>
<metadata_license>FSFAP</metadata_license>
<project_license>AGPL-3.0-only</project_license>
<name>Jan</name>
<summary>Local AI Assistant that runs 100% offline on your device</summary>
<description>
<p>
Jan is a ChatGPT-alternative that runs 100% offline on your device. Our goal is to make it easy for anyone to download and run LLMs and use AI with full control and privacy.
</p>
<p>Features:</p>
<ul>
<li>Model Library with popular LLMs like Llama, Gemma, Mistral, or Qwen</li>
<li>Connect to Remote AI APIs like Groq and OpenRouter</li>
<li>Local API Server with OpenAI-equivalent API</li>
<li>Extensions for customizing Jan</li>
</ul>
</description>
<launchable type="desktop-id">ai.menlo.jan.desktop</launchable>
<screenshots>
<screenshot type="default">
<image>https://catalog.jan.ai/flatpak/demo.gif</image>
</screenshot>
</screenshots>
<url type="homepage">https://jan.ai/</url>
<url type="bugtracker">https://github.com/menloresearch/jan/issues</url>
<content_rating type="oars-1.1" />
<releases>
<release version="0.5.12" date="2024-01-02">
<description>
<p>Latest stable release of Jan AI</p>
</description>
</release>
</releases>
</component>

View File

@ -1,319 +0,0 @@
# E2E Test Runner with ReportPortal Integration
🚀 An automated end-to-end test runner for Jan application with ReportPortal integration, screen recording, and comprehensive test monitoring.
## Features
- ✅ **Automated Jan App Testing**: Automatically starts/stops Jan application
- 🖥️ **Auto Computer Server**: Automatically starts computer server in background
- 📹 **Screen Recording**: Records test execution for debugging
- 📊 **ReportPortal Integration**: Optional test results upload to ReportPortal
- 🔄 **Turn Monitoring**: Prevents infinite loops with configurable turn limits
- 🎯 **Flexible Configuration**: Command-line arguments and environment variables
- 🌐 **Cross-platform**: Windows, macOS, and Linux support
- 📁 **Test Discovery**: Automatically scans test files from directory
## Prerequisites
- Python 3.8+
- Jan application installed
- Windows Sandbox (for computer provider)
- Computer server package installed
- Required Python packages (see requirements.txt)
## Installation
1. Clone the repository:
```bash
git clone <repository-url>
cd autoqa
```
2. Install dependencies:
```bash
## For Windows and Linux
pip install -r requirements.txt
```
3. Ensure Jan application is installed in one of the default locations:
- Windows: `%LOCALAPPDATA%\Programs\jan\Jan.exe`
- macOS: `~/Applications/Jan.app/Contents/MacOS/Jan`
- Linux: `jan` (in PATH)
## Quick Start
### Local Development (No ReportPortal)
```bash
# Run all tests in ./tests directory (auto-starts computer server)
python main.py
# Run with custom test directory
python main.py --tests-dir "my_tests"
# Run with custom Jan app path
python main.py --jan-app-path "C:/Custom/Path/Jan.exe"
# Skip auto computer server start (if already running)
python main.py --skip-server-start
```
### With ReportPortal Integration
```bash
# Enable ReportPortal with token
python main.py --enable-reportportal --rp-token "YOUR_API_TOKEN"
# Full ReportPortal configuration
python main.py \
--enable-reportportal \
--rp-endpoint "https://reportportal.example.com" \
--rp-project "my_project" \
--rp-token "YOUR_API_TOKEN"
```
## Configuration
### Command Line Arguments
| Argument | Environment Variable | Default | Description |
| ----------------------- | --------------------- | ------------------------------- | ------------------------------------------------- |
| **Computer Server** |
| `--skip-server-start` | `SKIP_SERVER_START` | `false` | Skip automatic computer server startup |
| **ReportPortal** |
| `--enable-reportportal` | `ENABLE_REPORTPORTAL` | `false` | Enable ReportPortal integration |
| `--rp-endpoint` | `RP_ENDPOINT` | `https://reportportal.menlo.ai` | ReportPortal endpoint URL |
| `--rp-project` | `RP_PROJECT` | `default_personal` | ReportPortal project name |
| `--rp-token` | `RP_TOKEN` | - | ReportPortal API token (required when RP enabled) |
| **Jan Application** |
| `--jan-app-path` | `JAN_APP_PATH` | _auto-detected_ | Path to Jan application executable |
| `--jan-process-name` | `JAN_PROCESS_NAME` | `Jan.exe` | Jan process name for monitoring |
| **Model Configuration** |
| `--model-name` | `MODEL_NAME` | `ByteDance-Seed/UI-TARS-1.5-7B` | AI model name |
| `--model-base-url` | `MODEL_BASE_URL` | `http://10.200.108.58:1234/v1` | Model API endpoint |
| `--model-provider` | `MODEL_PROVIDER` | `oaicompat` | Model provider type |
| `--model-loop` | `MODEL_LOOP` | `uitars` | Agent loop type |
| **Test Execution** |
| `--max-turns` | `MAX_TURNS` | `30` | Maximum turns per test |
| `--tests-dir` | `TESTS_DIR` | `tests` | Directory containing test files |
| `--delay-between-tests` | `DELAY_BETWEEN_TESTS` | `3` | Delay between tests (seconds) |
### Environment Variables
Create a `.env` file or set environment variables:
```bash
# Computer Server
SKIP_SERVER_START=false
# ReportPortal Configuration
ENABLE_REPORTPORTAL=true
RP_ENDPOINT=https://reportportal.example.com
RP_PROJECT=my_project
RP_TOKEN=your_secret_token
# Jan Application
JAN_APP_PATH=C:\Custom\Path\Jan.exe
JAN_PROCESS_NAME=Jan.exe
# Model Configuration
MODEL_NAME=gpt-4
MODEL_BASE_URL=https://api.openai.com/v1
MODEL_PROVIDER=openai
MODEL_LOOP=uitars
# Test Settings
MAX_TURNS=50
TESTS_DIR=e2e_tests
DELAY_BETWEEN_TESTS=5
```
## Test Structure
### Test Files
- Test files should be `.txt` files containing test prompts
- Place test files in the `tests/` directory (or custom directory)
- Support nested directories for organization
Example test file (`tests/basic/login_test.txt`):
```
Test the login functionality of Jan application.
Navigate to login screen, enter valid credentials, and verify successful login.
```
### Directory Structure
```
autoqa/
├── main.py # Main test runner
├── utils.py # Jan app utilities
├── test_runner.py # Test execution logic
├── screen_recorder.py # Screen recording functionality
├── reportportal_handler.py # ReportPortal integration
├── tests/ # Test files directory
│ ├── basic/
│ │ ├── login_test.txt
│ │ └── navigation_test.txt
│ └── advanced/
│ └── complex_workflow.txt
├── recordings/ # Screen recordings (auto-created)
├── trajectories/ # Agent trajectories (auto-created)
└── README.md
```
## Usage Examples
### Basic Usage
```bash
# Run all tests locally (auto-starts computer server)
python main.py
# Get help
python main.py --help
# Run without auto-starting computer server
python main.py --skip-server-start
```
### Advanced Usage
```bash
# Custom configuration
python main.py \
--tests-dir "integration_tests" \
--max-turns 40 \
--delay-between-tests 10 \
--model-name "gpt-4"
# Environment + Arguments
ENABLE_REPORTPORTAL=true RP_TOKEN=secret python main.py --max-turns 50
# Different model provider
python main.py \
--model-provider "openai" \
--model-name "gpt-4" \
--model-base-url "https://api.openai.com/v1"
# External computer server (skip auto-start)
SKIP_SERVER_START=true python main.py
```
### CI/CD Usage
```bash
# GitHub Actions / CI environment
ENABLE_REPORTPORTAL=true \
RP_TOKEN=${{ secrets.RP_TOKEN }} \
MODEL_NAME=production-model \
MAX_TURNS=40 \
SKIP_SERVER_START=false \
python main.py
```
## Computer Server Management
The test runner automatically manages the computer server:
### Automatic Server Management (Default)
- **Auto-start**: Computer server starts automatically in background thread
- **Auto-cleanup**: Server stops when main program exits (daemon thread)
- **Error handling**: Graceful fallback if server fails to start
### Manual Server Management
```bash
# If you prefer to manage computer server manually:
python -m computer_server # In separate terminal
# Then run tests without auto-start:
python main.py --skip-server-start
```
### Server Logs
```
2025-07-15 15:30:45 - INFO - Starting computer server in background...
2025-07-15 15:30:45 - INFO - Calling computer_server.run_cli()...
2025-07-15 15:30:45 - INFO - Computer server thread started
2025-07-15 15:30:50 - INFO - Computer server is running successfully
```
## Output
### Local Development
- **Console logs**: Detailed execution information
- **Screen recordings**: Saved to `recordings/` directory as MP4 files
- **Trajectories**: Agent interaction data in `trajectories/` directory
- **Local results**: Test results logged to console
### ReportPortal Integration
When enabled, results are uploaded to ReportPortal including:
- Test execution status (PASSED/FAILED)
- Screen recordings as attachments
- Detailed turn-by-turn interaction logs
- Error messages and debugging information
## Troubleshooting
### Common Issues
1. **Computer server startup failed**:
```bash
# Install required dependencies
pip install computer_server
# Check if computer_server is available
python -c "import computer_server; print('OK')"
# Use manual server if auto-start fails
python main.py --skip-server-start
```
2. **Jan app not found**:
```bash
# Specify custom path
python main.py --jan-app-path "D:/Apps/Jan/Jan.exe"
```
3. **Windows dependencies missing**:
```bash
# Install Windows-specific packages
pip install pywin32 psutil
```
4. **ReportPortal connection failed**:
- Verify endpoint URL and token
- Check network connectivity
- Ensure project exists
5. **Screen recording issues**:
- Check disk space in `recordings/` directory
- Verify screen recording permissions
6. **Test timeouts**:
```bash
# Increase turn limit
python main.py --max-turns 50
```
### Debug Mode
Enable detailed logging by modifying the logging level in `main.py`:
```python
logging.basicConfig(level=logging.DEBUG)
```

View File

@ -1,264 +0,0 @@
# I. Before release
## A. Initial update / migration Data check
Before testing, set-up the following in the old version to make sure that we can see the data is properly migrated:
- [ ] Changing appearance / theme to something that is obviously different from default set-up
- [ ] Ensure there are a few chat threads
- [ ] Ensure there are a few favourites / star threads
- [ ] Ensure there are 2 model downloaded
- [ ] Ensure there are 2 import on local provider (llama.cpp)
- [ ] Modify MCP servers list and add some ENV value to MCP servers
- [ ] Modify Local API Server
- [ ] HTTPS proxy config value
- [ ] Add 2 custom assistants to Jan
- [ ] Create a new chat with the custom assistant
- [ ] Change the `App Data` to some other folder
- [ ] Create a Custom Provider
- [ ] Disabled some model providers
- [NEW] Change llama.cpp setting of 2 models
#### Validate that the update does not corrupt existing user data or settings (before and after update show the same information):
- [ ] Threads
- [ ] Previously used model and assistants is shown correctly
- [ ] Can resume chat in threads with the previous context
- [ ] Assistants
- Settings:
- [ ] Appearance
- [ ] MCP Servers
- [ ] Local API Server
- [ ] HTTPS Proxy
- [ ] Custom Provider Set-up
#### In `Hub`:
- [ ] Can see model from HF listed properly
- [ ] Downloaded model will show `Use` instead of `Download`
- [ ] Toggling on `Downloaded` on the right corner show the correct list of downloaded models
#### In `Settings -> General`:
- [ ] Ensure the `App Data` path is the same
- [ ] Click Open Logs, App Log will show
#### In `Settings -> Model Providers`:
- [ ] Llama.cpp still listed downloaded models and user can chat with the models
- [ ] Llama.cpp still listed imported models and user can chat with the models
- [ ] Remote model still retain previously set up API keys and user can chat with model from the provider without having to re-enter API keys
- [ ] Enabled and Disabled Model Providers stay the same as before update
#### In `Settings -> Extensions`, check that following exists:
- [ ] Conversational
- [ ] Jan Assistant
- [ ] Download Manager
- [ ] llama.cpp Inference Engine
## B. `Settings`
#### In `General`:
- [ ] Ensure `Community` links work and point to the correct website
- [ ] Ensure the `Check for Updates` function detect the correct latest version
- [ ] [ENG] Create a folder with un-standard character as title (e.g. Chinese character) => change the `App data` location to that folder => test that model is still able to load and run properly.
#### In `Appearance`:
- [ ] Toggle between different `Theme` options to check that they change accordingly and that all elements of the UI are legible with the right contrast:
- [ ] Light
- [ ] Dark
- [ ] System (should follow your OS system settings)
- [ ] Change the following values => close the application => re-open the application => ensure that the change is persisted across session:
- [ ] Theme
- [ ] Font Size
- [ ] Window Background
- [ ] App Main View
- [ ] Primary
- [ ] Accent
- [ ] Destructive
- [ ] Chat Width
- [ ] Ensure that when this value is changed, there is no broken UI caused by it
- [ ] Code Block
- [ ] Show Line Numbers
- [ENG] Ensure that when clicking on `Reset` in the `Appearance` section, it resets back to the default values
- [ENG] Ensure that when clicking on `Reset` in the `Code Block` section, it resets back to the default values
#### In `Model Providers`:
In `Llama.cpp`:
- [ ] After downloading a model from hub, the model is listed with the correct name under `Models`
- [ ] Can import `gguf` model with no error
- [ ] Imported model will be listed with correct name under the `Models`
- [ ] Check that when click `delete` the model will be removed from the list
- [ ] Deleted model doesn't appear in the selectable models section in chat input (even in old threads that use the model previously)
- [ ] Ensure that user can re-import deleted imported models
- [ ] Enable `Auto-Unload Old Models`, and ensure that only one model can run / start at a time. If there are two model running at the time of enable, both of them will be stopped.
- [ ] Disable `Auto-Unload Old Models`, and ensure that multiple models can run at the same time.
- [ ] Enable `Context Shift` and ensure that context can run for long without encountering memory error. Use the `banana test` by turn on fetch MCP => ask local model to fetch and summarize the history of banana (banana has a very long history on wiki it turns out). It should run out of context memory sufficiently fast if `Context Shift` is not enabled.
- [ ] Ensure that user can change the Jinja chat template of individual model and it doesn't affect the template of other model
- [ ] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users.
- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works.
In Remote Model Providers:
- [ ] Check that the following providers are presence:
- [ ] OpenAI
- [ ] Anthropic
- [ ] Cohere
- [ ] OpenRouter
- [ ] Mistral
- [ ] Groq
- [ ] Gemini
- [ ] Hugging Face
- [ ] Models should appear as available on the selectable dropdown in chat input once some value is input in the API key field. (it could be the wrong API key)
- [ ] Once a valid API key is used, user can select a model from that provider and chat without any error.
- [ ] Delete a model and ensure that it doesn't show up in the `Models` list view or in the selectable dropdown in chat input.
- [ ] Ensure that a deleted model also not selectable or appear in old threads that used it.
- [ ] Adding of new model manually works and user can chat with the newly added model without error (you can add back the model you just delete for testing)
- [ ] [0.6.9] Make sure that Ollama set-up as a custom provider work with Jan
In Custom Providers:
- [ ] Ensure that user can create a new custom providers with the right baseURL and API key.
- [ ] Click `Refresh` should retrieve a list of available models from the Custom Providers.
- [ ] User can chat with the custom providers
- [ ] Ensure that Custom Providers can be deleted and won't reappear in a new session
In general:
- [ ] Disabled Model Provider should not show up as selectable in chat input of new thread and old thread alike (old threads' chat input should show `Select Model` instead of disabled model)
#### In `Shortcuts`:
Make sure the following shortcut key combo is visible and works:
- [ ] New chat
- [ ] Toggle Sidebar
- [ ] Zoom In
- [ ] Zoom Out
- [ ] Send Message
- [ ] New Line
- [ ] Navigation
#### In `Hardware`:
Ensure that the following section information show up for hardware
- [ ] Operating System
- [ ] CPU
- [ ] Memory
- [ ] GPU (If the machine has one)
- [ ] Enabling and Disabling GPUs and ensure that model still run correctly in both mode
- [ ] Enabling or Disabling GPU should not affect the UI of the application
#### In `MCP Servers`:
- [ ] Ensure that an user can create a MCP server successfully when enter in the correct information
- [ ] Ensure that `Env` value is masked by `*` in the quick view.
- [ ] If an `Env` value is missing, there should be a error pop up.
- [ ] Ensure that deleted MCP server disappear from the `MCP Server` list without any error
- [ ] Ensure that before a MCP is deleted, it will be disable itself first and won't appear on the tool list after deleted.
- [ ] Ensure that when the content of a MCP server is edited, it will be updated and reflected accordingly in the UI and when running it.
- [ ] Toggling enable and disabled of a MCP server work properly
- [ ] A disabled MCP should not appear in the available tool list in chat input
- [ ] An disabled MCP should not be callable even when forced prompt by the model (ensure there is no ghost MCP server)
- [ ] Ensure that enabled MCP server start automatically upon starting of the application
- [ ] An enabled MCP should show functions in the available tool list
- [ ] User can use a model and call different tool from multiple enabled MCP servers in the same thread
- [ ] If `Allow All MCP Tool Permissions` is disabled, in every new thread, before a tool is called, there should be a confirmation dialog pop up to confirm the action.
- [ ] When the user click `Deny`, the tool call will not be executed and return a message indicate so in the tool call result.
- [ ] When the user click `Allow Once` on the pop up, a confirmation dialog will appear again when the tool is called next time.
- [ ] When the user click `Always Allow` on the pop up, the tool will retain permission and won't ask for confirmation again. (this applied at an individual tool level, not at the MCP server level)
- [ ] If `Allow All MCP Tool Permissions` is enabled, in every new thread, there should not be any confirmation dialog pop up when a tool is called.
- [ ] When the pop-up appears, make sure that the `Tool Parameters` are also shown in detail in the pop-up.
- [ ] [0.6.9] Go to Enter JSON configuration when creating a new MCP => paste the JSON config inside => click `Save` => server works
- [ ] [0.6.9] If individual JSON config format is failed, the MCP server should not be activated
- [ ] [0.6.9] Make sure that MCP server can be used with streamable-http transport => connect to Smithery and test MCP server
#### In `Local API Server`:
- [ ] User can `Start Server` and chat with the default endpoint
- [ ] User should see the correct model name at `v1/models`
- [ ] User should be able to chat with it at `v1/chat/completions`
- [ ] `Open Logs` show the correct query log send to the server and return from the server
- [ ] Make sure that changing all the parameter in `Server Configuration` is reflected when `Start Server`
- [ ] [0.6.9] When the startup configuration, the last used model is also automatically start (users does not have to manually start a model before starting the server)
- [ ] [0.6.9] Make sure that you can send an image to a Local API Server and it also works (can set up Local API Server as a Custom Provider in Jan to test)
#### In `HTTPS Proxy`:
- [ ] Model download request goes through proxy endpoint
## C. Hub
- [ ] User can click `Download` to download a model
- [ ] User can cancel a model in the middle of downloading
- [ ] User can add a Hugging Face model detail to the list by pasting a model name / model url into the search bar and press enter
- [ ] Clicking on a listing will open up the model card information within Jan and render the HTML properly
- [ ] Clicking download work on the `Show variants` section
- [ ] Clicking download work inside the Model card HTML
- [ ] [0.6.9] Check that the model recommendation base on user hardware work as expected in the Model Hub
## D. Threads
#### In the left bar:
- [ ] User can delete an old thread, and it won't reappear even when app restart
- [ ] Change the title of the thread should update its last modification date and re-organise its position in the correct chronological order on the left bar.
- [ ] The title of a new thread is the first message from the user.
- [ ] Users can starred / un-starred threads accordingly
- [ ] Starred threads should move to `Favourite` section and other threads should stay in `Recent`
- [ ] Ensure that the search thread feature return accurate result based on thread titles and contents (including from both `Favourite` and `Recent`)
- [ ] `Delete All` should delete only threads in the `Recents` section
- [ ] `Unstar All` should un-star all of the `Favourites` threads and return them to `Recent`
#### In a thread:
- [ ] When `New Chat` is clicked, the assistant is set as the last selected assistant, the model selected is set as the last used model, and the user can immediately chat with the model.
- [ ] User can conduct multi-turn conversation in a single thread without lost of data (given that `Context Shift` is not enabled)
- [ ] User can change to a different model in the middle of a conversation in a thread and the model work.
- [ ] User can click on `Regenerate` button on a returned message from the model to get a new response base on the previous context.
- [ ] User can change `Assistant` in the middle of a conversation in a thread and the new assistant setting will be applied instead.
- [ ] The chat windows can render and show all the content of a selected threads (including scroll up and down on long threads)
- [ ] Old thread retained their setting as of the last update / usage
- [ ] Assistant option
- [ ] Model option (except if the model / model provider has been deleted or disabled)
- [ ] User can send message with different type of text content (e.g text, emoji, ...)
- [ ] When request model to generate a markdown table, the table is correctly formatted as returned from the model.
- [ ] When model generate code, ensure that the code snippets is properly formatted according to the `Appearance -> Code Block` setting.
- [ ] Users can edit their old message and regenerate the answer based on the new message
- [ ] User can click `Copy` to copy the model response
- [ ] User can click `Delete` to delete either the user message or the model response.
- [ ] The token speed appear when a response from model is being generated and the final value is show under the response.
- [ ] Make sure that user when using IME keyboard to type Chinese and Japanese character and they press `Enter`, the `Send` button doesn't trigger automatically after each words.
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a remote model
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a local model
- [ ] [0.6.9] Check that you can paste an image to text box from your system clipboard (Copy - Paste)
- [ ] [0.6.9] Make sure that user can favourite a model in the model selection in chat input
## E. Assistants
- [ ] There is always at least one default Assistant which is Jan
- [ ] The default Jan assistant has `stream = True` by default
- [ ] User can create / edit a new assistant with different parameters and instructions choice.
- [ ] When the user deletes the default Assistant, the next Assistant in line will become the default Assistant and its settings will apply to new chats accordingly.
- [ ] User can create / edit assistant from within a Chat windows (on the top left)
## F. After checking everything else
In `Settings -> General`:
- [ ] Change the location of the `App Data` to some other path that is not the default path
- [ ] Click on `Reset` button in `Other` to factory reset the app:
- [ ] All threads deleted
- [ ] All Assistant deleted except for default Jan Assistant
- [ ] `App Data` location is reset back to default path
- [ ] Appearance reset
- [ ] Model Providers information all reset
- [ ] Llama.cpp setting reset
- [ ] API keys cleared
- [ ] All Custom Providers deleted
- [ ] MCP Servers reset
- [ ] Local API Server reset
- [ ] HTTPS Proxy reset
- [ ] After closing the app, all models are unloaded properly
- [ ] Locate to the data folder using the `App Data` path information => delete the folder => reopen the app to check that all the folder is re-created with all the necessary data.
- [ ] Ensure that the uninstallation process removes the app successfully from the system.
## G. New App Installation
- [ ] Clean up by deleting all the left over folder created by Jan
- [ ] On MacOS
- [ ] `~/Library/Application Support/Jan`
- [ ] `~/Library/Caches/jan.ai.app`
- [ ] On Windows
- [ ] `C:\Users\<Username>\AppData\Roaming\Jan\`
- [ ] `C:\Users\<Username>\AppData\Local\jan.ai.app`
- [ ] On Linux
- [ ] `~/.cache/Jan`
- [ ] `~/.cache/jan.ai.app`
- [ ] `~/.local/share/Jan`
- [ ] `~/.local/share/jan.ai.app`
- [ ] Ensure that the fresh install of Jan launch
- [ ] Do some basic check to see that all function still behaved as expected. To be extra careful, you can go through the whole list again. However, it is more advisable to just check to make sure that all the core functionality like `Thread` and `Model Providers` work as intended.
# II. After release
- [ ] Check that the App Updater works and user can update to the latest release without any problem
- [ ] App restarts after the user finished an update
- [ ] Repeat section `A. Initial update / migration Data check` above to verify that update is done correctly on live version

View File

@ -1,514 +0,0 @@
import asyncio
import logging
import os
import argparse
import threading
import time
import platform
from datetime import datetime
from computer import Computer
from reportportal_client import RPClient
from reportportal_client.helpers import timestamp
from utils import scan_test_files
from test_runner import run_single_test_with_timeout
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s',
handlers=[
logging.StreamHandler()
]
)
logger = logging.getLogger(__name__)
# Platform detection
IS_WINDOWS = platform.system() == "Windows"
IS_LINUX = platform.system() == "Linux"
IS_MACOS = platform.system() == "Darwin"
def get_computer_config():
"""Get computer configuration based on platform"""
if IS_WINDOWS:
return {
"os_type": "windows"
}
elif IS_LINUX:
return {
"os_type": "linux"
}
elif IS_MACOS:
return {
"os_type": "macos"
}
else:
# Default fallback
logger.warning(f"Unknown platform {platform.system()}, using Linux config as fallback")
return {
"os_type": "linux"
}
def get_default_jan_path():
"""Get default Jan app path based on OS"""
if IS_WINDOWS:
# Try multiple common locations on Windows
possible_paths = [
os.path.expanduser(r"~\AppData\Local\Programs\jan\Jan.exe"),
os.path.join(os.environ.get('LOCALAPPDATA', ''), 'Programs', 'jan', 'Jan.exe'),
os.path.join(os.environ.get('APPDATA', ''), 'jan', 'Jan.exe'),
r"C:\Program Files\jan\Jan.exe",
r"C:\Program Files (x86)\jan\Jan.exe"
]
# Return first existing path, or first option as default
for path in possible_paths:
if os.path.exists(path):
return path
# If none exist, return the most likely default
return possible_paths[0]
elif IS_LINUX:
# Linux possible locations
possible_paths = [
"/usr/bin/Jan",
"/usr/local/bin/Jan",
os.path.expanduser("~/Applications/Jan/Jan"),
"/opt/Jan/Jan"
]
# Return first existing path, or first option as default
for path in possible_paths:
if os.path.exists(path):
return path
# Default to nightly build path
return "/usr/bin/Jan"
elif IS_MACOS:
# macOS defaults
possible_paths = [
"/Applications/Jan.app/Contents/MacOS/Jan",
os.path.expanduser("~/Applications/Jan.app/Contents/MacOS/Jan")
]
for path in possible_paths:
if os.path.exists(path):
return path
return possible_paths[0]
else:
# Unknown platform
return "jan"
def start_computer_server():
"""Start computer server in background thread"""
try:
logger.info("Starting computer server in background...")
# Import computer_server module
import computer_server
import sys
# Start server in a separate thread
def run_server():
try:
# Save original sys.argv to avoid argument conflicts
original_argv = sys.argv.copy()
# Override sys.argv for computer_server to use default args
sys.argv = ['computer_server'] # Reset to minimal args
# Use the proper entry point
logger.info("Calling computer_server.run_cli()...")
computer_server.run_cli()
logger.info("Computer server.run_cli() completed")
except KeyboardInterrupt:
logger.info("Computer server interrupted")
except Exception as e:
logger.error(f"Computer server error: {e}")
import traceback
logger.error(f"Traceback: {traceback.format_exc()}")
finally:
# Restore original sys.argv
try:
sys.argv = original_argv
except:
pass
server_thread = threading.Thread(target=run_server, daemon=True)
server_thread.start()
logger.info("Computer server thread started")
# Give server more time to start up
time.sleep(5)
# Check if thread is still alive (server is running)
if server_thread.is_alive():
logger.info("Computer server is running successfully")
return server_thread
else:
logger.error("Computer server thread died unexpectedly")
return None
except ImportError as e:
logger.error(f"Cannot import computer_server module: {e}")
logger.error("Please install computer_server package")
return None
except Exception as e:
logger.error(f"Error starting computer server: {e}")
import traceback
logger.error(f"Traceback: {traceback.format_exc()}")
return None
def parse_arguments():
"""Parse command line arguments"""
parser = argparse.ArgumentParser(
description="E2E Test Runner with ReportPortal integration",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
# Run locally without ReportPortal
python main.py
# Run with ReportPortal integration
python main.py --enable-reportportal --rp-token YOUR_TOKEN
# Run with custom Jan app path
python main.py --jan-app-path "C:/Custom/Path/Jan.exe"
# Run with different model
python main.py --model-name "gpt-4" --model-base-url "https://api.openai.com/v1"
# Using environment variables
ENABLE_REPORTPORTAL=true RP_TOKEN=xxx MODEL_NAME=gpt-4 python main.py
"""
)
# Get default Jan path
default_jan_path = get_default_jan_path()
# Computer server arguments
server_group = parser.add_argument_group('Computer Server Configuration')
server_group.add_argument(
'--skip-server-start',
action='store_true',
default=os.getenv('SKIP_SERVER_START', 'false').lower() == 'true',
help='Skip automatic computer server startup (env: SKIP_SERVER_START, default: false)'
)
# ReportPortal arguments
rp_group = parser.add_argument_group('ReportPortal Configuration')
rp_group.add_argument(
'--enable-reportportal',
action='store_true',
default=os.getenv('ENABLE_REPORTPORTAL', 'false').lower() == 'true',
help='Enable ReportPortal integration (env: ENABLE_REPORTPORTAL, default: false)'
)
rp_group.add_argument(
'--rp-endpoint',
default=os.getenv('RP_ENDPOINT', 'https://reportportal.menlo.ai'),
help='ReportPortal endpoint URL (env: RP_ENDPOINT, default: %(default)s)'
)
rp_group.add_argument(
'--rp-project',
default=os.getenv('RP_PROJECT', 'default_personal'),
help='ReportPortal project name (env: RP_PROJECT, default: %(default)s)'
)
rp_group.add_argument(
'--rp-token',
default=os.getenv('RP_TOKEN'),
help='ReportPortal API token (env: RP_TOKEN, required when --enable-reportportal is used)'
)
rp_group.add_argument(
'--launch-name',
default=os.getenv('LAUNCH_NAME'),
help='Custom launch name for ReportPortal (env: LAUNCH_NAME, default: auto-generated with timestamp)'
)
# Jan app arguments
jan_group = parser.add_argument_group('Jan Application Configuration')
jan_group.add_argument(
'--jan-app-path',
default=os.getenv('JAN_APP_PATH', default_jan_path),
help=f'Path to Jan application executable (env: JAN_APP_PATH, default: auto-detected or {default_jan_path})'
)
jan_group.add_argument(
'--jan-process-name',
default=os.getenv('JAN_PROCESS_NAME', 'Jan.exe' if IS_WINDOWS else ('Jan' if IS_MACOS else 'Jan-nightly')),
help='Jan process name for monitoring (env: JAN_PROCESS_NAME, default: platform-specific)'
)
# Model/Agent arguments
model_group = parser.add_argument_group('Model Configuration')
model_group.add_argument(
'--model-loop',
default=os.getenv('MODEL_LOOP', 'uitars'),
help='Agent loop type (env: MODEL_LOOP, default: %(default)s)'
)
model_group.add_argument(
'--model-provider',
default=os.getenv('MODEL_PROVIDER', 'oaicompat'),
help='Model provider (env: MODEL_PROVIDER, default: %(default)s)'
)
model_group.add_argument(
'--model-name',
default=os.getenv('MODEL_NAME', 'ByteDance-Seed/UI-TARS-1.5-7B'),
help='Model name (env: MODEL_NAME, default: %(default)s)'
)
model_group.add_argument(
'--model-base-url',
default=os.getenv('MODEL_BASE_URL', 'http://10.200.108.58:1234/v1'),
help='Model base URL (env: MODEL_BASE_URL, default: %(default)s)'
)
# Test execution arguments
test_group = parser.add_argument_group('Test Execution Configuration')
test_group.add_argument(
'--max-turns',
type=int,
default=int(os.getenv('MAX_TURNS', '30')),
help='Maximum number of turns per test (env: MAX_TURNS, default: %(default)s)'
)
test_group.add_argument(
'--tests-dir',
default=os.getenv('TESTS_DIR', 'tests'),
help='Directory containing test files (env: TESTS_DIR, default: %(default)s)'
)
test_group.add_argument(
'--delay-between-tests',
type=int,
default=int(os.getenv('DELAY_BETWEEN_TESTS', '3')),
help='Delay in seconds between tests (env: DELAY_BETWEEN_TESTS, default: %(default)s)'
)
args = parser.parse_args()
# Validate ReportPortal token if ReportPortal is enabled
if args.enable_reportportal and not args.rp_token:
parser.error("--rp-token (or RP_TOKEN env var) is required when --enable-reportportal is used")
return args
async def main():
"""
Main function to scan and run all test files with optional ReportPortal integration
"""
# Parse command line arguments
args = parse_arguments()
# Initialize final exit code
final_exit_code = 0
# Start computer server if not skipped
server_thread = None
if not args.skip_server_start:
server_thread = start_computer_server()
if server_thread is None:
logger.error("Failed to start computer server. Exiting...")
return
else:
logger.info("Skipping computer server startup (assuming it's already running)")
try:
# Build agent config from arguments
agent_config = {
"loop": args.model_loop,
"model_provider": args.model_provider,
"model_name": args.model_name,
"model_base_url": args.model_base_url
}
# Log configuration
logger.info("=== Configuration ===")
logger.info(f"Computer server: {'STARTED' if server_thread else 'EXTERNAL'}")
logger.info(f"Tests directory: {args.tests_dir}")
logger.info(f"Max turns per test: {args.max_turns}")
logger.info(f"Delay between tests: {args.delay_between_tests}s")
logger.info(f"Jan app path: {args.jan_app_path}")
logger.info(f"Jan app exists: {os.path.exists(args.jan_app_path)}")
logger.info(f"Jan process name: {args.jan_process_name}")
logger.info(f"Model: {args.model_name}")
logger.info(f"Model URL: {args.model_base_url}")
logger.info(f"Model provider: {args.model_provider}")
logger.info(f"ReportPortal integration: {'ENABLED' if args.enable_reportportal else 'DISABLED'}")
if args.enable_reportportal:
logger.info(f"ReportPortal endpoint: {args.rp_endpoint}")
logger.info(f"ReportPortal project: {args.rp_project}")
logger.info(f"ReportPortal token: {'SET' if args.rp_token else 'NOT SET'}")
logger.info(f"Launch name: {args.launch_name if args.launch_name else 'AUTO-GENERATED'}")
logger.info("======================")
# Scan all test files
test_files = scan_test_files(args.tests_dir)
if not test_files:
logger.warning(f"No test files found in directory: {args.tests_dir}")
return
logger.info(f"Found {len(test_files)} test files")
# Track test results for final exit code
test_results = {"passed": 0, "failed": 0, "total": len(test_files)}
# Initialize ReportPortal client only if enabled
rp_client = None
launch_id = None
if args.enable_reportportal:
try:
rp_client = RPClient(
endpoint=args.rp_endpoint,
project=args.rp_project,
api_key=args.rp_token
)
# Start ReportPortal launch
current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
# Use custom launch name if provided, otherwise generate default
if args.launch_name:
launch_name = args.launch_name
logger.info(f"Using custom launch name: {launch_name}")
else:
launch_name = f"E2E Test Run - {current_time}"
logger.info(f"Using auto-generated launch name: {launch_name}")
launch_id = rp_client.start_launch(
name=launch_name,
start_time=timestamp(),
description=f"Automated E2E test run with {len(test_files)} test cases\n"
f"Model: {args.model_name}\n"
f"Max turns: {args.max_turns}"
)
logger.info(f"Started ReportPortal launch: {launch_name}")
except Exception as e:
logger.error(f"Failed to initialize ReportPortal: {e}")
logger.warning("Continuing without ReportPortal integration...")
rp_client = None
launch_id = None
else:
logger.info("Running in local development mode - results will not be uploaded to ReportPortal")
# Start computer environment
logger.info("Initializing computer environment...")
# Get platform-specific computer configuration
computer_config = get_computer_config()
logger.info(f"Using computer config: {computer_config}")
computer = Computer(
os_type=computer_config["os_type"],
use_host_computer_server=True
)
await computer.run()
logger.info("Computer environment ready")
# Run each test sequentially with turn monitoring
for i, test_data in enumerate(test_files, 1):
logger.info(f"Running test {i}/{len(test_files)}: {test_data['path']}")
try:
# Pass all configs to test runner
test_result = await run_single_test_with_timeout(
computer=computer,
test_data=test_data,
rp_client=rp_client, # Can be None
launch_id=launch_id, # Can be None
max_turns=args.max_turns,
jan_app_path=args.jan_app_path,
jan_process_name=args.jan_process_name,
agent_config=agent_config,
enable_reportportal=args.enable_reportportal
)
# Track test result - properly handle different return formats
test_passed = False
if test_result:
# Check different possible return formats
if isinstance(test_result, dict):
# Dictionary format: check 'success' key
test_passed = test_result.get('success', False)
elif isinstance(test_result, bool):
# Boolean format: direct boolean value
test_passed = test_result
elif hasattr(test_result, 'success'):
# Object format: check success attribute
test_passed = getattr(test_result, 'success', False)
else:
# Any truthy value is considered success
test_passed = bool(test_result)
else:
test_passed = False
# Update counters and log result
if test_passed:
test_results["passed"] += 1
logger.info(f"[SUCCESS] Test {i} PASSED: {test_data['path']}")
else:
test_results["failed"] += 1
logger.error(f"[FAILED] Test {i} FAILED: {test_data['path']}")
# Debug log for troubleshooting
logger.info(f"[INFO] Debug - Test result: type={type(test_result)}, value={test_result}, success_field={test_result.get('success', 'N/A') if isinstance(test_result, dict) else 'N/A'}, final_passed={test_passed}")
except Exception as e:
test_results["failed"] += 1
logger.error(f"[FAILED] Test {i} FAILED with exception: {test_data['path']} - {e}")
# Add delay between tests
if i < len(test_files):
logger.info(f"Waiting {args.delay_between_tests} seconds before next test...")
await asyncio.sleep(args.delay_between_tests)
# Log final test results summary
logger.info("=" * 50)
logger.info("TEST EXECUTION SUMMARY")
logger.info("=" * 50)
logger.info(f"Total tests: {test_results['total']}")
logger.info(f"Passed: {test_results['passed']}")
logger.info(f"Failed: {test_results['failed']}")
logger.info(f"Success rate: {(test_results['passed']/test_results['total']*100):.1f}%")
logger.info("=" * 50)
if test_results["failed"] > 0:
logger.error(f"[FAILED] Test execution completed with {test_results['failed']} failures!")
final_exit_code = 1
else:
logger.info("[SUCCESS] All tests completed successfully!")
final_exit_code = 0
except KeyboardInterrupt:
logger.info("Test execution interrupted by user")
final_exit_code = 1
except Exception as e:
logger.error(f"Error in main execution: {e}")
final_exit_code = 1
finally:
# Finish ReportPortal launch only if it was started
if args.enable_reportportal and rp_client and launch_id:
try:
rp_client.finish_launch(
launch_id=launch_id,
end_time=timestamp()
)
rp_client.session.close()
logger.info("ReportPortal launch finished and session closed")
except Exception as e:
logger.error(f"Error finishing ReportPortal launch: {e}")
# Note: daemon thread will automatically terminate when main program ends
if server_thread:
logger.info("Computer server will stop when main program exits (daemon thread)")
# Exit with appropriate code based on test results
logger.info(f"Exiting with code: {final_exit_code}")
exit(final_exit_code)
if __name__ == "__main__":
asyncio.run(main())

View File

@ -1,439 +0,0 @@
import os
import json
import mimetypes
import re
import logging
import glob
import platform
from reportportal_client.helpers import timestamp
logger = logging.getLogger(__name__)
def upload_turn_folder(client, test_item_id, turn_path, turn_name, force_fail=False):
    """
    Attach one turn folder's contents to ReportPortal as a STEP item.

    Every ``*.json`` file is logged as pretty-printed text and every
    ``*.png`` file as an image attachment, in sorted filename order.

    Args:
        client: ReportPortal client used to create the step and post logs.
        test_item_id: parent RP test item the step is nested under.
        turn_path: filesystem path of the turn folder to upload.
        turn_name: display name for the step (typically the folder name).
        force_fail: when True the step is marked FAILED regardless of
            whether any file-level errors occurred.
    """
    step_id = client.start_test_item(
        parent_item_id=test_item_id,
        name=turn_name,
        start_time=timestamp(),
        item_type="STEP"
    )
    any_payload = False   # did at least one file upload succeed?
    had_error = False     # did any parse/attach attempt fail?
    for entry in sorted(os.listdir(turn_path)):
        full_path = os.path.join(turn_path, entry)
        if entry.endswith(".json"):
            try:
                with open(full_path, "r", encoding="utf-8") as handle:
                    payload = json.load(handle)
                client.log(
                    time=timestamp(),
                    level="INFO",
                    message=f"[{entry}]\n{json.dumps(payload, indent=2)}",
                    item_id=step_id
                )
                any_payload = True
            except Exception as exc:
                client.log(
                    time=timestamp(),
                    level="ERROR",
                    message=f"[ERROR parsing {entry}] {str(exc)}",
                    item_id=step_id
                )
                had_error = True
        elif entry.endswith(".png"):
            try:
                with open(full_path, "rb") as handle:
                    image_bytes = handle.read()
                client.log(
                    time=timestamp(),
                    level="INFO",
                    message=f"Screenshot: {entry}",
                    item_id=step_id,
                    attachment={
                        "name": entry,
                        "data": image_bytes,
                        "mime": mimetypes.guess_type(entry)[0] or "image/png"
                    }
                )
                any_payload = True
            except Exception as exc:
                client.log(
                    time=timestamp(),
                    level="ERROR",
                    message=f"[ERROR attaching {entry}] {str(exc)}",
                    item_id=step_id
                )
                had_error = True
    if not any_payload:
        client.log(
            time=timestamp(),
            level="WARNING",
            message="No data found in this turn.",
            item_id=step_id
        )
    # A forced failure (test-level verdict) or any file error fails the step.
    step_status = "FAILED" if (force_fail or had_error) else "PASSED"
    client.finish_test_item(
        item_id=step_id,
        end_time=timestamp(),
        status=step_status
    )
def extract_test_result_from_trajectory(trajectory_dir):
    """
    Extract the final pass/fail verdict from the last turn's API response.

    Scans ``trajectory_dir`` for ``turn_*`` folders, picks the
    highest-numbered one, reads its last ``api_call_*_response.json`` and
    searches the assistant message content for a literal
    ``{"result": True}`` / ``{"result": False}``.

    Returns:
        True only when ``{"result": True}`` is found; False in every other
        case (missing directory, no turns, no response files, parse errors,
        unrecognized content, or an explicit ``{"result": False}``).
    """
    def _numeric_key(name):
        # Sort by the embedded number so turn_10 comes after turn_9.
        # A plain lexicographic sort orders "turn_10" before "turn_2" and
        # would pick the wrong "last" turn/response once counts exceed 9.
        match = re.search(r'(\d+)', name)
        return (int(match.group(1)) if match else -1, name)

    if not trajectory_dir or not os.path.exists(trajectory_dir):
        logger.warning(f"Trajectory directory not found: {trajectory_dir}")
        return False
    try:
        # Get all turn folders and find the last one (numerically).
        turn_folders = [f for f in os.listdir(trajectory_dir)
                        if os.path.isdir(os.path.join(trajectory_dir, f)) and f.startswith("turn_")]
        if not turn_folders:
            logger.warning("No turn folders found")
            return False
        last_turn = sorted(turn_folders, key=_numeric_key)[-1]
        last_turn_path = os.path.join(trajectory_dir, last_turn)
        logger.info(f"Checking result in last turn: {last_turn}")
        # Look for API call response files in that turn.
        response_files = [f for f in os.listdir(last_turn_path)
                          if f.startswith("api_call_") and f.endswith("_response.json")]
        if not response_files:
            logger.warning("No API response files found in last turn")
            return False
        # Check the numerically-last response file.
        last_response_file = sorted(response_files, key=_numeric_key)[-1]
        response_file_path = os.path.join(last_turn_path, last_response_file)
        logger.info(f"Checking response file: {last_response_file}")
        with open(response_file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Extract the assistant content from the OpenAI-style response shape.
        if 'response' in data and 'choices' in data['response'] and data['response']['choices']:
            last_choice = data['response']['choices'][-1]
            if 'message' in last_choice and 'content' in last_choice['message']:
                content = last_choice['message']['content']
                logger.info(f"Last response content: {content}")
                # The agent emits Python-literal True/False, not JSON true/false.
                true_pattern = r'\{\s*"result"\s*:\s*True\s*\}'
                false_pattern = r'\{\s*"result"\s*:\s*False\s*\}'
                if re.search(true_pattern, content):
                    logger.info("Found test result: True - PASSED")
                    return True
                if re.search(false_pattern, content):
                    logger.info("Found test result: False - FAILED")
                    return False
                logger.warning("No valid result pattern found in response content - marking as FAILED")
                return False
        logger.warning("Could not extract content from response structure")
        return False
    except Exception as e:
        logger.error(f"Error extracting test result: {e}")
        return False
def get_jan_log_paths(is_nightly=False):
    """
    Return glob patterns for Jan application log files on the current OS.

    Args:
        is_nightly: select the "Jan-nightly" install's paths when True,
            otherwise the regular "Jan" install's paths.

    Returns:
        A list of glob patterns (one per known location), or an empty list
        on an unrecognized operating system.
    """
    app_name = "Jan-nightly" if is_nightly else "Jan"
    system = platform.system().lower()
    home_dir = os.path.expanduser("~")
    # Per-OS log locations:
    #   Windows: %APPDATA%\Jan(-nightly)\data\logs\*.log
    #   macOS:   ~/Library/Application Support/Jan(-nightly)/data/logs/*.log
    #   Linux:   ~/.local/share/Jan(-nightly)/data/logs/*.log
    patterns_by_system = {
        "windows": [os.path.expandvars("%APPDATA%") + f"\\{app_name}\\data\\logs\\*.log"],
        "darwin": [f"{home_dir}/Library/Application Support/{app_name}/data/logs/*.log"],
        "linux": [f"{home_dir}/.local/share/{app_name}/data/logs/*.log"],
    }
    if system not in patterns_by_system:
        logger.warning(f"Unsupported OS: {system}")
        return []
    return patterns_by_system[system]
def upload_jan_logs(client, test_item_id, is_nightly=False, max_log_files=5):
    """
    Upload Jan application log files to ReportPortal as text attachments.

    Globs the OS-specific Jan log locations (see get_jan_log_paths), sorts
    the matches by modification time, and attaches up to ``max_log_files``
    of the most recent files to ``test_item_id``. Files over 50MB are
    skipped with a WARNING instead of uploaded.

    Args:
        client: ReportPortal client used for logging and attachments.
        test_item_id: RP item id the log attachments are posted to.
        is_nightly: when True, look in the Jan-nightly install's log paths.
        max_log_files: upper bound on the number of files uploaded.
    """
    log_patterns = get_jan_log_paths(is_nightly)
    app_type = "nightly" if is_nightly else "regular"
    logger.info(f"Looking for Jan {app_type} logs...")
    # Expand every pattern; a failure on one pattern does not stop the others.
    all_log_files = []
    for pattern in log_patterns:
        try:
            log_files = glob.glob(pattern)
            all_log_files.extend(log_files)
            logger.info(f"Found {len(log_files)} log files matching pattern: {pattern}")
        except Exception as e:
            logger.error(f"Error searching for logs with pattern {pattern}: {e}")
    if not all_log_files:
        # Nothing found: record that in RP so the absence is visible.
        logger.warning(f"No Jan {app_type} log files found")
        client.log(
            time=timestamp(),
            level="WARNING",
            message=f"[INFO] No Jan {app_type} application logs found",
            item_id=test_item_id
        )
        return
    # Sort by modification time (newest first) and limit to max_log_files
    try:
        all_log_files.sort(key=lambda x: os.path.getmtime(x), reverse=True)
        log_files_to_upload = all_log_files[:max_log_files]
        logger.info(f"Uploading {len(log_files_to_upload)} most recent Jan {app_type} log files")
        for i, log_file in enumerate(log_files_to_upload, 1):
            try:
                file_size = os.path.getsize(log_file)
                file_name = os.path.basename(log_file)
                # Check file size limit (50MB = 50 * 1024 * 1024 bytes);
                # oversized files are skipped because the content is read
                # fully into memory before upload.
                max_file_size = 50 * 1024 * 1024  # 50MB
                if file_size > max_file_size:
                    logger.warning(f"Log file {file_name} is too large ({file_size} bytes > {max_file_size} bytes), skipping upload")
                    client.log(
                        time=timestamp(),
                        level="WARNING",
                        message=f"[INFO] Log file {file_name} skipped (size: {file_size} bytes > 50MB limit)",
                        item_id=test_item_id
                    )
                    continue
                logger.info(f"Uploading log file {i}/{len(log_files_to_upload)}: {file_name} ({file_size} bytes)")
                # Read log file content (safe to read since we checked size);
                # errors='ignore' tolerates stray non-UTF-8 bytes in logs.
                with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
                    log_content = f.read()
                # Upload as text attachment
                client.log(
                    time=timestamp(),
                    level="INFO",
                    message=f"[INFO] Jan {app_type} application log: {file_name}",
                    item_id=test_item_id,
                    attachment={
                        "name": f"jan_{app_type}_log_{i}_{file_name}",
                        "data": log_content.encode('utf-8'),
                        "mime": "text/plain"
                    }
                )
                logger.info(f"Successfully uploaded log: {file_name}")
            except Exception as e:
                # A failure on one file is reported to RP but does not
                # abort the remaining uploads.
                logger.error(f"Error uploading log file {log_file}: {e}")
                client.log(
                    time=timestamp(),
                    level="ERROR",
                    message=f"Failed to upload log file {os.path.basename(log_file)}: {str(e)}",
                    item_id=test_item_id
                )
        # Add summary log
        client.log(
            time=timestamp(),
            level="INFO",
            message=f"[INFO] Uploaded {len(log_files_to_upload)} Jan {app_type} log files (total available: {len(all_log_files)})",
            item_id=test_item_id
        )
    except Exception as e:
        logger.error(f"Error processing Jan logs: {e}")
        client.log(
            time=timestamp(),
            level="ERROR",
            message=f"Error processing Jan {app_type} logs: {str(e)}",
            item_id=test_item_id
        )
def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, force_stopped=False, video_path=None, is_nightly=False):
    """
    Upload one test case's results (verdict, recording, logs, turns) to RP.

    The verdict is FAILED when the run was force-stopped or when the last
    turn's response does not contain a literal ``{"result": True}``
    (see extract_test_result_from_trajectory). When ``trajectory_dir`` is
    missing, a FAILED item with just the recording (if any) is created.

    Args:
        client: ReportPortal client.
        launch_id: RP launch the test item belongs to.
        test_path: path of the test case file; used to derive the item name.
        trajectory_dir: folder containing per-turn trajectory data.
        force_stopped: True when the run hit the maximum-turn limit.
        video_path: optional path to an .mp4 screen recording.
        is_nightly: passed through to upload_jan_logs for log discovery.
    """
    # Item name: "a/b/c.txt" -> "a__b__c" (slashes normalized first).
    formatted_test_path = test_path.replace('\\', '/').replace('.txt', '').replace('/', '__')
    if not trajectory_dir or not os.path.exists(trajectory_dir):
        # No trajectory at all: report a bare FAILED item (plus recording).
        logger.warning(f"Trajectory directory not found: {trajectory_dir}")
        test_item_id = client.start_test_item(
            launch_id=launch_id,
            name=formatted_test_path,
            start_time=timestamp(),
            item_type="TEST",
            description=f"Test case from: {test_path}"
        )
        client.log(
            time=timestamp(),
            level="ERROR",
            message="[FAILED] TEST FAILED [FAILED]\nNo trajectory directory found",
            item_id=test_item_id
        )
        # Upload video if available
        if video_path and os.path.exists(video_path):
            try:
                with open(video_path, "rb") as video_file:
                    client.log(
                        time=timestamp(),
                        level="INFO",
                        message="Screen recording of test execution",
                        item_id=test_item_id,
                        attachment={
                            "name": f"test_recording_{formatted_test_path}.mp4",
                            "data": video_file.read(),
                            # The recorder writes .mp4 (mp4v), so advertise the
                            # matching MIME type (video/x-msvideo is AVI).
                            "mime": "video/mp4"
                        }
                    )
                logger.info(f"Uploaded video for failed test: {video_path}")
            except Exception as e:
                logger.error(f"Error uploading video: {e}")
        client.finish_test_item(
            item_id=test_item_id,
            end_time=timestamp(),
            status="FAILED"
        )
        return
    # Determine final status
    if force_stopped:
        final_status = "FAILED"
        status_message = "exceeded maximum turn limit (30 turns)"
    else:
        test_result = extract_test_result_from_trajectory(trajectory_dir)
        if test_result is True:
            final_status = "PASSED"
            status_message = "completed successfully with positive result"
        else:
            final_status = "FAILED"
            status_message = "no valid success result found"
    # Create test item
    test_item_id = client.start_test_item(
        launch_id=launch_id,
        name=formatted_test_path,
        start_time=timestamp(),
        item_type="TEST",
        description=f"Test case from: {test_path}"
    )
    try:
        turn_folders = [f for f in os.listdir(trajectory_dir)
                        if os.path.isdir(os.path.join(trajectory_dir, f)) and f.startswith("turn_")]
        # Add clear status log
        status_emoji = "[SUCCESS]" if final_status == "PASSED" else "[FAILED]"
        client.log(
            time=timestamp(),
            level="INFO" if final_status == "PASSED" else "ERROR",
            message=f"{status_emoji} TEST {final_status} {status_emoji}\nReason: {status_message}\nTotal turns: {len(turn_folders)}",
            item_id=test_item_id
        )
        # Upload screen recording video first
        if video_path and os.path.exists(video_path):
            logger.info(f"Attempting to upload video: {video_path}")
            logger.info(f"Video file size: {os.path.getsize(video_path)} bytes")
            try:
                with open(video_path, "rb") as video_file:
                    video_data = video_file.read()
                logger.info(f"Read video data: {len(video_data)} bytes")
                client.log(
                    time=timestamp(),
                    level="INFO",
                    message="[INFO] Screen recording of test execution",
                    item_id=test_item_id,
                    attachment={
                        "name": f"test_recording_{formatted_test_path}.mp4",
                        "data": video_data,
                        # Correct MIME for the .mp4 container produced by the
                        # recorder (previously video/x-msvideo, i.e. AVI).
                        "mime": "video/mp4"
                    }
                )
                logger.info(f"Successfully uploaded screen recording: {video_path}")
            except Exception as e:
                logger.error(f"Error uploading screen recording: {e}")
                client.log(
                    time=timestamp(),
                    level="WARNING",
                    message=f"Failed to upload screen recording: {str(e)}",
                    item_id=test_item_id
                )
        else:
            logger.warning(f"Video upload skipped - video_path: {video_path}, exists: {os.path.exists(video_path) if video_path else 'N/A'}")
            client.log(
                time=timestamp(),
                level="WARNING",
                message="No screen recording available for this test",
                item_id=test_item_id
            )
        # Upload Jan application logs
        logger.info("Uploading Jan application logs...")
        upload_jan_logs(client, test_item_id, is_nightly=is_nightly, max_log_files=5)
        # Upload all turn data with appropriate status.
        # If the test failed, every turn step is marked failed as well.
        force_fail_turns = (final_status == "FAILED")
        for turn_folder in sorted(turn_folders):
            turn_path = os.path.join(trajectory_dir, turn_folder)
            upload_turn_folder(client, test_item_id, turn_path, turn_folder, force_fail=force_fail_turns)
        # Finish with correct status
        client.finish_test_item(
            item_id=test_item_id,
            end_time=timestamp(),
            status=final_status
        )
        logger.info(f"Uploaded test results for {formatted_test_path}: {final_status}")
    except Exception as e:
        # Any unexpected error while assembling the report fails the item.
        logger.error(f"Error uploading test results: {e}")
        client.finish_test_item(
            item_id=test_item_id,
            end_time=timestamp(),
            status="FAILED"
        )

View File

@ -1,18 +0,0 @@
# Core dependencies
cua-computer[all]~=0.3.5
# NOTE: pip rejects two requirement lines for the same package ("double
# requirement"), so the generic pin is commented out; the patched git fork
# below supersedes it.
# cua-agent[all]~=0.3.0
cua-agent @ git+https://github.com/janhq/cua.git@compute-agent-0.3.0-patch#subdirectory=libs/python/agent
# ReportPortal integration
reportportal-client~=5.6.5
# Screen recording and automation
opencv-python~=4.10.0
numpy~=2.2.6
PyAutoGUI~=0.9.54
# System utilities
psutil~=7.0.0
# Server component
cua-computer-server~=0.1.19

View File

@ -1,84 +0,0 @@
import cv2
import numpy as np
import pyautogui
import threading
import time
import logging
logger = logging.getLogger(__name__)
class ScreenRecorder:
    """
    Record the screen to a video file on a background daemon thread.

    Frames are captured with pyautogui and written with OpenCV's
    VideoWriter (mp4v). start_recording()/stop_recording() are expected to
    be called from the main thread.
    """
    def __init__(self, output_path, fps=10):
        # output_path: target video file; fps: capture/playback frame rate.
        self.output_path = output_path
        self.fps = fps
        self.recording = False
        self.writer = None   # cv2.VideoWriter, created by the worker thread
        self.thread = None   # worker thread, None when not recording

    def start_recording(self):
        """Start screen recording on a background thread (no-op if active)."""
        if self.recording:
            logger.warning("Recording already in progress")
            return
        self.recording = True
        self.thread = threading.Thread(target=self._record_screen, daemon=True)
        self.thread.start()
        logger.info(f"Started screen recording: {self.output_path}")

    def stop_recording(self):
        """Stop screen recording and finalize the video file."""
        if not self.recording:
            logger.warning("No recording in progress")
            return
        self.recording = False
        if self.thread:
            self.thread.join(timeout=5)
            self.thread = None
        # The worker normally releases the writer in its finally block and
        # clears self.writer; release here only if it is still open (e.g.
        # the join timed out) and clear it so the recorder can be reused.
        if self.writer:
            self.writer.release()
            self.writer = None
        logger.info(f"Stopped screen recording: {self.output_path}")

    def _record_screen(self):
        """Worker loop: capture frames and write them until stopped."""
        try:
            # Get screen dimensions
            screen_size = pyautogui.size()
            # mp4v codec inside an .mp4 container for broad compatibility
            fourcc = cv2.VideoWriter_fourcc(*'mp4v')
            self.writer = cv2.VideoWriter(
                self.output_path,
                fourcc,
                self.fps,
                screen_size
            )
            frame_interval = 1.0 / self.fps
            while self.recording:
                frame_start = time.monotonic()
                try:
                    # Capture screen and convert PIL RGB -> OpenCV BGR
                    screenshot = pyautogui.screenshot()
                    frame = np.array(screenshot)
                    frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
                    self.writer.write(frame)
                except Exception as e:
                    logger.error(f"Error capturing frame: {e}")
                    break
                # Pace to the target fps, accounting for time already spent
                # capturing/encoding the frame (a plain sleep(1/fps) drifts).
                delay = frame_interval - (time.monotonic() - frame_start)
                if delay > 0:
                    time.sleep(delay)
        except Exception as e:
            logger.error(f"Error in screen recording: {e}")
        finally:
            # Release exactly once and clear the handle so stop_recording()
            # does not double-release it.
            if self.writer:
                self.writer.release()
                self.writer = None

View File

@ -1,116 +0,0 @@
# AutoQA Scripts
This directory contains platform-specific scripts used by the AutoQA GitHub Actions workflow. These scripts help maintain a cleaner and more maintainable workflow file by extracting complex inline scripts into separate files.
## Directory Structure
```text
autoqa/scripts/
├── setup_permissions.sh # Setup executable permissions for all scripts
├── windows_cleanup.ps1 # Windows: Clean existing Jan installations
├── windows_download.ps1 # Windows: Download Jan app installer
├── windows_install.ps1 # Windows: Install Jan app
├── windows_post_cleanup.ps1 # Windows: Post-test cleanup
├── run_tests.ps1 # Windows: Run AutoQA tests
├── ubuntu_cleanup.sh # Ubuntu: Clean existing Jan installations
├── ubuntu_download.sh # Ubuntu: Download Jan app (.deb)
├── ubuntu_install.sh # Ubuntu: Install Jan app
├── ubuntu_post_cleanup.sh # Ubuntu: Post-test cleanup
├── macos_cleanup.sh # macOS: Clean existing Jan installations
├── macos_download.sh # macOS: Download Jan app (.dmg)
├── macos_install.sh # macOS: Install Jan app
├── macos_post_cleanup.sh # macOS: Post-test cleanup
├── run_tests.sh # Unix: Run AutoQA tests (Ubuntu/macOS)
├── README.md # This file
└── PERMISSIONS.md # Permission setup documentation
```
## Script Functions
### Windows Scripts (.ps1)
- **windows_cleanup.ps1**: Removes existing Jan installations and kills running processes
- **windows_download.ps1**: Downloads Jan installer with priority-based URL selection
- **windows_install.ps1**: Installs Jan app and sets environment variables
- **windows_post_cleanup.ps1**: Comprehensive cleanup after tests including uninstallation
- **run_tests.ps1**: Runs the AutoQA Python tests with proper arguments
### Ubuntu Scripts (.sh)
- **ubuntu_cleanup.sh**: Removes existing Jan installations and kills running processes
- **ubuntu_download.sh**: Downloads Jan .deb package with priority-based URL selection
- **ubuntu_install.sh**: Installs Jan .deb package and sets environment variables
- **ubuntu_post_cleanup.sh**: Comprehensive cleanup after tests including package removal
### macOS Scripts (.sh)
- **macos_cleanup.sh**: Removes existing Jan installations and kills running processes
- **macos_download.sh**: Downloads Jan .dmg package with priority-based URL selection
- **macos_install.sh**: Mounts DMG, extracts .app, and installs to Applications
- **macos_post_cleanup.sh**: Comprehensive cleanup after tests
### Common Scripts
- **setup_permissions.sh**: Automatically sets executable permissions for all shell scripts
- **run_tests.sh**: Platform-agnostic test runner for Unix-based systems (Ubuntu/macOS)
## Usage in GitHub Actions
These scripts are called from the `.github/workflows/autoqa.yml` workflow file:
```yaml
# Setup permissions first (Ubuntu/macOS)
- name: Setup script permissions
run: |
chmod +x autoqa/scripts/setup_permissions.sh
./autoqa/scripts/setup_permissions.sh
# Then use scripts without chmod
- name: Clean existing Jan installations
run: |
./autoqa/scripts/ubuntu_cleanup.sh
# Windows example (no chmod needed)
- name: Clean existing Jan installations
shell: powershell
run: |
.\autoqa\scripts\windows_cleanup.ps1
```
## Benefits
1. **Maintainability**: Complex scripts are in separate files, easier to read and modify
2. **Reusability**: Scripts can be reused across different workflows or locally
3. **Testing**: Scripts can be tested independently
4. **Version Control**: Better diff tracking for script changes
5. **Platform Consistency**: Similar functionality across platforms in separate files
## Development
When modifying these scripts:
1. Test them locally on the respective platforms
2. Ensure proper error handling and exit codes
3. Follow platform-specific best practices
4. Update this README if new scripts are added
## Script Parameters
### Windows Scripts
- Most scripts accept `-IsNightly` parameter to handle nightly vs stable builds
- Download script accepts multiple URL sources with priority ordering
### Unix Scripts
- Most scripts accept positional parameters for nightly flag and URLs
- Scripts use `$1`, `$2`, etc. for parameter access
## Environment Variables
Scripts set these environment variables for subsequent workflow steps:
- `JAN_APP_URL`: The selected Jan app download URL
- `IS_NIGHTLY`: Boolean flag indicating if it's a nightly build
- `JAN_APP_PATH`: Path to the installed Jan executable
- `JAN_PROCESS_NAME`: Name of the Jan process for monitoring

View File

@ -1,34 +0,0 @@
#!/bin/bash
# macOS cleanup script for Jan app
#
# Removes every trace of existing Jan / Jan-nightly installs before a test
# run: running processes, app bundles, and per-user data/cache/state dirs.
# Intentionally best-effort: pkill is ||-guarded and rm -rf ignores
# missing paths, so the script succeeds on a clean machine.
echo "Cleaning existing Jan installations..."
# Kill any running Jan processes (both regular and nightly)
# NOTE(review): pkill -f matches the full command line, so "Jan" also
# matches unrelated processes containing that substring — confirm this is
# acceptable on CI runners.
pkill -f "Jan" || true
pkill -f "jan" || true
pkill -f "Jan-nightly" || true
pkill -f "jan-nightly" || true
# Remove Jan app directories
rm -rf /Applications/Jan.app
rm -rf /Applications/Jan-nightly.app
rm -rf ~/Applications/Jan.app
rm -rf ~/Applications/Jan-nightly.app
# Remove Jan data folders (both regular and nightly)
rm -rf ~/Library/Application\ Support/Jan
rm -rf ~/Library/Application\ Support/Jan-nightly
rm -rf ~/Library/Application\ Support/jan.ai.app
rm -rf ~/Library/Application\ Support/jan-nightly.ai.app
rm -rf ~/Library/Preferences/jan.*
rm -rf ~/Library/Preferences/jan-nightly.*
rm -rf ~/Library/Caches/jan.*
rm -rf ~/Library/Caches/jan-nightly.*
rm -rf ~/Library/Caches/jan.ai.app
rm -rf ~/Library/Caches/jan-nightly.ai.app
rm -rf ~/Library/WebKit/jan.ai.app
rm -rf ~/Library/WebKit/jan-nightly.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan-nightly.ai.app
echo "Jan cleanup completed"

View File

@ -1,49 +0,0 @@
#!/bin/bash
# macOS download script for Jan app
#
# Resolves the Jan .dmg download URL from three sources in priority order,
# exports JAN_APP_URL / IS_NIGHTLY for later workflow steps, then downloads
# the installer to /tmp/jan-installer.dmg.
#
# Positional parameters:
#   $1 workflow-input URL        $2 workflow-input nightly flag
#   $3 repo-variable URL         $4 repo-variable nightly flag
#   $5 default URL               $6 default nightly flag
WORKFLOW_INPUT_URL="$1"
WORKFLOW_INPUT_IS_NIGHTLY="$2"
REPO_VARIABLE_URL="$3"
REPO_VARIABLE_IS_NIGHTLY="$4"
DEFAULT_URL="$5"
DEFAULT_IS_NIGHTLY="$6"
# Determine Jan app URL and nightly flag from multiple sources (priority order):
# 1. Workflow dispatch input (manual trigger)
# 2. Repository variable JAN_APP_URL
# 3. Default URL from env
JAN_APP_URL=""
IS_NIGHTLY="false"
if [ -n "$WORKFLOW_INPUT_URL" ]; then
    JAN_APP_URL="$WORKFLOW_INPUT_URL"
    IS_NIGHTLY="$WORKFLOW_INPUT_IS_NIGHTLY"
    echo "Using Jan app URL from workflow input: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
elif [ -n "$REPO_VARIABLE_URL" ]; then
    JAN_APP_URL="$REPO_VARIABLE_URL"
    IS_NIGHTLY="$REPO_VARIABLE_IS_NIGHTLY"
    echo "Using Jan app URL from repository variable: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
else
    JAN_APP_URL="$DEFAULT_URL"
    IS_NIGHTLY="$DEFAULT_IS_NIGHTLY"
    echo "Using default Jan app URL: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
fi
# Export for later steps
echo "JAN_APP_URL=$JAN_APP_URL" >> "$GITHUB_ENV"
echo "IS_NIGHTLY=$IS_NIGHTLY" >> "$GITHUB_ENV"
echo "Downloading Jan app from: $JAN_APP_URL"
# -f: fail on HTTP errors — without it a 404/500 response body would be
# saved as the .dmg and the existence check below would wrongly pass.
# -L: follow redirects (release assets are usually behind one).
if ! curl -fL -o "/tmp/jan-installer.dmg" "$JAN_APP_URL"; then
    echo "[FAILED] Failed to download Jan app"
    exit 1
fi
if [ ! -f "/tmp/jan-installer.dmg" ]; then
    echo "[FAILED] Failed to download Jan app"
    exit 1
fi
echo "[SUCCESS] Successfully downloaded Jan app"
ls -la "/tmp/jan-installer.dmg"

View File

@ -1,91 +0,0 @@
#!/bin/bash
# macOS install script for Jan app
#
# Mounts /tmp/jan-installer.dmg, copies the contained .app bundle into
# /Applications, locates the main executable inside Contents/MacOS, exports
# JAN_APP_PATH / PROCESS_NAME for later workflow steps, and waits for the
# app's first-run initialization.
echo "Installing Jan app from DMG..."
# Mount the DMG
hdiutil attach "/tmp/jan-installer.dmg" -mountpoint "/tmp/jan-mount"
# Find the .app file in the mounted DMG
APP_FILE=$(find "/tmp/jan-mount" -name "*.app" -type d | head -1)
if [ -z "$APP_FILE" ]; then
    echo "[Failed] No .app file found in DMG"
    hdiutil detach "/tmp/jan-mount" || true
    exit 1
fi
echo "Found app file: $APP_FILE"
# Copy to Applications directory
cp -R "$APP_FILE" /Applications/
# Unmount the DMG
hdiutil detach "/tmp/jan-mount"
# Determine app name and executable path
APP_NAME=$(basename "$APP_FILE")
echo "App name: $APP_NAME"
# First, check what's actually in the MacOS folder
echo "Contents of MacOS folder:"
ls -la "/Applications/$APP_NAME/Contents/MacOS/"
# Find all executable files in MacOS folder
# NOTE(review): "-perm +111" is BSD find syntax (fine on macOS); GNU find
# would need "-perm /111" — confirm this script only ever runs on macOS.
echo "Looking for executable files..."
find "/Applications/$APP_NAME/Contents/MacOS/" -type f -perm +111 -ls
# Try to find the main executable - it's usually the one with the same name as the app (without .app)
APP_BASE_NAME=$(basename "$APP_NAME" .app)
POTENTIAL_EXECUTABLES=(
    "/Applications/$APP_NAME/Contents/MacOS/$APP_BASE_NAME"
    "/Applications/$APP_NAME/Contents/MacOS/Jan"
    "/Applications/$APP_NAME/Contents/MacOS/Jan-nightly"
)
APP_PATH=""
for potential_exec in "${POTENTIAL_EXECUTABLES[@]}"; do
    echo "Checking: $potential_exec"
    if [ -f "$potential_exec" ] && [ -x "$potential_exec" ]; then
        APP_PATH="$potential_exec"
        echo "Found executable: $APP_PATH"
        break
    fi
done
# If still not found, get any executable file
if [ -z "$APP_PATH" ]; then
    echo "No predefined executable found, searching for any executable..."
    APP_PATH=$(find "/Applications/$APP_NAME/Contents/MacOS/" -type f -perm +111 | head -1)
fi
if [ -z "$APP_PATH" ]; then
    echo "[FAILED] No executable found in MacOS folder"
    ls -la "/Applications/$APP_NAME/Contents/MacOS/"
    exit 1
fi
PROCESS_NAME=$(basename "$APP_PATH")
echo "App installed at: /Applications/$APP_NAME"
echo "Executable path: $APP_PATH"
echo "Process name: $PROCESS_NAME"
# Export for test step
echo "JAN_APP_PATH=$APP_PATH" >> $GITHUB_ENV
echo "PROCESS_NAME=$PROCESS_NAME" >> $GITHUB_ENV
# Fixed wait: Jan performs first-run setup; 120s is an empirical allowance.
echo "[INFO] Waiting for Jan app first initialization (120 seconds)..."
echo "This allows Jan to complete its initial setup and configuration"
sleep 120
echo "[SUCCESS] Initialization wait completed"
# Verify installation
if [ -f "$APP_PATH" ]; then
    echo "[SUCCESS] Jan app installed successfully"
    ls -la "/Applications/$APP_NAME"
else
    echo "[FAILED] Jan app installation failed - executable not found"
    exit 1
fi

View File

@ -1,38 +0,0 @@
#!/bin/bash
# macOS post-test cleanup script
#
# Mirror of macos_cleanup.sh plus removal of the downloaded installer and
# mount point. Best-effort by design: kills and deletions ignore failures
# so a partially-installed state never fails the workflow step.
echo "Cleaning up after tests..."
# Kill any running Jan processes (both regular and nightly)
pkill -f "Jan" || true
pkill -f "jan" || true
pkill -f "Jan-nightly" || true
pkill -f "jan-nightly" || true
# Remove Jan app directories
rm -rf /Applications/Jan.app
rm -rf /Applications/Jan-nightly.app
rm -rf ~/Applications/Jan.app
rm -rf ~/Applications/Jan-nightly.app
# Remove Jan data folders (both regular and nightly)
rm -rf ~/Library/Application\ Support/Jan
rm -rf ~/Library/Application\ Support/Jan-nightly
rm -rf ~/Library/Application\ Support/jan.ai.app
rm -rf ~/Library/Application\ Support/jan-nightly.ai.app
rm -rf ~/Library/Preferences/jan.*
rm -rf ~/Library/Preferences/jan-nightly.*
rm -rf ~/Library/Caches/jan.*
rm -rf ~/Library/Caches/jan-nightly.*
rm -rf ~/Library/Caches/jan.ai.app
rm -rf ~/Library/Caches/jan-nightly.ai.app
rm -rf ~/Library/WebKit/jan.ai.app
rm -rf ~/Library/WebKit/jan-nightly.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan-nightly.ai.app
# Clean up downloaded installer
rm -f "/tmp/jan-installer.dmg"
rm -rf "/tmp/jan-mount"
echo "Cleanup completed"

View File

@ -1,31 +0,0 @@
#!/usr/bin/env pwsh
# Windows test runner script
#
# Builds the python argument list incrementally (splatting) instead of
# duplicating the invocation per parameter combination: --jan-app-path is
# added when JanAppPath is set, and --jan-process-name only when both
# JanAppPath and ProcessName are set.
param(
    [string]$JanAppPath,
    [string]$ProcessName,
    [string]$RpToken
)
Write-Host "Starting Auto QA Tests..."
Write-Host "Jan app path: $JanAppPath"
Write-Host "Process name: $ProcessName"
Write-Host "Current working directory: $(Get-Location)"
Write-Host "Contents of current directory:"
Get-ChildItem
Write-Host "Contents of trajectories directory (if exists):"
if (Test-Path "trajectories") {
    Get-ChildItem "trajectories"
} else {
    Write-Host "trajectories directory not found"
}
# Assemble arguments for main.py, then run it once.
$testArgs = @("main.py", "--enable-reportportal", "--rp-token", $RpToken)
if ($JanAppPath) {
    $testArgs += @("--jan-app-path", $JanAppPath)
    if ($ProcessName) {
        $testArgs += @("--jan-process-name", $ProcessName)
    }
}
python @testArgs

View File

@ -1,69 +0,0 @@
#!/bin/bash
# Common test runner script
#
# Unix entry point for the AutoQA suite (Ubuntu and macOS). Performs
# platform-specific preflight (X display on Ubuntu, app-path check on
# macOS), cd's into the autoqa directory, and launches main.py with
# whichever of the Jan path / process name arguments were provided.
#
# Positional parameters:
#   $1 Jan executable path   $2 Jan process name
#   $3 ReportPortal token    $4 platform ("ubuntu" | "macos")
JAN_APP_PATH="$1"
PROCESS_NAME="$2"
RP_TOKEN="$3"
PLATFORM="$4"
echo "Starting Auto QA Tests..."
echo "Platform: $PLATFORM"
echo "Jan app path: $JAN_APP_PATH"
echo "Process name: $PROCESS_NAME"
# Platform-specific setup
if [ "$PLATFORM" = "ubuntu" ]; then
    # Get the current display session
    # NOTE(review): `w -h` column 2 is the tty; this assumes the first
    # logged-in session's tty field holds the X display (e.g. ":0") —
    # the :0 fallback below covers the case where it does not.
    export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
    echo "Display ID: $DISPLAY"
    # Verify display is working
    if [ -z "$DISPLAY" ]; then
        echo "No display session found, falling back to :0"
        export DISPLAY=:0
    fi
    echo "Using display: $DISPLAY"
    # Test display connection; abort early if the X server is unreachable.
    xdpyinfo -display $DISPLAY >/dev/null 2>&1 || {
        echo "Display $DISPLAY is not available"
        exit 1
    }
    # Make Jan executable if needed
    if [ -f "/usr/bin/Jan-nightly" ]; then
        sudo chmod +x /usr/bin/Jan-nightly
    fi
    if [ -f "/usr/bin/Jan" ]; then
        sudo chmod +x /usr/bin/Jan
    fi
fi
# macOS specific setup
if [ "$PLATFORM" = "macos" ]; then
    # Verify Jan app path
    if [ ! -f "$JAN_APP_PATH" ]; then
        echo "❌ Jan app not found at: $JAN_APP_PATH"
        echo "Available files in /Applications:"
        ls -la /Applications/ | grep -i jan || echo "No Jan apps found"
        exit 1
    fi
fi
# Change to autoqa directory to ensure correct working directory
# (this script lives in autoqa/scripts/, so ".." is autoqa/).
cd "$(dirname "$0")/.."
echo "Current working directory: $(pwd)"
echo "Contents of current directory:"
ls -la
echo "Contents of trajectories directory (if exists):"
ls -la trajectories/ 2>/dev/null || echo "trajectories directory not found"
# Run the main test with proper arguments; optional args are only passed
# when their values were supplied.
if [ -n "$JAN_APP_PATH" ] && [ -n "$PROCESS_NAME" ]; then
    python main.py --enable-reportportal --rp-token "$RP_TOKEN" --jan-app-path "$JAN_APP_PATH" --jan-process-name "$PROCESS_NAME"
elif [ -n "$JAN_APP_PATH" ]; then
    python main.py --enable-reportportal --rp-token "$RP_TOKEN" --jan-app-path "$JAN_APP_PATH"
else
    python main.py --enable-reportportal --rp-token "$RP_TOKEN"
fi

View File

@ -1,80 +0,0 @@
#!/bin/bash
# Android Development Environment Setup for Jan
#
# Exports the Android SDK/NDK toolchain environment (JAVA_HOME, ANDROID_HOME,
# cross-compilers for aarch64-linux-android), creates convenience symlinks in
# ~/.local/bin, prints a tool-availability report, and finally exec's any
# command passed as arguments inside this environment.
# NOTE(review): paths assume macOS with Homebrew openjdk@17 and NDK
# 29.0.14033849 installed — update the pinned NDK version here when the
# installed NDK changes.
# Ensure rustup's Rust toolchain is used instead of Homebrew's
export PATH="$HOME/.cargo/bin:$PATH"
# Set JAVA_HOME for Android builds
export JAVA_HOME=/opt/homebrew/opt/openjdk@17/libexec/openjdk.jdk/Contents/Home
export PATH="/opt/homebrew/opt/openjdk@17/bin:$PATH"
export ANDROID_HOME="$HOME/Library/Android/sdk"
export ANDROID_NDK_ROOT="$HOME/Library/Android/sdk/ndk/29.0.14033849"
export NDK_HOME="$HOME/Library/Android/sdk/ndk/29.0.14033849"
# Add Android tools to PATH
export PATH=$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools:$ANDROID_HOME/cmdline-tools/latest/bin:$ANDROID_HOME/emulator:$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin
# Set up CC and CXX for Android compilation
export CC_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
export CXX_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++"
export AR_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
export RANLIB_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib"
# Additional environment variables for Rust cross-compilation
export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
# Only set global CC and AR for Android builds (when IS_ANDROID is set)
if [ "$IS_ANDROID" = "true" ]; then
    export CC="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
    export AR="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
    echo "Global CC and AR set for Android build"
fi
# Create symlinks for Android tools if they don't exist
mkdir -p ~/.local/bin
if [ ! -f ~/.local/bin/aarch64-linux-android-ranlib ]; then
    ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ranlib ~/.local/bin/aarch64-linux-android-ranlib
fi
if [ ! -f ~/.local/bin/aarch64-linux-android-clang ]; then
    ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang ~/.local/bin/aarch64-linux-android-clang
fi
if [ ! -f ~/.local/bin/aarch64-linux-android-clang++ ]; then
    ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang++ ~/.local/bin/aarch64-linux-android-clang++
fi
# Fix the broken clang symlinks by ensuring base clang is available
if [ ! -f ~/.local/bin/clang ]; then
    ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang ~/.local/bin/clang
fi
if [ ! -f ~/.local/bin/clang++ ]; then
    ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/clang++ ~/.local/bin/clang++
fi
# Create symlinks for target-specific ar tools
if [ ! -f ~/.local/bin/aarch64-linux-android-ar ]; then
    ln -sf $NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar ~/.local/bin/aarch64-linux-android-ar
fi
export PATH="$HOME/.local/bin:$PATH"
echo "Android environment configured:"
echo "ANDROID_HOME: $ANDROID_HOME"
echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT"
echo "PATH includes NDK toolchain: $(echo $PATH | grep -o "ndk.*bin" || echo "NOT FOUND")"
# Verify required tools
echo -e "\nChecking required tools:"
which adb && echo "✅ adb found" || echo "❌ adb not found"
which emulator && echo "✅ emulator found" || echo "❌ emulator not found"
which $CC_aarch64_linux_android && echo "✅ Android clang found" || echo "❌ Android clang not found"
# Show available AVDs
echo -e "\nAvailable Android Virtual Devices:"
emulator -list-avds 2>/dev/null || echo "No AVDs found"
# Execute the provided command (if any) inside this environment.
if [ "$1" ]; then
    echo -e "\nExecuting: $@"
    exec "$@"
fi

View File

@ -1,15 +0,0 @@
#!/bin/bash
# Setup script permissions for AutoQA scripts
#
# Marks every *.sh file that lives next to this script as executable, so the
# workflow can invoke them directly without a chmod step per script.
echo "Setting up permissions for AutoQA scripts..."
# Resolve the directory containing this script, independent of the CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Mark each shell script in that directory executable.
for script in "$SCRIPT_DIR"/*.sh; do
    chmod +x "$script"
done
echo "[SUCCESS] All shell scripts are now executable:"
ls -la "$SCRIPT_DIR"/*.sh
echo "[SUCCESS] Permission setup completed"

View File

@ -1,22 +0,0 @@
#!/bin/bash
# Ubuntu cleanup script for Jan app
# Removes data/cache folders and kills processes for both the regular and
# nightly builds, so a test run starts from a clean slate.
echo "Cleaning existing Jan installations..."
# Remove Jan data folders (both regular and nightly)
for dir in \
    ~/.config/Jan \
    ~/.config/Jan-nightly \
    ~/.local/share/Jan \
    ~/.local/share/Jan-nightly \
    ~/.cache/jan \
    ~/.cache/jan-nightly \
    ~/.local/share/jan-nightly.ai.app \
    ~/.local/share/jan.ai.app; do
    rm -rf "$dir"
done
# Kill any running Jan processes (both regular and nightly);
# `|| true` keeps the script going when no matching process exists.
for name in Jan jan Jan-nightly jan-nightly; do
    pkill -f "$name" || true
done
echo "Jan cleanup completed"

View File

@ -1,57 +0,0 @@
#!/bin/bash
# Ubuntu download script for Jan app
#
# Resolves the Jan installer URL and nightly flag from three sources in
# priority order, exports JAN_APP_URL / IS_NIGHTLY for later workflow steps,
# then downloads the .deb installer to /tmp/jan-installer.deb.
#
# Positional args:
#   $1 workflow-dispatch URL    $2 workflow-dispatch nightly flag
#   $3 repo-variable URL        $4 repo-variable nightly flag
#   $5 default URL              $6 default nightly flag
WORKFLOW_INPUT_URL="$1"
WORKFLOW_INPUT_IS_NIGHTLY="$2"
REPO_VARIABLE_URL="$3"
REPO_VARIABLE_IS_NIGHTLY="$4"
DEFAULT_URL="$5"
DEFAULT_IS_NIGHTLY="$6"
# Determine Jan app URL and nightly flag from multiple sources (priority order):
# 1. Workflow dispatch input (manual trigger)
# 2. Repository variable JAN_APP_URL_LINUX
# 3. Default URL from env
JAN_APP_URL=""
IS_NIGHTLY=false
if [ -n "$WORKFLOW_INPUT_URL" ]; then
    JAN_APP_URL="$WORKFLOW_INPUT_URL"
    IS_NIGHTLY="$WORKFLOW_INPUT_IS_NIGHTLY"
    echo "Using Jan app URL from workflow input: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
elif [ -n "$REPO_VARIABLE_URL" ]; then
    JAN_APP_URL="$REPO_VARIABLE_URL"
    IS_NIGHTLY="$REPO_VARIABLE_IS_NIGHTLY"
    echo "Using Jan app URL from repository variable: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
else
    JAN_APP_URL="$DEFAULT_URL"
    IS_NIGHTLY="$DEFAULT_IS_NIGHTLY"
    echo "Using default Jan app URL: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
fi
# Set environment variables for later steps.
# $GITHUB_ENV is quoted: the redirect target is a file path that may contain
# spaces on some runners.
echo "JAN_APP_URL=$JAN_APP_URL" >> "$GITHUB_ENV"
echo "IS_NIGHTLY=$IS_NIGHTLY" >> "$GITHUB_ENV"
echo "Downloading Jan app from: $JAN_APP_URL"
DOWNLOAD_PATH="/tmp/jan-installer.deb"
# Download the package
if ! wget "$JAN_APP_URL" -O "$DOWNLOAD_PATH"; then
    echo "Failed to download Jan app"
    exit 1
fi
if [ -f "$DOWNLOAD_PATH" ]; then
    FILE_SIZE=$(stat -c%s "$DOWNLOAD_PATH")
    # Guard against a zero-byte download (e.g. empty response body): a 0-byte
    # .deb would only fail later, at install time, with a confusing error.
    if [ "$FILE_SIZE" -eq 0 ]; then
        echo "Downloaded file is empty"
        exit 1
    fi
    echo "Downloaded Jan app successfully. Size: $FILE_SIZE bytes"
    echo "File saved to: $DOWNLOAD_PATH"
else
    echo "Downloaded file not found"
    exit 1
fi

View File

@ -1,39 +0,0 @@
#!/bin/bash
# Ubuntu install script for Jan app
# $1: "true" when installing the nightly build (changes binary/process names).
IS_NIGHTLY="$1"
INSTALLER_PATH="/tmp/jan-installer.deb"
echo "Installing Jan app..."
echo "Is nightly build: $IS_NIGHTLY"
# Install the .deb package; the follow-up `apt-get install -f` repairs any
# dependencies left broken by the first command.
sudo apt install "$INSTALLER_PATH" -y
sudo apt-get install -f -y
# Wait for installation to complete
sleep 10
echo "[INFO] Waiting for Jan app first initialization (120 seconds)..."
echo "This allows Jan to complete its initial setup and configuration"
sleep 120
echo "[SUCCESS] Initialization wait completed"
# Verify installation based on nightly flag
if [ "$IS_NIGHTLY" = "true" ]; then
    DEFAULT_JAN_PATH="/usr/bin/Jan-nightly"
    PROCESS_NAME="Jan-nightly"
else
    DEFAULT_JAN_PATH="/usr/bin/Jan"
    PROCESS_NAME="Jan"
fi
if [ -f "$DEFAULT_JAN_PATH" ]; then
    echo "Jan app installed successfully at: $DEFAULT_JAN_PATH"
    # Export the resolved executable path and process name for later steps.
    echo "JAN_APP_PATH=$DEFAULT_JAN_PATH" >> $GITHUB_ENV
    echo "JAN_PROCESS_NAME=$PROCESS_NAME" >> $GITHUB_ENV
else
    echo "Jan app not found at expected location: $DEFAULT_JAN_PATH"
    echo "Will auto-detect during test run"
fi

View File

@ -1,44 +0,0 @@
#!/bin/bash
# Ubuntu post-test cleanup script
# $1: "true" when the nightly build was installed (selects package to remove).
IS_NIGHTLY="$1"
echo "Cleaning up after tests..."
# Kill any running Jan processes (both regular and nightly)
for proc in Jan jan Jan-nightly jan-nightly; do
    pkill -f "$proc" || true
done
# Remove Jan data folders (both regular and nightly)
for dir in \
    ~/.config/Jan \
    ~/.config/Jan-nightly \
    ~/.local/share/Jan \
    ~/.local/share/Jan-nightly \
    ~/.cache/jan \
    ~/.cache/jan-nightly \
    ~/.local/share/jan-nightly.ai.app \
    ~/.local/share/jan.ai.app; do
    rm -rf "$dir"
done
# Try to uninstall Jan app
if [ "$IS_NIGHTLY" = "true" ]; then
    PACKAGE_NAME="jan-nightly"
else
    PACKAGE_NAME="jan"
fi
echo "Attempting to uninstall package: $PACKAGE_NAME"
if dpkg -l | grep -q "$PACKAGE_NAME"; then
    echo "Found package $PACKAGE_NAME, uninstalling..."
    sudo dpkg -r "$PACKAGE_NAME" || true
    sudo apt-get autoremove -y || true
else
    echo "Package $PACKAGE_NAME not found in dpkg list"
fi
# Clean up downloaded installer
rm -f "/tmp/jan-installer.deb"
echo "Cleanup completed"

View File

@ -1,50 +0,0 @@
#!/usr/bin/env pwsh
# Windows cleanup script for Jan app
# Removes data folders, kills processes, and deletes the extensions folder for
# both the regular and nightly builds.
param(
    [string]$IsNightly = "false"
)

Write-Host "Cleaning existing Jan installations..."

# Data folders used by both the regular and nightly builds.
$foldersToRemove = @(
    "$env:APPDATA\Jan",
    "$env:APPDATA\Jan-nightly",
    "$env:LOCALAPPDATA\jan.ai.app",
    "$env:LOCALAPPDATA\jan-nightly.ai.app"
)
foreach ($folder in $foldersToRemove) {
    if (Test-Path $folder) {
        Write-Host "Removing $folder"
        Remove-Item -Path $folder -Recurse -Force -ErrorAction SilentlyContinue
    }
}

# Kill any running Jan processes (both regular and nightly)
foreach ($procName in @("Jan", "jan", "Jan-nightly", "jan-nightly")) {
    Get-Process -Name $procName -ErrorAction SilentlyContinue | Stop-Process -Force -ErrorAction SilentlyContinue
}

# Remove Jan extensions folder
$janExtensionsPath = "$env:USERPROFILE\jan\extensions"
if (Test-Path $janExtensionsPath) {
    Write-Host "Removing $janExtensionsPath"
    Remove-Item -Path $janExtensionsPath -Recurse -Force -ErrorAction SilentlyContinue
}

Write-Host "Jan cleanup completed"

View File

@ -1,63 +0,0 @@
#!/usr/bin/env pwsh
# Windows download script for Jan app
#
# Resolves the Jan installer URL and nightly flag from three sources in
# priority order, exports JAN_APP_URL / IS_NIGHTLY for later workflow steps,
# then downloads the installer to $env:TEMP\jan-installer.exe.
param(
    [string]$WorkflowInputUrl = "",
    [string]$WorkflowInputIsNightly = "",
    [string]$RepoVariableUrl = "",
    [string]$RepoVariableIsNightly = "",
    [string]$DefaultUrl = "",
    [string]$DefaultIsNightly = ""
)
# Determine Jan app URL and nightly flag from multiple sources (priority order):
# 1. Workflow dispatch input (manual trigger)
# 2. Repository variable JAN_APP_URL
# 3. Default URL from env
$janAppUrl = ""
$isNightly = $false
if ($WorkflowInputUrl -ne "") {
    $janAppUrl = $WorkflowInputUrl
    $isNightly = [System.Convert]::ToBoolean($WorkflowInputIsNightly)
    Write-Host "Using Jan app URL from workflow input: $janAppUrl"
    Write-Host "Is nightly build: $isNightly"
}
elseif ($RepoVariableUrl -ne "") {
    $janAppUrl = $RepoVariableUrl
    $isNightly = [System.Convert]::ToBoolean($RepoVariableIsNightly)
    Write-Host "Using Jan app URL from repository variable: $janAppUrl"
    Write-Host "Is nightly build: $isNightly"
}
else {
    $janAppUrl = $DefaultUrl
    $isNightly = [System.Convert]::ToBoolean($DefaultIsNightly)
    Write-Host "Using default Jan app URL: $janAppUrl"
    Write-Host "Is nightly build: $isNightly"
}
# Set environment variables for later steps
Write-Output "JAN_APP_URL=$janAppUrl" >> $env:GITHUB_ENV
Write-Output "IS_NIGHTLY=$isNightly" >> $env:GITHUB_ENV
Write-Host "Downloading Jan app from: $janAppUrl"
$downloadPath = "$env:TEMP\jan-installer.exe"
try {
    # Use wget for better performance.
    # wget.exe is a NATIVE command: a failed download sets $LASTEXITCODE but
    # does not throw, so without the explicit exit-code check below the catch
    # block would never fire for a failed download.
    wget.exe "$janAppUrl" -O "$downloadPath"
    if ($LASTEXITCODE -ne 0) {
        throw "wget.exe failed with exit code $LASTEXITCODE"
    }
    if (Test-Path $downloadPath) {
        $fileSize = (Get-Item $downloadPath).Length
        Write-Host "Downloaded Jan app successfully. Size: $fileSize bytes"
        Write-Host "File saved to: $downloadPath"
    } else {
        throw "Downloaded file not found"
    }
}
catch {
    Write-Error "Failed to download Jan app: $_"
    exit 1
}

View File

@ -1,48 +0,0 @@
#!/usr/bin/env pwsh
# Windows install script for Jan app
# Runs the previously downloaded installer ($env:TEMP\jan-installer.exe),
# waits for Jan's first-run initialization, then exports JAN_APP_PATH and
# JAN_PROCESS_NAME to GITHUB_ENV for the test step.
param(
    [string]$IsNightly = "false"
)
$installerPath = "$env:TEMP\jan-installer.exe"
$isNightly = [System.Convert]::ToBoolean($IsNightly)
Write-Host "Installing Jan app..."
Write-Host "Is nightly build: $isNightly"
# Try silent installation first ("/S" requests an unattended install);
# fall back to an interactive run if launching with the switch throws.
try {
    Start-Process -FilePath $installerPath -ArgumentList "/S" -Wait -NoNewWindow
    Write-Host "Jan app installed silently"
}
catch {
    Write-Host "Silent installation failed, trying normal installation..."
    Start-Process -FilePath $installerPath -Wait -NoNewWindow
}
# Wait a bit for installation to complete
Start-Sleep -Seconds 10
Write-Host "[INFO] Waiting for Jan app first initialization (120 seconds)..."
Write-Host "This allows Jan to complete its initial setup and configuration"
Start-Sleep -Seconds 120
Write-Host "[SUCCESS] Initialization wait completed"
# Verify installation based on nightly flag
if ($isNightly) {
    $defaultJanPath = "$env:LOCALAPPDATA\Programs\jan-nightly\Jan-nightly.exe"
    $processName = "Jan-nightly.exe"
} else {
    $defaultJanPath = "$env:LOCALAPPDATA\Programs\jan\Jan.exe"
    $processName = "Jan.exe"
}
if (Test-Path $defaultJanPath) {
    Write-Host "Jan app installed successfully at: $defaultJanPath"
    # Export the resolved executable path and process name for later steps.
    Write-Output "JAN_APP_PATH=$defaultJanPath" >> $env:GITHUB_ENV
    Write-Output "JAN_PROCESS_NAME=$processName" >> $env:GITHUB_ENV
} else {
    Write-Warning "Jan app not found at expected location: $defaultJanPath"
    Write-Host "Will auto-detect during test run"
}

View File

@ -1,102 +0,0 @@
#!/usr/bin/env pwsh
# Windows post-test cleanup script
# Kills Jan processes, removes data/install folders, runs the uninstaller if
# present, and deletes the downloaded installer.
param(
    [string]$IsNightly = "false"
)

Write-Host "Cleaning up after tests..."

# Kill any running Jan processes (both regular and nightly)
foreach ($procName in @("Jan", "jan", "Jan-nightly", "jan-nightly")) {
    Get-Process -Name $procName -ErrorAction SilentlyContinue | Stop-Process -Force -ErrorAction SilentlyContinue
}

# Remove Jan data, install, and extensions folders (both regular and nightly)
$foldersToRemove = @(
    "$env:APPDATA\Jan",
    "$env:APPDATA\Jan-nightly",
    "$env:LOCALAPPDATA\jan.ai.app",
    "$env:LOCALAPPDATA\jan-nightly.ai.app",
    "$env:LOCALAPPDATA\Programs\Jan",
    "$env:LOCALAPPDATA\Programs\Jan-nightly",
    "$env:USERPROFILE\jan\extensions"
)
foreach ($folder in $foldersToRemove) {
    if (Test-Path $folder) {
        Write-Host "Removing $folder"
        Remove-Item -Path $folder -Recurse -Force -ErrorAction SilentlyContinue
    }
}

# Try to uninstall Jan app silently
try {
    $nightly = [System.Convert]::ToBoolean($IsNightly)
    # Determine uninstaller path based on nightly flag
    if ($nightly) {
        $uninstallerPath = "$env:LOCALAPPDATA\Programs\jan-nightly\uninstall.exe"
        $installPath = "$env:LOCALAPPDATA\Programs\jan-nightly"
    } else {
        $uninstallerPath = "$env:LOCALAPPDATA\Programs\jan\uninstall.exe"
        $installPath = "$env:LOCALAPPDATA\Programs\jan"
    }
    Write-Host "Looking for uninstaller at: $uninstallerPath"
    if (Test-Path $uninstallerPath) {
        Write-Host "Found uninstaller, attempting silent uninstall..."
        Start-Process -FilePath $uninstallerPath -ArgumentList "/S" -Wait -NoNewWindow -ErrorAction SilentlyContinue
        Write-Host "Uninstall completed"
    } else {
        Write-Host "No uninstaller found, attempting manual cleanup..."
        if (Test-Path $installPath) {
            Write-Host "Removing installation directory: $installPath"
            Remove-Item -Path $installPath -Recurse -Force -ErrorAction SilentlyContinue
        }
    }
    Write-Host "Jan app cleanup completed"
}
catch {
    Write-Warning "Failed to uninstall Jan app cleanly: $_"
    Write-Host "Manual cleanup may be required"
}

# Clean up downloaded installer
$installerPath = "$env:TEMP\jan-installer.exe"
if (Test-Path $installerPath) {
    Remove-Item -Path $installerPath -Force -ErrorAction SilentlyContinue
}

Write-Host "Cleanup completed"

View File

@ -1,322 +0,0 @@
import os
import asyncio
import threading
import time
import logging
from datetime import datetime
from pathlib import Path
# from computer import Computer
from agent import ComputerAgent, LLM
from utils import is_jan_running, force_close_jan, start_jan_app, get_latest_trajectory_folder
from screen_recorder import ScreenRecorder
from reportportal_handler import upload_test_results_to_rp
from reportportal_client.helpers import timestamp
logger = logging.getLogger(__name__)
async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id, max_turns=30,
                                       jan_app_path=None, jan_process_name="Jan.exe", agent_config=None,
                                       enable_reportportal=False):
    """
    Run a single test case with turn count monitoring, forced stop, and screen recording.

    Lifecycle: restart Jan -> start screen recording -> run the agent with a
    background turn-count monitor -> stop recording -> (optionally) upload to
    ReportPortal -> derive the final status from the trajectory -> force-close Jan.

    Args:
        computer: Computer handle forwarded to ComputerAgent.
        test_data: dict with 'path' (test file path) and 'prompt' (instructions).
        rp_client: ReportPortal client; used only when enable_reportportal is True.
        launch_id: ReportPortal launch id; used only when enable_reportportal is True.
        max_turns: force-stop once this many turn_* folders exist in the latest run.
        jan_app_path: explicit Jan executable path; None lets start_jan_app pick a default.
        jan_process_name: process name used to detect/kill Jan; containing
            'nightly' marks a nightly build.
        agent_config: dict with loop/model_provider/model_name/model_base_url;
            defaulted below when None.
        enable_reportportal: when True, upload results and video to ReportPortal.

    Returns dict with test result: {"success": bool, "status": str, "message": str}
    plus "trajectory_dir" and "video_path" keys.
    """
    path = test_data['path']
    prompt = test_data['prompt']
    # Detect if using nightly version based on process name
    is_nightly = "nightly" in jan_process_name.lower() if jan_process_name else False
    # Default agent config if not provided
    if agent_config is None:
        agent_config = {
            "loop": "uitars",
            "model_provider": "oaicompat",
            "model_name": "ByteDance-Seed/UI-TARS-1.5-7B",
            "model_base_url": "http://10.200.108.58:1234/v1"
        }
    # Create trajectory_dir from path (remove .txt extension)
    trajectory_name = str(Path(path).with_suffix(''))
    trajectory_base_dir = os.path.abspath(f"trajectories/{trajectory_name.replace(os.sep, '/')}")
    # Ensure trajectories directory exists
    os.makedirs(os.path.dirname(trajectory_base_dir), exist_ok=True)
    # Create recordings directory
    recordings_dir = "recordings"
    os.makedirs(recordings_dir, exist_ok=True)
    # Create video filename (timestamped, path separators flattened to '_')
    current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
    safe_test_name = trajectory_name.replace('/', '_').replace('\\', '_')
    video_filename = f"{safe_test_name}_{current_time}.mp4"
    video_path = os.path.abspath(os.path.join(recordings_dir, video_filename))
    # Initialize result tracking (pessimistic defaults; updated as the run progresses)
    test_result_data = {
        "success": False,
        "status": "UNKNOWN",
        "message": "Test execution incomplete",
        "trajectory_dir": None,
        "video_path": video_path
    }
    logger.info(f"Starting test: {path}")
    logger.info(f"Current working directory: {os.getcwd()}")
    logger.info(f"Trajectory base directory: {trajectory_base_dir}")
    logger.info(f"Screen recording will be saved to: {video_path}")
    logger.info(f"Using model: {agent_config['model_name']} from {agent_config['model_base_url']}")
    logger.info(f"ReportPortal upload: {'ENABLED' if enable_reportportal else 'DISABLED'}")
    trajectory_dir = None
    agent_task = None
    monitor_stop_event = threading.Event()
    force_stopped_due_to_turns = False  # Track if test was force stopped
    # Initialize screen recorder
    recorder = ScreenRecorder(video_path, fps=10)
    try:
        # Step 1: Check and force close Jan app if running
        if is_jan_running(jan_process_name):
            logger.info("Jan application is running, force closing...")
            force_close_jan(jan_process_name)
        # Step 2: Start Jan app in maximized mode
        if jan_app_path:
            start_jan_app(jan_app_path)
        else:
            start_jan_app()  # Use default path
        # Step 3: Start screen recording
        recorder.start_recording()
        # Step 4: Create agent for this test using config
        agent = ComputerAgent(
            computer=computer,
            loop=agent_config["loop"],
            model=LLM(
                provider=agent_config["model_provider"],
                name=agent_config["model_name"],
                provider_base_url=agent_config["model_base_url"]
            ),
            trajectory_dir=trajectory_base_dir
        )
        # Step 5: Start monitoring thread.
        # Polls the trajectory folder roughly every 5 seconds and cancels the
        # agent task once the latest run accumulates max_turns turn_* folders.
        def monitor_thread():
            nonlocal force_stopped_due_to_turns
            while not monitor_stop_event.is_set():
                try:
                    if os.path.exists(trajectory_base_dir):
                        folders = [f for f in os.listdir(trajectory_base_dir)
                                   if os.path.isdir(os.path.join(trajectory_base_dir, f))]
                        if folders:
                            # Folder names sort lexicographically; the last one
                            # is treated as the current (latest) run.
                            latest_folder = sorted(folders)[-1]
                            latest_folder_path = os.path.join(trajectory_base_dir, latest_folder)
                            if os.path.exists(latest_folder_path):
                                turn_folders = [f for f in os.listdir(latest_folder_path)
                                                if os.path.isdir(os.path.join(latest_folder_path, f)) and f.startswith("turn_")]
                                turn_count = len(turn_folders)
                                logger.info(f"Current turn count: {turn_count}")
                                if turn_count >= max_turns:
                                    logger.warning(f"Turn count exceeded {max_turns} for test {path}, forcing stop")
                                    force_stopped_due_to_turns = True  # Mark as force stopped
                                    # Cancel the agent task
                                    if agent_task and not agent_task.done():
                                        agent_task.cancel()
                                    monitor_stop_event.set()
                                    return
                    # Check every 5 seconds; wait() returns True when the stop
                    # event is set, in which case the loop exits.
                    if not monitor_stop_event.wait(5):
                        continue
                    else:
                        break
                except Exception as e:
                    logger.error(f"Error in monitor thread: {e}")
                    time.sleep(5)
        # Start monitoring in background thread
        monitor_thread_obj = threading.Thread(target=monitor_thread, daemon=True)
        monitor_thread_obj.start()
        # Step 6: Run the test with prompt
        logger.info(f"Running test case: {path}")
        try:
            # Create the agent task
            async def run_agent():
                async for result in agent.run(prompt):
                    if monitor_stop_event.is_set():
                        logger.warning(f"Test {path} stopped due to turn limit")
                        break
                    logger.info(f"Test result for {path}: {result}")
                    print(result)
            agent_task = asyncio.create_task(run_agent())
            # Wait for agent task to complete or timeout
            try:
                await asyncio.wait_for(agent_task, timeout=600)  # 10 minute timeout as backup
                if not monitor_stop_event.is_set():
                    logger.info(f"Successfully completed test execution: {path}")
                else:
                    logger.warning(f"Test {path} was stopped due to turn limit")
            except asyncio.TimeoutError:
                logger.warning(f"Test {path} timed out after 10 minutes")
                agent_task.cancel()
            except asyncio.CancelledError:
                logger.warning(f"Test {path} was cancelled due to turn limit")
            finally:
                # Stop monitoring
                monitor_stop_event.set()
        except Exception as e:
            logger.error(f"Error running test {path}: {e}")
            monitor_stop_event.set()
            # Update result data for exception case
            test_result_data.update({
                "success": False,
                "status": "ERROR",
                "message": f"Test execution failed with exception: {str(e)}",
                "trajectory_dir": None
            })
    finally:
        # Step 7: Stop screen recording
        try:
            recorder.stop_recording()
            logger.info(f"Screen recording saved to: {video_path}")
        except Exception as e:
            logger.error(f"Error stopping screen recording: {e}")
        # Step 8: Upload results to ReportPortal only if enabled
        if enable_reportportal and rp_client and launch_id:
            # Get trajectory folder first
            trajectory_dir = get_latest_trajectory_folder(trajectory_base_dir)
            try:
                if trajectory_dir:
                    logger.info(f"Uploading results to ReportPortal for: {path}")
                    logger.info(f"Video path for upload: {video_path}")
                    logger.info(f"Video exists: {os.path.exists(video_path)}")
                    if os.path.exists(video_path):
                        logger.info(f"Video file size: {os.path.getsize(video_path)} bytes")
                    upload_test_results_to_rp(rp_client, launch_id, path, trajectory_dir, force_stopped_due_to_turns, video_path, is_nightly)
                else:
                    logger.warning(f"Test completed but no trajectory found for: {path}")
                    # Handle case where test completed but no trajectory found
                    formatted_test_path = path.replace('\\', '/').replace('.txt', '').replace('/', '__')
                    test_item_id = rp_client.start_test_item(
                        launch_id=launch_id,
                        name=formatted_test_path,
                        start_time=timestamp(),
                        item_type="TEST"
                    )
                    rp_client.log(
                        time=timestamp(),
                        level="ERROR",
                        message="Test execution completed but no trajectory data found",
                        item_id=test_item_id
                    )
                    # Still upload video for failed test
                    if video_path and os.path.exists(video_path):
                        try:
                            with open(video_path, "rb") as video_file:
                                rp_client.log(
                                    time=timestamp(),
                                    level="INFO",
                                    message="[INFO] Screen recording of failed test",
                                    item_id=test_item_id,
                                    attachment={
                                        "name": f"failed_test_recording_{formatted_test_path}.mp4",
                                        "data": video_file.read(),
                                        # NOTE(review): file is .mp4 but mime declares AVI
                                        # ("video/x-msvideo") — confirm this is intended.
                                        "mime": "video/x-msvideo"
                                    }
                                )
                        except Exception as e:
                            logger.error(f"Error uploading video for failed test: {e}")
                    rp_client.finish_test_item(
                        item_id=test_item_id,
                        end_time=timestamp(),
                        status="FAILED"
                    )
            except Exception as upload_error:
                logger.error(f"Error uploading results for {path}: {upload_error}")
        else:
            # For non-ReportPortal mode, still get trajectory for final results
            trajectory_dir = get_latest_trajectory_folder(trajectory_base_dir)
        # Always process results for consistency (both RP and local mode)
        # trajectory_dir is already set above, no need to call get_latest_trajectory_folder again
        if trajectory_dir:
            # Extract test result for processing
            from reportportal_handler import extract_test_result_from_trajectory
            if force_stopped_due_to_turns:
                final_status = "FAILED"
                status_message = "exceeded maximum turn limit ({} turns)".format(max_turns)
                test_result_data.update({
                    "success": False,
                    "status": final_status,
                    "message": status_message,
                    "trajectory_dir": trajectory_dir
                })
            else:
                # Only an explicit True from the trajectory counts as a pass.
                test_result = extract_test_result_from_trajectory(trajectory_dir)
                if test_result is True:
                    final_status = "PASSED"
                    status_message = "completed successfully with positive result"
                    test_result_data.update({
                        "success": True,
                        "status": final_status,
                        "message": status_message,
                        "trajectory_dir": trajectory_dir
                    })
                else:
                    final_status = "FAILED"
                    status_message = "no valid success result found"
                    test_result_data.update({
                        "success": False,
                        "status": final_status,
                        "message": status_message,
                        "trajectory_dir": trajectory_dir
                    })
            if not enable_reportportal:
                # Local development mode - log results
                logger.info(f"[INFO] LOCAL RESULT: {path} - {final_status} ({status_message})")
                logger.info(f"[INFO] Video saved: {video_path}")
                logger.info(f"[INFO] Trajectory: {trajectory_dir}")
        else:
            final_status = "FAILED"
            status_message = "no trajectory found"
            test_result_data.update({
                "success": False,
                "status": final_status,
                "message": status_message,
                "trajectory_dir": None
            })
            if not enable_reportportal:
                logger.warning(f"[INFO] LOCAL RESULT: {path} - {final_status} ({status_message})")
        # Step 9: Always force close Jan app after test completion
        logger.info(f"Cleaning up after test: {path}")
        force_close_jan(jan_process_name)
    # Return test result
    return test_result_data

View File

@ -1,17 +0,0 @@
# Test prompt: download qwen3-0.6B from the Hub and verify a chat response.
# Fix: the intro previously named the model "bitcpm4" while the steps use
# qwen3-0.6B; the intro now matches the steps. Also fixed "models" -> "model's".
prompt = """
You are going to test the Jan application by downloading and chatting with a model (qwen3-0.6B).
Step-by-step instructions:
1. Given the Jan application is already opened.
2. In the **bottom-left corner**, click the **Hub** menu item.
3. Scroll through the model list or use the search bar to find **qwen3-0.6B**.
4. Click **Use** on the qwen3-0.6B model.
5. Wait for the model to finish downloading and become ready.
6. Once redirected to the chat screen, type any message into the input box (e.g. `Hello World`).
7. Press **Enter** to send the message.
8. Wait for the model's response.
If the model responds correctly, return: {"result": True}, otherwise return: {"result": False}.
In all your responses, use only plain ASCII characters. Do NOT use Unicode symbols
"""

View File

@ -1,343 +0,0 @@
import os
import logging
import subprocess
import psutil
import time
import pyautogui
import platform
from pathlib import Path
logger = logging.getLogger(__name__)
# Cross-platform window management
# Platform flags evaluated once at import time.
IS_LINUX = platform.system() == "Linux"
IS_WINDOWS = platform.system() == "Windows"
IS_MACOS = platform.system() == "Darwin"
# pygetwindow is only imported on Windows; gw stays None when it is missing
# so callers can feature-test with `if gw:`.
if IS_WINDOWS:
    try:
        import pygetwindow as gw
    except ImportError:
        gw = None
        logger.warning("pygetwindow not available on this system")
def is_jan_running(jan_process_name="Jan.exe"):
    """Return True if any running process name contains jan_process_name (case-insensitive)."""
    target = jan_process_name.lower()
    for proc in psutil.process_iter(['pid', 'name']):
        try:
            name = proc.info['name']
            if name and target in name.lower():
                return True
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # Process vanished or is inaccessible — skip it.
            continue
    return False
def force_close_jan(jan_process_name="Jan.exe"):
    """Kill every process whose name contains jan_process_name (case-insensitive)."""
    logger.info("Checking for running Jan processes...")
    target = jan_process_name.lower()
    killed_any = False
    for proc in psutil.process_iter(['pid', 'name']):
        try:
            name = proc.info['name']
            if not (name and target in name.lower()):
                continue
            logger.info(f"Force closing Jan process (PID: {proc.info['pid']})")
            proc.kill()
            killed_any = True
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            continue
    if killed_any:
        logger.info("Waiting for Jan processes to terminate...")
        time.sleep(3)  # Wait for processes to fully terminate
    else:
        logger.info("No Jan processes found running")
def find_jan_window_linux():
    """Locate a Jan window via `wmctrl -l`; return its window id string or None."""
    try:
        listing = subprocess.run(['wmctrl', '-l'], capture_output=True, text=True, timeout=10)
    except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError) as exc:
        logger.warning(f"wmctrl command failed: {exc}")
        return None
    if listing.returncode == 0:
        for entry in listing.stdout.split('\n'):
            if 'jan' in entry.lower() or 'Jan' in entry:
                # The window id is the first whitespace-separated column.
                window_id = entry.split()[0]
                logger.info(f"Found Jan window with ID: {window_id}")
                return window_id
    return None
def maximize_jan_window_linux():
    """Maximize the Jan window on Linux: wmctrl first, xdotool as fallback."""
    window_id = find_jan_window_linux()
    if window_id:
        try:
            # Maximize window using wmctrl
            subprocess.run(['wmctrl', '-i', '-r', window_id, '-b', 'add,maximized_vert,maximized_horz'],
                           timeout=5)
            logger.info("Jan window maximized using wmctrl")
            return True
        except (subprocess.TimeoutExpired, subprocess.SubprocessError) as exc:
            logger.warning(f"Failed to maximize with wmctrl: {exc}")
    # Fallback: drive the window through xdotool instead
    try:
        search = subprocess.run(['xdotool', 'search', '--name', 'Jan'],
                                capture_output=True, text=True, timeout=5)
        if search.returncode == 0 and search.stdout.strip():
            target = search.stdout.strip().split('\n')[0]
            subprocess.run(['xdotool', 'windowactivate', target], timeout=5)
            subprocess.run(['xdotool', 'key', 'alt+F10'], timeout=5)  # Maximize shortcut
            logger.info("Jan window maximized using xdotool")
            return True
    except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError) as exc:
        logger.warning(f"xdotool command failed: {exc}")
    return False
def find_jan_window_macos():
    """Return the name of a running Jan process on macOS (via AppleScript), or None."""
    # Ask System Events for any process whose name contains "Jan".
    script = '''
    tell application "System Events"
        set janApps to (every process whose name contains "Jan")
        if length of janApps > 0 then
            return name of first item of janApps
        else
            return ""
        end if
    end tell
    '''
    try:
        result = subprocess.run(['osascript', '-e', script],
                                capture_output=True, text=True, timeout=10)
    except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError) as exc:
        logger.warning(f"AppleScript command failed: {exc}")
        return None
    app_name = result.stdout.strip()
    if result.returncode == 0 and app_name:
        logger.info(f"Found Jan app: {app_name}")
        return app_name
    return None
def maximize_jan_window_macos():
    """Maximize the Jan window on macOS: AppleScript first, hotkey as fallback."""
    app_name = find_jan_window_macos()
    if app_name:
        # Primary path: ask System Events to put window 1 into fullscreen.
        script = f'''
        tell application "System Events"
            tell process "{app_name}"
                set frontmost to true
                tell window 1
                    set value of attribute "AXFullScreen" to true
                end tell
            end tell
        end tell
        '''
        try:
            result = subprocess.run(['osascript', '-e', script], timeout=10)
            if result.returncode == 0:
                logger.info("Jan window maximized using AppleScript")
                return True
        except (subprocess.TimeoutExpired, subprocess.SubprocessError) as exc:
            logger.warning(f"Failed to maximize with AppleScript: {exc}")
    # Fallback: Cmd+Ctrl+F (fullscreen hotkey on macOS)
    try:
        logger.info("Trying Cmd+Ctrl+F hotkey to maximize")
        pyautogui.hotkey('cmd', 'ctrl', 'f')
        time.sleep(1)
        logger.info("Attempted to maximize using Cmd+Ctrl+F")
        return True
    except Exception as exc:
        logger.warning(f"Hotkey maximize failed: {exc}")
        return False
def maximize_jan_window():
    """
    Find and maximize Jan window (cross-platform).

    Dispatches to the platform-specific helper, then falls back to keyboard
    shortcuts when direct window management fails. Returns True when a
    maximize attempt was made (not a guarantee the window actually maximized).
    """
    try:
        # Wait a bit for window to appear
        time.sleep(2)
        if IS_LINUX:
            return maximize_jan_window_linux()
        elif IS_MACOS:
            return maximize_jan_window_macos()
        elif IS_WINDOWS and gw:
            # Method 1: Try to find window by title containing "Jan"
            windows = gw.getWindowsWithTitle("Jan")
            if windows:
                jan_window = windows[0]
                logger.info(f"Found Jan window: {jan_window.title}")
                jan_window.maximize()
                logger.info("Jan window maximized using pygetwindow")
                return True
        # Fallback methods for both platforms
        # (reached on Windows when no titled window was found or gw is None)
        # Method 2: Try Alt+Space then X (maximize hotkey) - works on both platforms
        logger.info("Trying Alt+Space+X hotkey to maximize")
        pyautogui.hotkey('alt', 'space')
        time.sleep(0.5)
        pyautogui.press('x')
        logger.info("Attempted to maximize using Alt+Space+X")
        return True
    except Exception as e:
        logger.warning(f"Could not maximize Jan window: {e}")
        # Method 3: Platform-specific fallback
        try:
            if IS_WINDOWS:
                logger.info("Trying Windows+Up arrow to maximize")
                pyautogui.hotkey('win', 'up')
            elif IS_LINUX:
                logger.info("Trying Alt+F10 to maximize")
                pyautogui.hotkey('alt', 'F10')
            elif IS_MACOS:
                logger.info("Trying macOS specific maximize")
                pyautogui.hotkey('cmd', 'tab')  # Switch to Jan if it's running
                time.sleep(0.5)
            return True
        except Exception as e2:
            logger.warning(f"All maximize methods failed: {e2}")
            return False
def start_jan_app(jan_app_path=None):
    """
    Start Jan application in maximized window (cross-platform).

    Args:
        jan_app_path: explicit path to the Jan executable (or macOS .app
            bundle). When None a per-platform default is used.

    Raises:
        FileNotFoundError: if no executable exists at jan_app_path.
        NotImplementedError: on unsupported platforms.
    """
    # Set default path based on platform
    if jan_app_path is None:
        if IS_WINDOWS:
            jan_app_path = os.path.expanduser(r"~\AppData\Local\Programs\jan\Jan.exe")
        elif IS_LINUX:
            # NOTE(review): original comment said 'or "/usr/bin/Jan" for regular',
            # duplicating the same path — the install scripts use
            # /usr/bin/Jan-nightly for nightly builds; confirm intended default.
            jan_app_path = "/usr/bin/Jan"
        elif IS_MACOS:
            jan_app_path = "/Applications/Jan.app/Contents/MacOS/Jan"  # Default macOS path
        else:
            raise NotImplementedError(f"Platform {platform.system()} not supported")
    logger.info(f"Starting Jan application from: {jan_app_path}")
    if not os.path.exists(jan_app_path):
        logger.error(f"Jan executable not found at: {jan_app_path}")
        raise FileNotFoundError(f"Jan app not found at {jan_app_path}")
    try:
        # Start the Jan application
        if IS_WINDOWS:
            subprocess.Popen([jan_app_path], shell=True)
        elif IS_LINUX:
            # On Linux, start with DISPLAY environment variable
            env = os.environ.copy()
            subprocess.Popen([jan_app_path], env=env)
        elif IS_MACOS:
            # On macOS, use 'open' command to launch .app bundle properly
            if jan_app_path.endswith('.app/Contents/MacOS/Jan'):
                # Use the .app bundle path instead
                app_bundle = jan_app_path.replace('/Contents/MacOS/Jan', '')
                subprocess.Popen(['open', app_bundle])
            elif jan_app_path.endswith('.app'):
                # Direct .app bundle
                subprocess.Popen(['open', jan_app_path])
            elif '/Contents/MacOS/' in jan_app_path:
                # Extract app bundle from full executable path
                app_bundle = jan_app_path.split('/Contents/MacOS/')[0]
                subprocess.Popen(['open', app_bundle])
            else:
                # Fallback: try to execute directly
                subprocess.Popen([jan_app_path])
        else:
            raise NotImplementedError(f"Platform {platform.system()} not supported")
        logger.info("Jan application started")
        # Wait for app to fully load
        logger.info("Waiting for Jan application to initialize...")
        time.sleep(5)
        # Try to maximize the window
        if maximize_jan_window():
            logger.info("Jan application maximized successfully")
        else:
            logger.warning("Could not maximize Jan application window")
        # Wait a bit more after maximizing
        time.sleep(10)
        logger.info("Jan application should be ready, waiting for additional setup...")
        time.sleep(10)  # Additional wait to ensure everything is ready
    except Exception as e:
        logger.error(f"Error starting Jan application: {e}")
        raise
def scan_test_files(tests_dir="tests"):
    """
    Collect every .txt file under ``tests_dir`` (recursively).

    Each discovered file is read as UTF-8 and its stripped content is used
    as the test prompt.

    Args:
        tests_dir: Root directory to scan. Defaults to "tests".

    Returns:
        list[dict]: Entries of the form
            ``{'path': <path relative to tests_dir>, 'prompt': <content>}``.
            Empty when the directory is missing or holds no .txt files.
    """
    discovered = []
    root = Path(tests_dir)
    if not root.exists():
        logger.error(f"Tests directory {tests_dir} does not exist!")
        return discovered
    # Walk the whole tree (folder and subfolders) for *.txt files.
    for candidate in root.rglob("*.txt"):
        try:
            # Read and strip the prompt text in one go.
            prompt_text = candidate.read_text(encoding='utf-8').strip()
            rel = candidate.relative_to(root)
            discovered.append({
                'path': str(rel),
                'prompt': prompt_text
            })
            logger.info(f"Found test file: {rel}")
        except Exception as e:
            logger.error(f"Error reading file {candidate}: {e}")
    return discovered
def get_latest_trajectory_folder(trajectory_base_path):
    """
    Return the newest trajectory folder inside ``trajectory_base_path``.

    "Newest" is decided by reverse-lexicographic folder name, which matches
    the timestamp naming scheme (e.g. ``20250715_100443``) so name order and
    chronological order coincide.

    Args:
        trajectory_base_path: Directory that holds per-run trajectory folders.

    Returns:
        The full path of the latest folder, or None when the base path is
        missing or contains no subdirectories.
    """
    if not os.path.exists(trajectory_base_path):
        logger.warning(f"Trajectory base path not found: {trajectory_base_path}")
        return None
    # Keep directory entries only; plain files in the base path are ignored.
    subdirs = [
        entry
        for entry in os.listdir(trajectory_base_path)
        if os.path.isdir(os.path.join(trajectory_base_path, entry))
    ]
    if not subdirs:
        logger.warning(f"No trajectory folders found in: {trajectory_base_path}")
        return None
    # Timestamp-style names sort chronologically, so max() is the newest.
    newest = max(subdirs)
    result = os.path.join(trajectory_base_path, newest)
    logger.info(f"Found latest trajectory folder: {result}")
    return result

View File

@ -1,71 +0,0 @@
# Contributing to Jan Core
[← Back to Main Contributing Guide](../CONTRIBUTING.md)
TypeScript SDK providing extension system, APIs, and type definitions for all Jan components.
## Key Directories
- **`/src/browser`** - Core APIs (events, extensions, file system)
- **`/src/browser/extensions`** - Built-in extensions (assistant, inference, conversational)
- **`/src/types`** - TypeScript type definitions
- **`/src/test`** - Testing utilities
## Development
### Key Principles
1. **Platform Agnostic** - Works everywhere (browser, Node.js)
2. **Extension-Based** - New features = new extensions
3. **Type Everything** - TypeScript required
4. **Event-Driven** - Components communicate via events
### Building & Testing
```bash
# Build the SDK
yarn build
# Run tests
yarn test
# Watch mode
yarn test:watch
```
### Event System
```typescript
// Emit events
events.emit('model:loaded', { modelId: 'llama-3' })
// Listen for events
events.on('model:loaded', (data) => {
console.log('Model loaded:', data.modelId)
})
```
## Testing
```typescript
describe('MyFeature', () => {
it('should do something', () => {
const result = doSomething()
expect(result).toBe('expected')
})
})
```
## Best Practices
- Keep it simple
- Use TypeScript fully (no `any`)
- Write tests for critical features
- Follow existing patterns
- Export new modules in index files
## Dependencies
- **TypeScript** - Type safety
- **Rolldown** - Bundling
- **Vitest** - Testing

View File

@ -9,11 +9,14 @@
```js ```js
// Web / extension runtime // Web / extension runtime
import * as core from '@janhq/core' import * as core from '@janhq/core'
// Node runtime
import * as node from '@janhq/core/node'
``` ```
## Build an Extension ## Build an Extension
1. Download an extension template, for example, [https://github.com/janhq/extension-template](https://github.com/janhq/extension-template). 1. Download an extension template, for example, [https://github.com/menloresearch/extension-template](https://github.com/menloresearch/extension-template).
2. Update the source code: 2. Update the source code:

17
core/jest.config.js Normal file
View File

@ -0,0 +1,17 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
moduleNameMapper: {
'@/(.*)': '<rootDir>/src/$1',
},
runner: './testRunner.js',
transform: {
"^.+\\.tsx?$": [
"ts-jest",
{
diagnostics: false,
},
],
},
}

View File

@ -8,46 +8,61 @@
], ],
"homepage": "https://jan.ai", "homepage": "https://jan.ai",
"license": "AGPL-3.0", "license": "AGPL-3.0",
"browser": "dist/index.js",
"main": "dist/index.js", "main": "dist/index.js",
"module": "dist/node/index.cjs.js",
"typings": "dist/types/index.d.ts", "typings": "dist/types/index.d.ts",
"files": [ "files": [
"dist", "dist",
"types" "types"
], ],
"author": "Jan <service@jan.ai>", "author": "Jan <service@jan.ai>",
"exports": {
".": "./dist/index.js",
"./node": "./dist/node/index.cjs.js"
},
"typesVersions": {
"*": {
".": [
"./dist/index.js.map",
"./dist/types/index.d.ts"
],
"node": [
"./dist/node/index.cjs.js.map",
"./dist/types/node/index.d.ts"
]
}
},
"scripts": { "scripts": {
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'", "lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
"test": "vitest run", "test": "jest",
"test:watch": "vitest",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"prebuild": "rimraf dist", "prebuild": "rimraf dist",
"build": "tsc -p . && rolldown -c rolldown.config.mjs" "build": "tsc -p . && rolldown -c rolldown.config.mjs"
}, },
"devDependencies": { "devDependencies": {
"@npmcli/arborist": "^7.1.0", "@npmcli/arborist": "^7.1.0",
"@types/jest": "^29.5.14",
"@types/node": "^22.10.0", "@types/node": "^22.10.0",
"@types/react": "19.1.2", "@types/pacote": "^11.1.7",
"@vitest/coverage-v8": "^2.1.8", "@types/request": "^2.48.12",
"@vitest/ui": "^2.1.8", "electron": "33.2.1",
"eslint": "8.57.0", "eslint": "8.57.0",
"happy-dom": "^20.0.0", "eslint-plugin-jest": "^27.9.0",
"jest": "^29.7.0",
"jest-junit": "^16.0.0",
"jest-runner": "^29.7.0",
"pacote": "^21.0.0", "pacote": "^21.0.0",
"react": "19.0.0",
"request": "^2.88.2", "request": "^2.88.2",
"request-progress": "^3.0.0", "request-progress": "^3.0.0",
"rimraf": "^6.0.1", "rimraf": "^3.0.2",
"rolldown": "1.0.0-beta.1", "rolldown": "1.0.0-beta.1",
"ts-jest": "^29.2.5",
"tslib": "^2.6.2", "tslib": "^2.6.2",
"typescript": "^5.8.3", "typescript": "^5.3.3"
"vitest": "^2.1.8"
}, },
"dependencies": { "dependencies": {
"rxjs": "^7.8.1", "rxjs": "^7.8.1",
"ulidx": "^2.3.0" "ulidx": "^2.3.0"
}, },
"peerDependencies": {
"react": "19.0.0"
},
"packageManager": "yarn@4.5.3" "packageManager": "yarn@4.5.3"
} }

View File

@ -10,10 +10,41 @@ export default defineConfig([
sourcemap: true, sourcemap: true,
}, },
platform: 'browser', platform: 'browser',
external: ['path', 'react', 'react-dom', 'react/jsx-runtime'], external: ['path'],
define: { define: {
NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`), NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
VERSION: JSON.stringify(pkgJson.version), VERSION: JSON.stringify(pkgJson.version),
}, },
} },
{
input: 'src/node/index.ts',
external: [
'fs/promises',
'path',
'pacote',
'@types/pacote',
'@npmcli/arborist',
'ulidx',
'fs',
'request',
'crypto',
'url',
'http',
'os',
'util',
'child_process',
'electron',
'request-progress',
],
output: {
format: 'cjs',
file: 'dist/node/index.cjs.js',
sourcemap: true,
inlineDynamicImports: true,
},
resolve: {
extensions: ['.js', '.ts'],
},
platform: 'node',
},
]) ])

View File

@ -1,4 +1,3 @@
import { describe, it, expect, vi } from 'vitest'
import { openExternalUrl } from './core' import { openExternalUrl } from './core'
import { joinPath } from './core' import { joinPath } from './core'
import { openFileExplorer } from './core' import { openFileExplorer } from './core'
@ -10,7 +9,7 @@ describe('test core apis', () => {
const url = 'http://example.com' const url = 'http://example.com'
globalThis.core = { globalThis.core = {
api: { api: {
openExternalUrl: vi.fn().mockResolvedValue('opened'), openExternalUrl: jest.fn().mockResolvedValue('opened'),
}, },
} }
const result = await openExternalUrl(url) const result = await openExternalUrl(url)
@ -22,11 +21,11 @@ describe('test core apis', () => {
const paths = ['/path/one', '/path/two'] const paths = ['/path/one', '/path/two']
globalThis.core = { globalThis.core = {
api: { api: {
joinPath: vi.fn().mockResolvedValue('/path/one/path/two'), joinPath: jest.fn().mockResolvedValue('/path/one/path/two'),
}, },
} }
const result = await joinPath(paths) const result = await joinPath(paths)
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith({ args: paths }) expect(globalThis.core.api.joinPath).toHaveBeenCalledWith(paths)
expect(result).toBe('/path/one/path/two') expect(result).toBe('/path/one/path/two')
}) })
@ -34,29 +33,43 @@ describe('test core apis', () => {
const path = '/path/to/open' const path = '/path/to/open'
globalThis.core = { globalThis.core = {
api: { api: {
openFileExplorer: vi.fn().mockResolvedValue('opened'), openFileExplorer: jest.fn().mockResolvedValue('opened'),
}, },
} }
const result = await openFileExplorer(path) const result = await openFileExplorer(path)
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith({ path }) expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith(path)
expect(result).toBe('opened') expect(result).toBe('opened')
}) })
it('should get jan data folder path', async () => { it('should get jan data folder path', async () => {
globalThis.core = { globalThis.core = {
api: { api: {
getJanDataFolderPath: vi.fn().mockResolvedValue('/path/to/jan/data'), getJanDataFolderPath: jest.fn().mockResolvedValue('/path/to/jan/data'),
}, },
} }
const result = await getJanDataFolderPath() const result = await getJanDataFolderPath()
expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled() expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled()
expect(result).toBe('/path/to/jan/data') expect(result).toBe('/path/to/jan/data')
}) })
it('should execute function on main process', async () => {
const extension = 'testExtension'
const method = 'testMethod'
const args = ['arg1', 'arg2']
globalThis.core = {
api: {
invokeExtensionFunc: jest.fn().mockResolvedValue('result'),
},
}
const result = await executeOnMain(extension, method, ...args)
expect(globalThis.core.api.invokeExtensionFunc).toHaveBeenCalledWith(extension, method, ...args)
expect(result).toBe('result')
})
}) })
describe('dirName - just a pass thru api', () => { describe('dirName - just a pass thru api', () => {
it('should retrieve the directory name from a file path', async () => { it('should retrieve the directory name from a file path', async () => {
const mockDirName = vi.fn() const mockDirName = jest.fn()
globalThis.core = { globalThis.core = {
api: { api: {
dirName: mockDirName.mockResolvedValue('/path/to'), dirName: mockDirName.mockResolvedValue('/path/to'),

View File

@ -1,5 +1,21 @@
import { SystemInformation } from '../types' import { SystemInformation } from '../types'
/**
* Execute a extension module function in main process
*
* @param extension extension name to import
* @param method function name to execute
* @param args arguments to pass to the function
* @returns Promise<any>
*
*/
const executeOnMain: (extension: string, method: string, ...args: any[]) => Promise<any> = (
extension,
method,
...args
) => globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
/** /**
* Gets Jan's data folder path. * Gets Jan's data folder path.
* *
@ -13,15 +29,15 @@ const getJanDataFolderPath = (): Promise<string> => globalThis.core.api?.getJanD
* @returns {Promise<any>} A promise that resolves when the file explorer is opened. * @returns {Promise<any>} A promise that resolves when the file explorer is opened.
*/ */
const openFileExplorer: (path: string) => Promise<any> = (path) => const openFileExplorer: (path: string) => Promise<any> = (path) =>
globalThis.core.api?.openFileExplorer({ path }) globalThis.core.api?.openFileExplorer(path)
/** /**
* Joins multiple paths together. * Joins multiple paths together.
* @param paths - The paths to join. * @param paths - The paths to join.
* @returns {Promise<string>} A promise that resolves with the joined path. * @returns {Promise<string>} A promise that resolves with the joined path.
*/ */
const joinPath: (args: string[]) => Promise<string> = (args) => const joinPath: (paths: string[]) => Promise<string> = (paths) =>
globalThis.core.api?.joinPath({ args }) globalThis.core.api?.joinPath(paths)
/** /**
* Get dirname of a file path. * Get dirname of a file path.
@ -78,6 +94,13 @@ const log: (message: string, fileName?: string) => void = (message, fileName) =>
const isSubdirectory: (from: string, to: string) => Promise<boolean> = (from: string, to: string) => const isSubdirectory: (from: string, to: string) => Promise<boolean> = (from: string, to: string) =>
globalThis.core.api?.isSubdirectory(from, to) globalThis.core.api?.isSubdirectory(from, to)
/**
* Get system information
* @returns {Promise<any>} - A promise that resolves with the system information.
*/
const systemInformation: () => Promise<SystemInformation> = () =>
globalThis.core.api?.systemInformation()
/** /**
* Show toast message from browser processes. * Show toast message from browser processes.
* @param title * @param title
@ -101,6 +124,7 @@ export type RegisterExtensionPoint = (
* Functions exports * Functions exports
*/ */
export { export {
executeOnMain,
getJanDataFolderPath, getJanDataFolderPath,
openFileExplorer, openFileExplorer,
getResourcePath, getResourcePath,
@ -110,6 +134,7 @@ export {
log, log,
isSubdirectory, isSubdirectory,
getUserHomePath, getUserHomePath,
systemInformation,
showToast, showToast,
dirName, dirName,
} }

View File

@ -1,11 +1,11 @@
import { it, expect, vi } from 'vitest'
import { events } from './events'; import { events } from './events';
import { jest } from '@jest/globals';
it('should emit an event', () => { it('should emit an event', () => {
const mockObject = { key: 'value' }; const mockObject = { key: 'value' };
globalThis.core = { globalThis.core = {
events: { events: {
emit: vi.fn() emit: jest.fn()
} }
}; };
events.emit('testEvent', mockObject); events.emit('testEvent', mockObject);
@ -14,10 +14,10 @@ it('should emit an event', () => {
it('should remove an observer for an event', () => { it('should remove an observer for an event', () => {
const mockHandler = vi.fn(); const mockHandler = jest.fn();
globalThis.core = { globalThis.core = {
events: { events: {
off: vi.fn() off: jest.fn()
} }
}; };
events.off('testEvent', mockHandler); events.off('testEvent', mockHandler);
@ -26,10 +26,10 @@ it('should remove an observer for an event', () => {
it('should add an observer for an event', () => { it('should add an observer for an event', () => {
const mockHandler = vi.fn(); const mockHandler = jest.fn();
globalThis.core = { globalThis.core = {
events: { events: {
on: vi.fn() on: jest.fn()
} }
}; };
events.on('testEvent', mockHandler); events.on('testEvent', mockHandler);

View File

@ -1,8 +1,9 @@
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
import { BaseExtension } from './extension' import { BaseExtension } from './extension'
import { SettingComponentProps } from '../types' import { SettingComponentProps } from '../types'
vi.mock('./core') import { getJanDataFolderPath, joinPath } from './core'
vi.mock('./fs') import { fs } from './fs'
jest.mock('./core')
jest.mock('./fs')
class TestBaseExtension extends BaseExtension { class TestBaseExtension extends BaseExtension {
onLoad(): void {} onLoad(): void {}
@ -17,7 +18,7 @@ describe('BaseExtension', () => {
}) })
afterEach(() => { afterEach(() => {
vi.clearAllMocks() jest.resetAllMocks()
}) })
it('should have the correct properties', () => { it('should have the correct properties', () => {
@ -57,7 +58,7 @@ describe('BaseExtension', () => {
}) })
afterEach(() => { afterEach(() => {
vi.clearAllMocks() jest.resetAllMocks()
}) })
it('should have the correct properties', () => { it('should have the correct properties', () => {
@ -89,32 +90,18 @@ describe('BaseExtension', () => {
{ key: 'setting2', controllerProps: { value: 'value2' } } as any, { key: 'setting2', controllerProps: { value: 'value2' } } as any,
] ]
const localStorageMock = (() => { ;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
let store: Record<string, string> = {} ;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension')
;(fs.existsSync as jest.Mock).mockResolvedValue(false)
;(fs.mkdir as jest.Mock).mockResolvedValue(undefined)
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
return {
getItem: (key: string) => store[key] || null,
setItem: (key: string, value: string) => {
store[key] = value
},
removeItem: (key: string) => {
delete store[key]
},
clear: () => {
store = {}
},
}
})()
Object.defineProperty(global, 'localStorage', {
value: localStorageMock,
})
const mock = vi.spyOn(localStorage, 'setItem')
await baseExtension.registerSettings(settings) await baseExtension.registerSettings(settings)
expect(mock).toHaveBeenCalledWith( expect(fs.mkdir).toHaveBeenCalledWith('/data/settings/TestExtension')
'TestExtension', expect(fs.writeFileSync).toHaveBeenCalledWith(
JSON.stringify(settings) '/data/settings/TestExtension',
JSON.stringify(settings, null, 2)
) )
}) })
@ -123,7 +110,7 @@ describe('BaseExtension', () => {
{ key: 'setting1', controllerProps: { value: 'value1' } } as any, { key: 'setting1', controllerProps: { value: 'value1' } } as any,
] ]
vi.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings) jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
const value = await baseExtension.getSetting('setting1', 'defaultValue') const value = await baseExtension.getSetting('setting1', 'defaultValue')
expect(value).toBe('value1') expect(value).toBe('value1')
@ -137,16 +124,18 @@ describe('BaseExtension', () => {
{ key: 'setting1', controllerProps: { value: 'value1' } } as any, { key: 'setting1', controllerProps: { value: 'value1' } } as any,
] ]
vi.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings) jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
const mockSetItem = vi.spyOn(localStorage, 'setItem') ;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension/settings.json')
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
await baseExtension.updateSettings([ await baseExtension.updateSettings([
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any, { key: 'setting1', controllerProps: { value: 'newValue' } } as any,
]) ])
expect(mockSetItem).toHaveBeenCalledWith( expect(fs.writeFileSync).toHaveBeenCalledWith(
'TestExtension', '/data/settings/TestExtension/settings.json',
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }]) JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }], null, 2)
) )
}) })
}) })

View File

@ -1,4 +1,7 @@
import { Model, SettingComponentProps } from '../types' import { Model, ModelEvent, SettingComponentProps } from '../types'
import { getJanDataFolderPath, joinPath } from './core'
import { events } from './events'
import { fs } from './fs'
import { ModelManager } from './models' import { ModelManager } from './models'
export enum ExtensionTypeEnum { export enum ExtensionTypeEnum {
@ -7,12 +10,9 @@ export enum ExtensionTypeEnum {
Inference = 'inference', Inference = 'inference',
Model = 'model', Model = 'model',
SystemMonitoring = 'systemMonitoring', SystemMonitoring = 'systemMonitoring',
MCP = 'mcp',
HuggingFace = 'huggingFace', HuggingFace = 'huggingFace',
Engine = 'engine', Engine = 'engine',
Hardware = 'hardware', Hardware = 'hardware',
RAG = 'rag',
VectorDB = 'vectorDB',
} }
export interface ExtensionType { export interface ExtensionType {
@ -117,34 +117,31 @@ export abstract class BaseExtension implements ExtensionType {
return return
} }
const extensionSettingFolderPath = await joinPath([
await getJanDataFolderPath(),
'settings',
this.name,
])
settings.forEach((setting) => { settings.forEach((setting) => {
setting.extensionName = this.name setting.extensionName = this.name
}) })
try { try {
const oldSettingsJson = localStorage.getItem(this.name) if (!(await fs.existsSync(extensionSettingFolderPath)))
await fs.mkdir(extensionSettingFolderPath)
const settingFilePath = await joinPath([extensionSettingFolderPath, this.settingFileName])
// Persists new settings // Persists new settings
if (oldSettingsJson) { if (await fs.existsSync(settingFilePath)) {
const oldSettings = JSON.parse(oldSettingsJson) const oldSettings = JSON.parse(await fs.readFileSync(settingFilePath, 'utf-8'))
settings.forEach((setting) => { settings.forEach((setting) => {
// Keep setting value // Keep setting value
if (setting.controllerProps && Array.isArray(oldSettings)) if (setting.controllerProps && Array.isArray(oldSettings))
setting.controllerProps.value = setting.controllerProps.value = oldSettings.find(
oldSettings.find((e: any) => e.key === setting.key)?.controllerProps?.value ?? (e: any) => e.key === setting.key
setting.controllerProps.value )?.controllerProps?.value
if ('options' in setting.controllerProps)
setting.controllerProps.options = setting.controllerProps.options?.length
? setting.controllerProps.options
: oldSettings.find((e: any) => e.key === setting.key)?.controllerProps?.options
if ('recommended' in setting.controllerProps) {
const oldRecommended = oldSettings.find((e: any) => e.key === setting.key)
?.controllerProps?.recommended
if (oldRecommended !== undefined && oldRecommended !== '') {
setting.controllerProps.recommended = oldRecommended
}
}
}) })
} }
localStorage.setItem(this.name, JSON.stringify(settings)) await fs.writeFileSync(settingFilePath, JSON.stringify(settings, null, 2))
} catch (err) { } catch (err) {
console.error(err) console.error(err)
} }
@ -183,10 +180,17 @@ export abstract class BaseExtension implements ExtensionType {
async getSettings(): Promise<SettingComponentProps[]> { async getSettings(): Promise<SettingComponentProps[]> {
if (!this.name) return [] if (!this.name) return []
const settingPath = await joinPath([
await getJanDataFolderPath(),
this.settingFolderName,
this.name,
this.settingFileName,
])
try { try {
const settingsString = localStorage.getItem(this.name) if (!(await fs.existsSync(settingPath))) return []
if (!settingsString) return [] const content = await fs.readFileSync(settingPath, 'utf-8')
const settings: SettingComponentProps[] = JSON.parse(settingsString) const settings: SettingComponentProps[] = JSON.parse(content)
return settings return settings
} catch (err) { } catch (err) {
console.warn(err) console.warn(err)
@ -216,7 +220,19 @@ export abstract class BaseExtension implements ExtensionType {
if (!updatedSettings.length) updatedSettings = componentProps as SettingComponentProps[] if (!updatedSettings.length) updatedSettings = componentProps as SettingComponentProps[]
localStorage.setItem(this.name, JSON.stringify(updatedSettings)) const settingFolder = await joinPath([
await getJanDataFolderPath(),
this.settingFolderName,
this.name,
])
if (!(await fs.existsSync(settingFolder))) {
await fs.mkdir(settingFolder)
}
const settingPath = await joinPath([settingFolder, this.settingFileName])
await fs.writeFileSync(settingPath, JSON.stringify(updatedSettings, null, 2))
updatedSettings.forEach((setting) => { updatedSettings.forEach((setting) => {
this.onSettingUpdate<typeof setting.controllerProps.value>( this.onSettingUpdate<typeof setting.controllerProps.value>(

View File

@ -1,5 +1,4 @@
import { it, expect } from 'vitest'
import { AssistantExtension } from './assistant'; import { AssistantExtension } from './assistant';
import { ExtensionTypeEnum } from '../extension'; import { ExtensionTypeEnum } from '../extension';

View File

@ -1,4 +1,3 @@
import { describe, it, test, expect, beforeEach } from 'vitest'
import { ConversationalExtension } from './conversational' import { ConversationalExtension } from './conversational'
import { ExtensionTypeEnum } from '../extension' import { ExtensionTypeEnum } from '../extension'
import { Thread, ThreadAssistantInfo, ThreadMessage } from '../../types' import { Thread, ThreadAssistantInfo, ThreadMessage } from '../../types'

View File

@ -1,11 +1,10 @@
import { describe, it, expect, beforeEach, vi } from 'vitest'
import { AIEngine } from './AIEngine' import { AIEngine } from './AIEngine'
import { events } from '../../events' import { events } from '../../events'
import { ModelEvent, Model } from '../../../types' import { ModelEvent, Model } from '../../../types'
vi.mock('../../events') jest.mock('../../events')
vi.mock('./EngineManager') jest.mock('./EngineManager')
vi.mock('../../fs') jest.mock('../../fs')
class TestAIEngine extends AIEngine { class TestAIEngine extends AIEngine {
onUnload(): void {} onUnload(): void {}
@ -14,38 +13,6 @@ class TestAIEngine extends AIEngine {
inference(data: any) {} inference(data: any) {}
stopInference() {} stopInference() {}
async list(): Promise<any[]> {
return []
}
async load(modelId: string): Promise<any> {
return { pid: 1, port: 8080, model_id: modelId, model_path: '', api_key: '' }
}
async unload(sessionId: string): Promise<any> {
return { success: true }
}
async chat(opts: any): Promise<any> {
return { id: 'test', object: 'chat.completion', created: Date.now(), model: 'test', choices: [] }
}
async delete(modelId: string): Promise<void> {
return
}
async import(modelId: string, opts: any): Promise<void> {
return
}
async abortImport(modelId: string): Promise<void> {
return
}
async getLoadedModels(): Promise<string[]> {
return []
}
} }
describe('AIEngine', () => { describe('AIEngine', () => {
@ -53,34 +20,38 @@ describe('AIEngine', () => {
beforeEach(() => { beforeEach(() => {
engine = new TestAIEngine('', '') engine = new TestAIEngine('', '')
vi.clearAllMocks() jest.clearAllMocks()
}) })
it('should load model successfully', async () => { it('should load model if provider matches', async () => {
const modelId = 'model1' const model: any = { id: 'model1', engine: 'test-provider' } as any
const result = await engine.load(modelId) await engine.loadModel(model)
expect(result).toEqual({ pid: 1, port: 8080, model_id: modelId, model_path: '', api_key: '' }) expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelReady, model)
}) })
it('should unload model successfully', async () => { it('should not load model if provider does not match', async () => {
const sessionId = 'session1' const model: any = { id: 'model1', engine: 'other-provider' } as any
const result = await engine.unload(sessionId) await engine.loadModel(model)
expect(result).toEqual({ success: true }) expect(events.emit).not.toHaveBeenCalledWith(ModelEvent.OnModelReady, model)
}) })
it('should list models', async () => { it('should unload model if provider matches', async () => {
const result = await engine.list() const model: Model = { id: 'model1', version: '1.0', engine: 'test-provider' } as any
expect(result).toEqual([]) await engine.unloadModel(model)
expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelStopped, model)
}) })
it('should get loaded models', async () => { it('should not unload model if provider does not match', async () => {
const result = await engine.getLoadedModels() const model: Model = { id: 'model1', version: '1.0', engine: 'other-provider' } as any
expect(result).toEqual([]) await engine.unloadModel(model)
expect(events.emit).not.toHaveBeenCalledWith(ModelEvent.OnModelStopped, model)
}) })
}) })

View File

@ -1,237 +1,24 @@
import { events } from '../../events'
import { BaseExtension } from '../../extension' import { BaseExtension } from '../../extension'
import { MessageRequest, Model, ModelEvent } from '../../../types'
import { EngineManager } from './EngineManager' import { EngineManager } from './EngineManager'
/* AIEngine class types */
export interface chatCompletionRequestMessage {
role: 'system' | 'user' | 'assistant' | 'tool'
content: string | null | Content[] // Content can be a string OR an array of content parts
reasoning?: string | null // Some models return reasoning in completed responses
reasoning_content?: string | null // Some models return reasoning in completed responses
name?: string
tool_calls?: any[] // Simplified tool_call_id?: string
}
export interface Content {
type: 'text' | 'image_url' | 'input_audio'
text?: string
image_url?: string
input_audio?: InputAudio
}
export interface InputAudio {
data: string // Base64 encoded audio data
format: 'mp3' | 'wav' | 'ogg' | 'flac' // Add more formats as needed/llama-server seems to support mp3
}
export interface ToolFunction {
name: string // Required: a-z, A-Z, 0-9, _, -, max length 64
description?: string
parameters?: Record<string, unknown> // JSON Schema object
strict?: boolean | null // Defaults to false
}
export interface Tool {
type: 'function' // Currently, only 'function' is supported
function: ToolFunction
}
export interface ToolCallOptions {
tools?: Tool[]
}
// A specific tool choice to force the model to call
export interface ToolCallSpec {
type: 'function'
function: {
name: string
}
}
// tool_choice may be one of several modes or a specific call
export type ToolChoice = 'none' | 'auto' | 'required' | ToolCallSpec
export interface chatCompletionRequest {
model: string // Model ID, though for local it might be implicit via sessionInfo
messages: chatCompletionRequestMessage[]
thread_id?: string // Thread/conversation ID for context tracking
return_progress?: boolean
tools?: Tool[]
tool_choice?: ToolChoice
// Core sampling parameters
temperature?: number | null
dynatemp_range?: number | null
dynatemp_exponent?: number | null
top_k?: number | null
top_p?: number | null
min_p?: number | null
typical_p?: number | null
repeat_penalty?: number | null
repeat_last_n?: number | null
presence_penalty?: number | null
frequency_penalty?: number | null
dry_multiplier?: number | null
dry_base?: number | null
dry_allowed_length?: number | null
dry_penalty_last_n?: number | null
dry_sequence_breakers?: string[] | null
xtc_probability?: number | null
xtc_threshold?: number | null
mirostat?: number | null // 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0
mirostat_tau?: number | null
mirostat_eta?: number | null
n_predict?: number | null
n_indent?: number | null
n_keep?: number | null
stream?: boolean | null
stop?: string | string[] | null
seed?: number | null // RNG seed
// Advanced sampling
logit_bias?: { [key: string]: number } | null
n_probs?: number | null
min_keep?: number | null
t_max_predict_ms?: number | null
image_data?: Array<{ data: string; id: number }> | null
// Internal/optimization parameters
id_slot?: number | null
cache_prompt?: boolean | null
return_tokens?: boolean | null
samplers?: string[] | null
timings_per_token?: boolean | null
post_sampling_probs?: boolean | null
chat_template_kwargs?: chat_template_kdict | null
}
export interface chat_template_kdict {
enable_thinking: false
}
export interface chatCompletionChunkChoiceDelta {
content?: string | null
role?: 'system' | 'user' | 'assistant' | 'tool'
tool_calls?: any[] // Simplified
}
export interface chatCompletionChunkChoice {
index: number
delta: chatCompletionChunkChoiceDelta
finish_reason?: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null
}
export interface chatCompletionPromptProgress {
cache: number
processed: number
time_ms: number
total: number
}
export interface chatCompletionChunk {
id: string
object: 'chat.completion.chunk'
created: number
model: string
choices: chatCompletionChunkChoice[]
system_fingerprint?: string
prompt_progress?: chatCompletionPromptProgress
}
export interface chatCompletionChoice {
index: number
message: chatCompletionRequestMessage // Response message
finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call'
logprobs?: any // Simplified
}
export interface chatCompletion {
id: string
object: 'chat.completion'
created: number
model: string // Model ID used
choices: chatCompletionChoice[]
usage?: {
prompt_tokens: number
completion_tokens: number
total_tokens: number
}
system_fingerprint?: string
}
// --- End OpenAI types ---
// Shared model metadata
/** Metadata describing one model known to a provider. */
export interface modelInfo {
  id: string // e.g. "qwen3-4B" or "org/model/quant"
  name: string // human-readable, e.g. "Qwen3 4B Q4_0"
  quant_type?: string // q4_0 (optional as it might be part of ID or name)
  providerId: string // Owning engine, e.g. "llama.cpp"
  port: number // Serving port — per-provider semantics; TODO confirm meaning for unloaded models
  sizeBytes: number // Model size in bytes
  tags?: string[]
  path?: string // Absolute path to the model file, if applicable
  // Additional provider-specific metadata can be added here.
  // NOTE(review): this broad index signature disables excess-property checking for the whole type.
  [key: string]: any
}
// 1. /list
/** Result of the /list endpoint: every model the provider knows about. */
export type listResult = modelInfo[]
/** Handle describing a model loaded into a running server process. */
export interface SessionInfo {
  pid: number // opaque handle for unload/chat
  port: number // llama-server output port (corrected from portid)
  model_id: string // name of the model
  model_path: string // path of the loaded model
  is_embedding: boolean // presumably true when the session serves embeddings rather than chat — confirm
  api_key: string // key expected by the local server — NOTE(review): scope (per-session vs global) not shown here; confirm
  mmproj_path?: string // multimodal projector file, when the model is multimodal
}
/** Outcome of unloading a session. */
export interface UnloadResult {
  success: boolean // true when the session was stopped
  error?: string // Failure description when `success` is false
}
// 5. /chat
/** Parameters routing one chat request to a loaded session. */
export interface chatOptions {
  providerId: string // Engine that owns the session, e.g. "llama.cpp"
  sessionId: string // Target session — presumably corresponds to a SessionInfo handle; confirm
  /** Full OpenAI ChatCompletionRequest payload */
  payload: chatCompletionRequest
}
// Output for /chat will be Promise<ChatCompletion> for non-streaming
// or Promise<AsyncIterable<ChatCompletionChunk>> for streaming
// 7. /import
/** Parameters for importing a local model file (optionally with a multimodal projector). */
export interface ImportOptions {
  modelPath: string // Path to the model file to import
  mmprojPath?: string // Optional multimodal projector path
  modelSha256?: string // presumably an expected checksum for integrity verification — confirm against caller
  modelSize?: number // presumably expected size in bytes — confirm
  mmprojSha256?: string // as modelSha256, for the projector file
  mmprojSize?: number // as modelSize, for the projector file
}
/** Outcome of a model import. */
export interface importResult {
  success: boolean // true when the import completed
  modelInfo?: modelInfo // Metadata of the imported model on success
  error?: string // Failure description when `success` is false
}
/** /**
* Base AIEngine * Base AIEngine
* Applicable to all AI Engines * Applicable to all AI Engines
*/ */
export abstract class AIEngine extends BaseExtension { export abstract class AIEngine extends BaseExtension {
// The inference engine ID, implementing the readonly providerId from interface // The inference engine
abstract readonly provider: string abstract provider: string
/** /**
* On extension load, subscribe to events. * On extension load, subscribe to events.
*/ */
override onLoad() { override onLoad() {
this.registerEngine() this.registerEngine()
events.on(ModelEvent.OnModelInit, (model: Model) => this.loadModel(model))
events.on(ModelEvent.OnModelStop, (model: Model) => this.unloadModel(model))
} }
/** /**
@ -242,61 +29,29 @@ export abstract class AIEngine extends BaseExtension {
} }
/** /**
* Gets model info * Loads the model.
* @param modelId
*/ */
abstract get(modelId: string): Promise<modelInfo | undefined> async loadModel(model: Model): Promise<any> {
if (model.engine.toString() !== this.provider) return Promise.resolve()
/** events.emit(ModelEvent.OnModelReady, model)
* Lists available models return Promise.resolve()
*/ }
abstract list(): Promise<modelInfo[]> /**
* Stops the model.
/** */
* Loads a model into memory async unloadModel(model?: Model): Promise<any> {
*/ if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
abstract load(modelId: string, settings?: any): Promise<SessionInfo> events.emit(ModelEvent.OnModelStopped, model ?? {})
return Promise.resolve()
/** }
* Unloads a model from memory
*/ /*
abstract unload(sessionId: string): Promise<UnloadResult> * Inference request
*/
/** inference(data: MessageRequest) {}
* Sends a chat request to the model
*/ /**
abstract chat( * Stop inference
opts: chatCompletionRequest, */
abortController?: AbortController stopInference() {}
): Promise<chatCompletion | AsyncIterable<chatCompletionChunk>>
/**
* Deletes a model
*/
abstract delete(modelId: string): Promise<void>
/**
* Updates a model
*/
abstract update(modelId: string, model: Partial<modelInfo>): Promise<void>
/**
* Imports a model
*/
abstract import(modelId: string, opts: ImportOptions): Promise<void>
/**
* Aborts an ongoing model import
*/
abstract abortImport(modelId: string): Promise<void>
/**
* Get currently loaded models
*/
abstract getLoadedModels(): Promise<string[]>
/**
* Check if a tool is supported by the model
* @param modelId
*/
abstract isToolSupported(modelId: string): Promise<boolean>
} }

Some files were not shown because too many files have changed in this diff Show More