Compare commits

...

79 Commits

Author SHA1 Message Date
hiento09
6ee99b836b chore: remove deprecated workflows 2025-04-30 15:26:25 +07:00
hiento09
b749b39ddf chore: update cicd 2025-04-30 10:18:17 +07:00
vansangpfiev
78317e9c73 chore: upload artifacts 2025-04-28 16:02:19 +07:00
vansangpfiev
6c67843446 chore: hide windows install detail 2025-04-28 14:47:22 +07:00
vansangpfiev
0b850f2ead chore: test tauri manual build 2025-04-28 11:36:23 +07:00
vansangpfiev
20809723dc chore: workflows for tauri 2025-04-28 11:34:00 +07:00
vansangpfiev
130d17f45e chore: CI/CD for Windows, Linux 2025-04-28 10:22:13 +07:00
commit 4897b2bcf7f044080fce81bd725515e62fc4eb29
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Fri Apr 25 15:32:37 2025 +0700

    chore: cleanup tauri config

commit 66c5676ec146b25c89cccb570ede7c070dbc5853
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Fri Apr 25 14:37:04 2025 +0700

    fix: store path

commit bc6560c576873e55f84c4b21764bedbdd9dbd5a8
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Thu Apr 24 09:39:50 2025 +0700

    chore: Linux CI

commit b036275dc9f1df7614aaca3b358b9c6493082512
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Wed Apr 23 16:41:22 2025 +0700

    chore: updater windows

commit e91b543dbdd82bd4a44db7550ffb993897b56081
Merge: dea80a83 4a54a378
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Wed Apr 23 16:39:24 2025 +0700

    Merge branch 'chore/tauri-cicd' of https://github.com/menloresearch/jan into chore/tauri-cicd-windows

commit dea80a83966113b108137c385a3c28920d2adda4
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Wed Apr 23 11:47:04 2025 +0700

    chore: update azuresigntool install method

commit 2ec2234082be57e53887192153fa982a134ea535
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Wed Apr 23 11:01:31 2025 +0700

    chore: add verbose option build tauri and targets app and dmg for macos build

commit 42c7592cc89641130545551d4d864268cde3d5b0
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Wed Apr 23 10:35:27 2025 +0700

    chore: update targets build

commit 4c8ba44ff60cdef8b639fa189f5729dc69c5aff6
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Wed Apr 23 09:53:21 2025 +0700

    refactor: remove debug step and upload electron build artifact

commit 158c08b465e18823e0f2b9a30fd5ecd589d08934
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Wed Apr 23 09:21:08 2025 +0700

    chore: add script codesign on windows

commit 4545b2bcd852029472298e530176494992dd0950
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Tue Apr 22 13:39:49 2025 +0700

    chore: update csp setting

commit f64a1e1ca958e3c1c685485a06d45956ddcf14a0
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Tue Apr 22 10:15:14 2025 +0700

    chore: update azuresigntool installation

commit 1f4b9d18b332d5205685a6fe68f5dfaf973d273c
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Tue Apr 22 09:49:42 2025 +0700

    chore: update signcommand

commit 911a3ab3540f872f6fe906c8e2135440d39f108c
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 19:19:23 2025 +0700

    chore: update codesign tauri windows

commit fba15c4c2de43b4cb87308ef998cdd8dc88b1ce6
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 19:04:29 2025 +0700

    chore: update path azuresigntool

commit 8b8c950b56f5aa42baf76aba064fc99b50758150
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 18:38:56 2025 +0700

    chore: update azuresigntool path

commit bd67a2b7908b5f3a126c634a840e0b941373a3c6
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 17:47:33 2025 +0700

    chore: update azuresigntool url

commit f70effca7c09cd2fe9b5866b4f194b64a13294b9
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 17:33:32 2025 +0700

    chore: update azuretoolsign download

commit 667910772f30369b9afa554ad06e4378f93d0b1a
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 16:56:25 2025 +0700

    chore: update path azuresigntool

commit f1610bfd80dfa996db4a777bb58475f2e6d02cc6
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 16:52:36 2025 +0700

    chore: update azuresigntool path

commit 0873d56fb88fb66c884eff31d3f63aa99858f038
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 16:19:46 2025 +0700

    chore: add debug step

commit 88e0b1a697ed478375429686eb1c03ae71a3b447
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 15:58:39 2025 +0700

    ci(windows): download AzureSignTool to src-tauri for Tauri code signing

commit 47f94e86589826c3941a3d602298f188d6480980
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 15:21:20 2025 +0700

    fix: AzureSignTool signcommand Path

commit dc014a7905fd0b49b5972e24b4d5773c5dc29ea5
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 15:00:02 2025 +0700

    chore: add debug step azuresigntool tauri windows

commit ee7b6163a8419604dfba7dc2f967026be4884da4
Author: Minh141120 <minh.itptit@gmail.com>
Date:   Mon Apr 21 14:33:33 2025 +0700

    chore: tauri windows codesign

commit 6607090857120531d8a096f45ff556c3f2553e53
Author: vansangpfiev <vansangpfiev@gmail.com>
Date:   Thu Apr 17 10:29:50 2025 +0700

    chore: add windows download script

commit 4b1a5cc29c77eecca75978a1ab3126d2c710e738
Author: Nguyen Ngoc Minh <kuuhakuu@Nguyens-MacBook-Air.local>
Date:   Mon Apr 21 13:44:34 2025 +0700

    chore: tauri codesign windows
vansangpfiev
4a54a378c7 chore: self sign tauri 2025-04-22 09:52:36 +07:00
vansangpfiev
ba003c3936 chore: jan-nightly to 1320 2025-04-18 16:48:40 +07:00
vansangpfiev
8941112015 fix: correct pre_install_path 2025-04-18 15:52:36 +07:00
vansangpfiev
17f34cceba chore: nightly to 1317 2025-04-18 11:18:39 +07:00
vansangpfiev
2cba4b98f3 chore: update csp config and cors 2025-04-18 11:17:06 +07:00
vansangpfiev
b83ee8dd0d Merge branch 'chore/tauri-cicd' of https://github.com/menloresearch/jan into chore/tauri-cicd 2025-04-18 06:16:14 +07:00
vansangpfiev
a3cb4f0ee7 Merge branch 'feat/tauri-build-option' of https://github.com/menloresearch/jan into chore/tauri-cicd 2025-04-18 06:15:49 +07:00
David
e6ed713bf2
Merge pull request #4919 from menloresearch/fix/threads-update-persistence
fix: clean up threads persistence and fix assistant update issue
2025-04-17 21:52:13 +07:00
Louis
2b732bc976
fix: clean up threads persistence and fix assistant update issue 2025-04-17 21:49:32 +07:00
David
0bb04e59d3
Merge pull request #4918 from menloresearch/fix/tool-outputs-gone-after-switching-threads
fix: tool outputs are gone after switching to another thread
2025-04-17 21:36:44 +07:00
Louis
513aac80e0
fix: tool outputs are gone after switching to another thread 2025-04-17 21:09:53 +07:00
Service Account
c8e1a198c7 chore: test update from electron to tauri 2025-04-17 17:52:13 +07:00
Louis
c9bef9ff35
feat: restart MCP servers to reflect config updates (#4917) 2025-04-17 15:25:24 +07:00
Service Account
a0402a1ab5 chore: test update from electron to tauri 2025-04-17 13:37:52 +07:00
Service Account
b926f35c01 chore: codesign tauri app 2025-04-17 12:34:22 +07:00
Faisal Amir
a0d8d9cb27
Merge pull request #4913 from menloresearch/feat/mcp-setting
feat: MCP settings UI
2025-04-15 20:37:00 +07:00
Faisal Amir
f07a821a00 chore: update tab setting 2025-04-15 20:05:19 +07:00
Faisal Amir
6aa2ea8da4 chore: update UI mcp settings 2025-04-15 20:05:19 +07:00
Louis
70be283d0e chore: add mcp write and read commands 2025-04-15 20:05:12 +07:00
Faisal Amir
ef1a85b58c chore: setting mcp 2025-04-15 20:04:09 +07:00
Faisal Amir
40d63853ec chore: initial commit mcp setting 2025-04-15 20:04:09 +07:00
Louis
b252f716d7
refactor: Jan manages threads for a better performance (#4912)
* refactor: Jan manages threads for a better performance

* test: add tests
2025-04-15 18:57:43 +07:00
vansangpfiev
64756cca94 chore: build tauri 2025-04-15 09:49:31 +07:00
Louis
31f7073977
chore: missing import 2025-04-14 15:33:53 +07:00
Louis
57786e5e45
Merge pull request #4900 from menloresearch/feat/jan-ui-with-tool-use
feat: jan UI with Tool use UX
2025-04-14 15:23:31 +07:00
David
a8e418c4d3
Merge pull request #4903 from menloresearch/4899-bug-missing-border-for-search-or-enter-hf-url-text-field
fix: added border for search textfield
2025-04-14 00:40:07 +07:00
David
45b8458570 fix: added border for search textfield 2025-04-14 00:20:33 +07:00
Faisal Amir
9879a14352
Merge pull request #4887 from menloresearch/chore/window-effect-tauri
chore: enable window effect blur
2025-04-09 20:12:31 +07:00
Faisal Amir
82819728c5 chore: enable window effect blur tauri 2025-04-09 18:28:00 +07:00
Louis
6ed5e38a7e
chore: mcp supports env 2025-04-09 09:08:17 +07:00
Faisal Amir
acdb554596
Merge pull request #4877 from menloresearch/chore/dragable-window
chore: allow draggable window app
2025-04-08 18:31:46 +07:00
Faisal Amir
08362eb4d2 chore: exclude lock file 2025-04-08 15:02:56 +07:00
Louis
f3326bc161
chore: fix tests 2025-04-07 14:11:25 +07:00
Louis
a10cec1a66
refactor: remove FS call from frontend - remove any casting 2025-04-07 13:34:14 +07:00
Louis
7e2498cc79
Merge pull request #4860 from menloresearch/feat/mcp-jan-frontend
feat: Jan Tool Use - MCP frontend implementation
2025-04-07 02:24:08 +07:00
Louis
c91cde3654
chore: differentiate debug and release logging destination 2025-04-07 02:06:56 +07:00
Louis
38c7355e43
refactor: app logging - script-src access in release 2025-04-07 02:03:51 +07:00
Louis
7392b2f92b
chore: app updater 2025-04-02 23:53:54 +07:00
Louis
8b1709c14f
refactor: clean up assistant extension to remove node modules 2025-04-02 10:32:42 +07:00
Louis
c335caeb42
refactor: remove lagecy tools 2025-04-02 10:26:02 +07:00
Louis
e4658ce98c
chore: tool type 2025-03-31 20:22:45 +07:00
Louis
3dd80841c2
feat: Jan Tool Use - MCP frontend implementation 2025-03-31 20:22:45 +07:00
Louis
94b77db294
chore: clean up 2025-03-31 20:13:31 +07:00
Louis
3248d4eff6
chore: clean up generated assets 2025-03-31 19:59:52 +07:00
Louis
b6c051dc77
Merge pull request #4859 from menloresearch/feat/mcp-jan-backend
feat: Jan - MCP Client Host
2025-03-31 19:40:09 +07:00
Louis
e8d99117d6
chore: clean up 2025-03-31 11:43:58 +07:00
Louis
bbd818d0ad
chore: clean up 2025-03-31 11:43:57 +07:00
Louis
a4a746e18b
refactor: clean up 2025-03-31 11:43:57 +07:00
Louis
fb55826395
chore: improve startup time 2025-03-31 11:43:57 +07:00
Louis
b07c617d72
chore: add tests 2025-03-31 11:43:57 +07:00
Louis
d287586ae8
feat: Jan supports MCP as a client host 2025-03-31 11:43:57 +07:00
Louis
4ab5393f3e
chore: ignore lock files - bring back required assets 2025-03-31 11:34:41 +07:00
Louis
541428c72c
refactor: remove generated assets 2025-03-31 11:23:32 +07:00
Louis
aea39d372a
chore: add make step to run tauri dev 2025-03-28 17:28:45 +07:00
Louis
3b93e29c3e
refactor: proxy server and clean up 2025-03-27 21:21:30 +07:00
Louis
57c537f6a4
chore: fix tests 2025-03-27 16:38:05 +07:00
Louis
01964d3d03
chore: maintain electron build 2025-03-27 15:24:52 +07:00
Louis
3066f2ad79
chore: fix tests 2025-03-26 20:57:56 +07:00
Louis
afd83b6018
chore: copy resources on build 2025-03-26 20:39:47 +07:00
Louis
fecfa19d17
chore: update tests 2025-03-26 20:16:17 +07:00
Louis
60114ed620
chore: bump cortex with cors fix 2025-03-26 15:45:51 +07:00
Louis
5e77517a7f
feat: extensions versioning 2025-03-26 15:45:50 +07:00
Louis
52bf1b2062
chore: update app icon 2025-03-26 15:45:50 +07:00
Louis
0d7b89fbad
chore: kill sidecar process on exit 2025-03-26 15:45:50 +07:00
Louis
ed8bb06c27
chore: fix fs api 2025-03-26 15:45:50 +07:00
Louis
048872a3c8
chore: allow shell open default browser 2025-03-26 15:45:50 +07:00
Louis
c0756767cd
refactor: setup extensions, engines and sidecar 2025-03-26 15:45:49 +07:00
Louis
5b8eab6469
chore: clean up logs opening 2025-03-26 15:45:49 +07:00
Louis
e112583a87
chore: update native apis 2025-03-26 15:45:49 +07:00
Louis
27fad2b0b2
chore: factory reset works 2025-03-26 15:45:49 +07:00
Louis
d2a13409b2
chore: simplify themes and assistants 2025-03-26 15:45:49 +07:00
Louis
9815d03cc4
feat: tauri toolkit 2025-03-26 15:45:48 +07:00
130 changed files with 6523 additions and 2665 deletions

.github/scripts/electron-checksum.py vendored Normal file

@@ -0,0 +1,28 @@
import hashlib
import base64
import sys

def hash_file(file_path):
    # Create a SHA-512 hash object
    sha512 = hashlib.sha512()
    # Read and update the hash object with the content of the file
    with open(file_path, 'rb') as f:
        while True:
            data = f.read(1024 * 1024)  # Read in 1 MB chunks
            if not data:
                break
            sha512.update(data)
    # Obtain the hash result and encode it in base64
    hash_base64 = base64.b64encode(sha512.digest()).decode('utf-8')
    return hash_base64

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python3 script.py <file_path>")
        sys.exit(1)
    file_path = sys.argv[1]
    hash_base64_output = hash_file(file_path)
    print(hash_base64_output)
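
The script prints a base64-encoded SHA-512 digest, the format electron-updater-style manifests use for their sha512 fields (an inference from the file name; the workflows decide how the value is consumed). A quick cross-check from a shell, assuming openssl is available:

    # Sketch: reproduce the script's output for an arbitrary file.
    # "openssl base64 -A" keeps the digest on a single line, matching the
    # single-line string the Python script prints.
    openssl dgst -sha512 -binary ./some-artifact.exe | openssl base64 -A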

.github/scripts/icon-beta.png vendored Normal file (binary, 49 KiB; not shown)

.github/scripts/icon-nightly.png vendored Normal file (binary, 49 KiB; not shown)

.github/scripts/rename-tauri-app.sh vendored Normal file

@@ -0,0 +1,48 @@
#!/bin/bash

# Check if the correct number of arguments is provided
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <path_to_json_input_file> <channel>"
    exit 1
fi

INPUT_JSON_FILE="$1"
CHANNEL="$2"

if [ "$CHANNEL" == "nightly" ]; then
    UPDATER="latest"
else
    UPDATER="beta"
fi

# Check if the input file exists
if [ ! -f "$INPUT_JSON_FILE" ]; then
    echo "Input file not found: $INPUT_JSON_FILE"
    exit 1
fi

# Use jq to transform the content
jq --arg channel "$CHANNEL" --arg updater "$UPDATER" '
  .productName = "Jan-\($channel)" |
  .identifier = "jan-\($channel).ai.app"
' "$INPUT_JSON_FILE" > ./tauri.conf.json.tmp

cat ./tauri.conf.json.tmp
rm "$INPUT_JSON_FILE"
mv ./tauri.conf.json.tmp "$INPUT_JSON_FILE"

# Update the layout file
# LAYOUT_FILE_PATH="web/app/layout.tsx"
# if [ ! -f "$LAYOUT_FILE_PATH" ]; then
#     echo "File does not exist: $LAYOUT_FILE_PATH"
#     exit 1
# fi
# Perform the replacements
# sed -i -e "s#Jan#Jan-$CHANNEL#g" "$LAYOUT_FILE_PATH"
# Notify completion
# echo "File has been updated: $LAYOUT_FILE_PATH"
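
A minimal sketch of how this script is meant to be invoked (the sample config content is illustrative, not the repo's real tauri.conf.json; note that UPDATER is computed above but unused in the jq filter shown):

    # Hypothetical smoke test for the rename script.
    echo '{"productName": "Jan", "identifier": "ai.jan.app"}' > tauri.conf.json
    bash .github/scripts/rename-tauri-app.sh tauri.conf.json nightly
    # Expect productName "Jan-nightly" and identifier "jan-nightly.ai.app"
    cat tauri.conf.json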


@ -1,86 +0,0 @@
name: Electron Builder - Beta Build
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
jobs:
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-build-macos.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
sync-temp-to-latest:
needs: [build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: Sync temp to latest
run: |
# sync temp-beta to beta by copy files that are different or new
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
noti-discord-and-update-url-readme:
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
runs-on: ubuntu-latest
steps:
- name: Set version to environment variable
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
VERSION="${VERSION#v}"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Notify Discord
uses: Ilshidur/action-discord@master
with:
args: |
Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information:
- Windows: https://delta.jan.ai/beta/jan-beta-win-x64-{{ VERSION }}.exe
- macOS Universal: https://delta.jan.ai/beta/jan-beta-mac-universal-{{ VERSION }}.dmg
- Linux Deb: https://delta.jan.ai/beta/jan-beta-linux-amd64-{{ VERSION }}.deb
- Linux AppImage: https://delta.jan.ai/beta/jan-beta-linux-x86_64-{{ VERSION }}.AppImage
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}


@@ -48,47 +48,83 @@ jobs:
    uses: ./.github/workflows/template-get-update-version.yml
  build-macos:
    uses: ./.github/workflows/template-build-macos.yml
    uses: ./.github/workflows/template-tauri-build-macos.yml
    needs: [get-update-version, set-public-provider]
    secrets: inherit
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      nightly: true
      beta: false
      channel: nightly
      cortex_api_port: "39261"
  build-windows-x64:
    uses: ./.github/workflows/template-build-windows-x64.yml
    uses: ./.github/workflows/template-tauri-build-windows-x64.yml
    secrets: inherit
    needs: [get-update-version, set-public-provider]
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      nightly: true
      beta: false
      channel: nightly
      cortex_api_port: "39261"
  build-linux-x64:
    uses: ./.github/workflows/template-build-linux-x64.yml
    uses: ./.github/workflows/template-tauri-build-linux-x64.yml
    secrets: inherit
    needs: [get-update-version, set-public-provider]
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      nightly: true
      beta: false
      channel: nightly
      cortex_api_port: "39261"
  sync-temp-to-latest:
    needs: [set-public-provider, build-windows-x64, build-linux-x64, build-macos]
    needs: [get-update-version, set-public-provider, build-windows-x64, build-linux-x64, build-macos]
    runs-on: ubuntu-latest
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
      - name: Install jq
        uses: dcarbone/install-jq-action@v2.0.1
      - name: create latest.json file
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
          LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
          LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
          WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
          WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
          DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
          DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
          jq --arg version "$VERSION" \
            --arg pub_date "$PUB_DATE" \
            --arg linux_signature "$LINUX_SIGNATURE" \
            --arg linux_url "$LINUX_URL" \
            --arg windows_signature "$WINDOWS_SIGNATURE" \
            --arg windows_url "$WINDOWS_URL" \
            --arg darwin_arm_signature "$DARWIN_SIGNATURE" \
            --arg darwin_arm_url "$DARWIN_URL" \
            --arg darwin_amd_signature "$DARWIN_SIGNATURE" \
            --arg darwin_amd_url "$DARWIN_URL" \
            '.version = $version
            | .pub_date = $pub_date
            | .platforms["linux-x86_64"].signature = $linux_signature
            | .platforms["linux-x86_64"].url = $linux_url
            | .platforms["windows-x86_64"].signature = $windows_signature
            | .platforms["windows-x86_64"].url = $windows_url
            | .platforms["darwin-aarch64"].signature = $darwin_arm_signature
            | .platforms["darwin-aarch64"].url = $darwin_arm_url
            | .platforms["darwin-x86_64"].signature = $darwin_amd_signature
            | .platforms["darwin-x86_64"].url = $darwin_amd_url' \
            src-tauri/latest.json.template > latest.json
          cat latest.json
      - name: Sync temp to latest
        if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
        run: |
          aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
          aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}


@@ -1,91 +0,0 @@
name: Electron Builder - Tag

on:
  push:
    tags: ["v[0-9]+.[0-9]+.[0-9]+"]

jobs:
  # Job create Update app version based on latest release tag with build number and save to output
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml
  create-draft-release:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
      version: ${{ steps.get_version.outputs.version }}
    permissions:
      contents: write
    steps:
      - name: Extract tag name without v prefix
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
        env:
          GITHUB_REF: ${{ github.ref }}
      - name: Create Draft Release
        id: create_release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ github.ref_name }}
          token: ${{ secrets.GITHUB_TOKEN }}
          name: "${{ env.VERSION }}"
          draft: true
          prerelease: false
  build-macos:
    uses: ./.github/workflows/template-build-macos.yml
    secrets: inherit
    needs: [get-update-version]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      beta: false
      nightly: false
      new_version: ${{ needs.get-update-version.outputs.new_version }}
  build-windows-x64:
    uses: ./.github/workflows/template-build-windows-x64.yml
    secrets: inherit
    needs: [get-update-version]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      beta: false
      nightly: false
      new_version: ${{ needs.get-update-version.outputs.new_version }}
  build-linux-x64:
    uses: ./.github/workflows/template-build-linux-x64.yml
    secrets: inherit
    needs: [get-update-version]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      beta: false
      nightly: false
      new_version: ${{ needs.get-update-version.outputs.new_version }}
  update_release_draft:
    needs: [build-macos, build-windows-x64, build-linux-x64]
    permissions:
      # write permission is required to create a github release
      contents: write
      # write permission is required for autolabeler
      # otherwise, read permission is required at least
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # (Optional) GitHub Enterprise requires GHE_HOST variable set
      #- name: Set GHE_HOST
      #  run: |
      #    echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
      # Drafts your next Release notes as Pull Requests are merged into "master"
      - uses: release-drafter/release-drafter@v5
        # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
        # with:
        #   config-name: my-config.yml
        #   disable-autolabeler: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -0,0 +1,156 @@
name: Tauri Builder - Beta Build

on:
  push:
    tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]

jobs:
  # Job to create the updated app version based on the latest release tag with build number and save it to output
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml
  create-draft-release:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
      version: ${{ steps.get_version.outputs.version }}
    permissions:
      contents: write
    steps:
      - name: Extract tag name without v prefix
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
        env:
          GITHUB_REF: ${{ github.ref }}
      - name: Create Draft Release
        id: create_release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ github.ref_name }}
          token: ${{ secrets.GITHUB_TOKEN }}
          name: "${{ env.VERSION }}"
          draft: true
          prerelease: false
          generate_release_notes: true
  build-macos:
    uses: ./.github/workflows/template-tauri-build-macos.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: beta
      cortex_api_port: "39271"
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
  build-windows-x64:
    uses: ./.github/workflows/template-tauri-build-windows-x64.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: beta
      cortex_api_port: "39271"
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
  build-linux-x64:
    uses: ./.github/workflows/template-tauri-build-linux-x64.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: beta
      cortex_api_port: "39271"
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
  sync-temp-to-latest:
    needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
      - name: create latest.json file
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
          LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
          LINUX_URL="https://delta.jan.ai/beta/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
          WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
          WINDOWS_URL="https://delta.jan.ai/beta/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
          DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
          DARWIN_URL="https://delta.jan.ai/beta/Jan-beta_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
          jq --arg version "$VERSION" \
            --arg pub_date "$PUB_DATE" \
            --arg linux_signature "$LINUX_SIGNATURE" \
            --arg linux_url "$LINUX_URL" \
            --arg windows_signature "$WINDOWS_SIGNATURE" \
            --arg windows_url "$WINDOWS_URL" \
            --arg darwin_arm_signature "$DARWIN_SIGNATURE" \
            --arg darwin_arm_url "$DARWIN_URL" \
            --arg darwin_amd_signature "$DARWIN_SIGNATURE" \
            --arg darwin_amd_url "$DARWIN_URL" \
            '.version = $version
            | .pub_date = $pub_date
            | .platforms["linux-x86_64"].signature = $linux_signature
            | .platforms["linux-x86_64"].url = $linux_url
            | .platforms["windows-x86_64"].signature = $windows_signature
            | .platforms["windows-x86_64"].url = $windows_url
            | .platforms["darwin-aarch64"].signature = $darwin_arm_signature
            | .platforms["darwin-aarch64"].url = $darwin_arm_url
            | .platforms["darwin-x86_64"].signature = $darwin_amd_signature
            | .platforms["darwin-x86_64"].url = $darwin_amd_url' \
            src-tauri/latest.json.template > latest.json
          cat latest.json
      - name: Sync temp to latest
        run: |
          # sync temp-beta to beta by copying files that are different or new
          aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/latest.json
          aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
          AWS_EC2_METADATA_DISABLED: "true"
      - name: Upload release asset if public provider is github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        uses: actions/upload-release-asset@v1.0.1
        with:
          upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
          asset_path: ./latest.json
          asset_name: latest.json
          asset_content_type: text/json
  noti-discord-and-update-url-readme:
    needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
    runs-on: ubuntu-latest
    steps:
      - name: Set version to environment variable
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          VERSION="${VERSION#v}"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
      - name: Notify Discord
        uses: Ilshidur/action-discord@master
        with:
          args: |
            Jan-beta App version {{ VERSION }} has been released. Use the following links to download the app faster, or visit the GitHub release page for more information:
            - Windows: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_x64-setup.exe
            - macOS Universal: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_universal.dmg
            - Linux Deb: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.deb
            - Linux AppImage: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.AppImage
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}
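
For reference, the jq pipeline in sync-temp-to-latest fills the Tauri updater manifest skeleton from src-tauri/latest.json.template; the generated latest.json has roughly this shape (a sketch inferred from the jq program and URL patterns above; all values are placeholders, and the signatures are the minisign strings produced by the Tauri updater signing key):

    {
      "version": "0.5.0-rc1-beta",
      "pub_date": "2025-04-30T00:00:00.000Z",
      "platforms": {
        "linux-x86_64":   { "signature": "<minisign signature>", "url": "https://delta.jan.ai/beta/Jan-beta_0.5.0-rc1-beta_amd64.AppImage" },
        "windows-x86_64": { "signature": "<minisign signature>", "url": "https://delta.jan.ai/beta/Jan-beta_0.5.0-rc1-beta_x64-setup.exe" },
        "darwin-aarch64": { "signature": "<minisign signature>", "url": "https://delta.jan.ai/beta/Jan-beta_0.5.0-rc1-beta.app.tar.gz" },
        "darwin-x86_64":  { "signature": "<minisign signature>", "url": "https://delta.jan.ai/beta/Jan-beta_0.5.0-rc1-beta.app.tar.gz" }
      }
    }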


@@ -0,0 +1,186 @@
name: Tauri Builder - Nightly / Manual

on:
  schedule:
    - cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday, which is 3 AM UTC+7 on Tuesday, Wednesday, and Thursday
  workflow_dispatch:
    inputs:
      public_provider:
        type: choice
        description: 'Public Provider'
        options:
          - none
          - aws-s3
        default: none
  pull_request_review:
    types: [submitted]

jobs:
  set-public-provider:
    runs-on: ubuntu-latest
    outputs:
      public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
      ref: ${{ steps.set-public-provider.outputs.ref }}
    steps:
      - name: Set public provider
        id: set-public-provider
        run: |
          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
            echo "::set-output name=ref::${{ github.ref }}"
          else
            if [ "${{ github.event_name }}" == "schedule" ]; then
              echo "::set-output name=public_provider::aws-s3"
              echo "::set-output name=ref::refs/heads/dev"
            elif [ "${{ github.event_name }}" == "push" ]; then
              echo "::set-output name=public_provider::aws-s3"
              echo "::set-output name=ref::${{ github.ref }}"
            elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
              echo "::set-output name=public_provider::none"
              echo "::set-output name=ref::${{ github.ref }}"
            else
              echo "::set-output name=public_provider::none"
              echo "::set-output name=ref::${{ github.ref }}"
            fi
          fi
  # Job to create the updated app version based on the latest release tag with build number and save it to output
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml
  build-macos:
    uses: ./.github/workflows/template-tauri-build-macos.yml
    needs: [get-update-version, set-public-provider]
    secrets: inherit
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: nightly
      cortex_api_port: "39261"
  build-windows-x64:
    uses: ./.github/workflows/template-tauri-build-windows-x64.yml
    secrets: inherit
    needs: [get-update-version, set-public-provider]
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: nightly
      cortex_api_port: "39261"
  build-linux-x64:
    uses: ./.github/workflows/template-tauri-build-linux-x64.yml
    secrets: inherit
    needs: [get-update-version, set-public-provider]
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: nightly
      cortex_api_port: "39261"
  sync-temp-to-latest:
    needs: [get-update-version, set-public-provider, build-windows-x64, build-linux-x64, build-macos]
    runs-on: ubuntu-latest
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
      - name: Install jq
        uses: dcarbone/install-jq-action@v2.0.1
      - name: create latest.json file
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
          LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
          LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
          WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
          WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
          DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
          DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
          jq --arg version "$VERSION" \
            --arg pub_date "$PUB_DATE" \
            --arg linux_signature "$LINUX_SIGNATURE" \
            --arg linux_url "$LINUX_URL" \
            --arg windows_signature "$WINDOWS_SIGNATURE" \
            --arg windows_url "$WINDOWS_URL" \
            --arg darwin_arm_signature "$DARWIN_SIGNATURE" \
            --arg darwin_arm_url "$DARWIN_URL" \
            --arg darwin_amd_signature "$DARWIN_SIGNATURE" \
            --arg darwin_amd_url "$DARWIN_URL" \
            '.version = $version
            | .pub_date = $pub_date
            | .platforms["linux-x86_64"].signature = $linux_signature
            | .platforms["linux-x86_64"].url = $linux_url
            | .platforms["windows-x86_64"].signature = $windows_signature
            | .platforms["windows-x86_64"].url = $windows_url
            | .platforms["darwin-aarch64"].signature = $darwin_arm_signature
            | .platforms["darwin-aarch64"].url = $darwin_arm_url
            | .platforms["darwin-x86_64"].signature = $darwin_amd_signature
            | .platforms["darwin-x86_64"].url = $darwin_amd_url' \
            src-tauri/latest.json.template > latest.json
          cat latest.json
      - name: Sync temp to latest
        if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
        run: |
          aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
          aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
          AWS_EC2_METADATA_DISABLED: "true"
  noti-discord-nightly-and-update-url-readme:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
    secrets: inherit
    if: github.event_name == 'schedule'
    uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
    with:
      ref: refs/heads/dev
      build_reason: Nightly
      push_to_branch: dev
      new_version: ${{ needs.get-update-version.outputs.new_version }}
  noti-discord-pre-release-and-update-url-readme:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
    secrets: inherit
    if: github.event_name == 'push'
    uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
    with:
      ref: refs/heads/dev
      build_reason: Pre-release
      push_to_branch: dev
      new_version: ${{ needs.get-update-version.outputs.new_version }}
  noti-discord-manual-and-update-url-readme:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
    secrets: inherit
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
    uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
    with:
      ref: refs/heads/dev
      build_reason: Manual
      push_to_branch: dev
      new_version: ${{ needs.get-update-version.outputs.new_version }}
  comment-pr-build-url:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request_review'
    steps:
      - name: Set up GitHub CLI
        run: |
          curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
          sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
      - name: Comment build URL on PR
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          PR_URL=${{ github.event.pull_request.html_url }}
          RUN_ID=${{ github.run_id }}
          COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
          gh pr comment $PR_URL --body "$COMMENT"
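
One caveat worth flagging: the run steps above emit job outputs via the ::set-output workflow command, which GitHub Actions has deprecated. On current runners the equivalent (a sketch, keeping the same output keys) appends to the file named by $GITHUB_OUTPUT:

    # Deprecated form used in the workflow:
    #   echo "::set-output name=public_provider::aws-s3"
    # Current equivalent:
    echo "public_provider=aws-s3" >> "$GITHUB_OUTPUT"
    echo "ref=refs/heads/dev" >> "$GITHUB_OUTPUT"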

.github/workflows/jan-tauri-build.yml vendored Normal file

@@ -0,0 +1,145 @@
name: Tauri Builder - Tag

on:
  push:
    tags: ["v[0-9]+.[0-9]+.[0-9]+"]

jobs:
  # Job to create the updated app version based on the latest release tag with build number and save it to output
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml
  create-draft-release:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
      version: ${{ steps.get_version.outputs.version }}
    permissions:
      contents: write
    steps:
      - name: Extract tag name without v prefix
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
        env:
          GITHUB_REF: ${{ github.ref }}
      - name: Create Draft Release
        id: create_release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ github.ref_name }}
          token: ${{ secrets.GITHUB_TOKEN }}
          name: "${{ env.VERSION }}"
          draft: true
          prerelease: false
  build-macos:
    uses: ./.github/workflows/template-tauri-build-macos.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      channel: stable
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
  build-windows-x64:
    uses: ./.github/workflows/template-tauri-build-windows-x64.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      channel: stable
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
  build-linux-x64:
    uses: ./.github/workflows/template-tauri-build-linux-x64.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      channel: stable
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
  sync-temp-to-latest:
    needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
      - name: create latest.json file
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
          LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
          LINUX_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
          WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
          WINDOWS_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
          DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
          DARWIN_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-macos.outputs.TAR_NAME }}"
          jq --arg version "$VERSION" \
            --arg pub_date "$PUB_DATE" \
            --arg linux_signature "$LINUX_SIGNATURE" \
            --arg linux_url "$LINUX_URL" \
            --arg windows_signature "$WINDOWS_SIGNATURE" \
            --arg windows_url "$WINDOWS_URL" \
            --arg darwin_arm_signature "$DARWIN_SIGNATURE" \
            --arg darwin_arm_url "$DARWIN_URL" \
            --arg darwin_amd_signature "$DARWIN_SIGNATURE" \
            --arg darwin_amd_url "$DARWIN_URL" \
            '.version = $version
            | .pub_date = $pub_date
            | .platforms["linux-x86_64"].signature = $linux_signature
            | .platforms["linux-x86_64"].url = $linux_url
            | .platforms["windows-x86_64"].signature = $windows_signature
            | .platforms["windows-x86_64"].url = $windows_url
            | .platforms["darwin-aarch64"].signature = $darwin_arm_signature
            | .platforms["darwin-aarch64"].url = $darwin_arm_url
            | .platforms["darwin-x86_64"].signature = $darwin_amd_signature
            | .platforms["darwin-x86_64"].url = $darwin_amd_url' \
            src-tauri/latest.json.template > latest.json
          cat latest.json
      - name: Upload release asset if public provider is github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        uses: actions/upload-release-asset@v1.0.1
        with:
          upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
          asset_path: ./latest.json
          asset_name: latest.json
          asset_content_type: text/json
  update_release_draft:
    needs: [build-macos, build-windows-x64, build-linux-x64]
    permissions:
      # write permission is required to create a github release
      contents: write
      # write permission is required for autolabeler
      # otherwise, read permission is required at least
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # (Optional) GitHub Enterprise requires GHE_HOST variable set
      #- name: Set GHE_HOST
      #  run: |
      #    echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
      # Drafts your next Release notes as Pull Requests are merged into "master"
      - uses: release-drafter/release-drafter@v5
        # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
        # with:
        #   config-name: my-config.yml
        #   disable-autolabeler: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
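
Once the draft release is published, the updater feed can be sanity-checked from a shell (a sketch; jq assumed installed, and the releases/latest/download path assumes latest.json was uploaded as a release asset as shown above):

    # Fetch the updater manifest and confirm the advertised version and platforms.
    curl -fsSL https://github.com/menloresearch/jan/releases/latest/download/latest.json \
      | jq '{version, pub_date, platforms: (.platforms | keys)}'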


@@ -1,127 +0,0 @@
name: Nightly Update cortex cpp

on:
  schedule:
    - cron: '30 19 * * 1-5' # At 01:30 on every day-of-week from Monday through Friday UTC +7
  workflow_dispatch:

jobs:
  update-submodule:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
      actions: write
    outputs:
      pr_number: ${{ steps.check-update.outputs.pr_number }}
      pr_created: ${{ steps.check-update.outputs.pr_created }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: recursive
          ref: dev
          fetch-depth: 0
          token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
      - name: Configure Git
        run: |
          git config --global user.name 'github-actions[bot]'
          git config --global user.email 'github-actions[bot]@users.noreply.github.com'
      - name: Update submodule to latest release
        id: check-update
        env:
          GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
        run: |
          curl -s https://api.github.com/repos/menloresearch/cortex/releases > /tmp/github_api_releases.json
          latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1)
          get_asset_count() {
            local version_name=$1
            cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length'
          }
          cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt"
          current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1)
          current_version_asset_count=$(get_asset_count "$current_version_name")
          latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name")
          if [ "$current_version_name" = "$latest_prerelease_name" ]; then
            echo "The cortex cpp remote repo doesn't have an update today; skipping the cortex.cpp update for today's nightly build"
            echo "::set-output name=pr_created::false"
            exit 0
          fi
          if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then
            echo "The latest prerelease has a different number of assets; something went wrong, skipping the cortex.cpp update for today's nightly build"
            echo "::set-output name=pr_created::false"
            exit 1
          fi
          echo $latest_prerelease_name > $cortex_cpp_version_file_path
          echo "Updated version from $current_version_name to $latest_prerelease_name."
          echo "::set-output name=pr_created::true"
          git add -f $cortex_cpp_version_file_path
          git commit -m "Update cortex cpp nightly to version $latest_prerelease_name"
          branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')"
          git checkout -b $branch_name
          git push origin $branch_name
          pr_title="Update cortex cpp nightly to version $latest_prerelease_name"
          pr_body="This PR updates cortex cpp nightly to version $latest_prerelease_name"
          gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA
          pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number')
          echo "::set-output name=pr_number::$pr_number"
  check-and-merge-pr:
    needs: update-submodule
    if: needs.update-submodule.outputs.pr_created == 'true'
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          submodules: recursive
          fetch-depth: 0
          token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
      - name: Wait for CI to pass
        env:
          GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
        run: |
          pr_number=${{ needs.update-submodule.outputs.pr_number }}
          while true; do
            ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt')
            if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then
              echo "CI is still running, waiting..."
              sleep 60
            else
              echo "CI has completed, checking states..."
              ci_states=$(gh pr checks $pr_number --json state --jq '.[].state')
              if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then
                echo "CI failed, exiting..."
                exit 1
              else
                echo "CI passed, merging PR..."
                break
              fi
            fi
          done
      - name: Merge the PR
        env:
          GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
        run: |
          pr_number=${{ needs.update-submodule.outputs.pr_number }}
          gh pr merge $pr_number --merge --admin
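
Aside: the polling loop in "Wait for CI to pass" can be expressed more compactly with gh's built-in watcher; a sketch, assuming gh pr checks --watch exits non-zero when a required check fails:

    # Block until all checks on the PR finish, then merge on success.
    gh pr checks "$pr_number" --watch && gh pr merge "$pr_number" --merge --admin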


@@ -1,187 +0,0 @@
name: build-linux-x64

on:
  workflow_call:
    inputs:
      ref:
        required: true
        type: string
        default: 'refs/heads/main'
      public_provider:
        required: true
        type: string
        default: none
        description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
      new_version:
        required: true
        type: string
        default: ''
      aws_s3_prefix:
        required: false
        type: string
        default: '/latest/'
      beta:
        required: false
        type: boolean
        default: false
      nightly:
        required: false
        type: boolean
        default: false
      cortex_api_port:
        required: false
        type: string
        default: null
    secrets:
      DELTA_AWS_S3_BUCKET_NAME:
        required: false
      DELTA_AWS_ACCESS_KEY_ID:
        required: false
      DELTA_AWS_SECRET_ACCESS_KEY:
        required: false

jobs:
  build-linux-x64:
    runs-on: ubuntu-latest
    environment: production
    permissions:
      contents: write
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.ref }}
      - name: Replace Icons for Beta Build
        if: inputs.beta == true && inputs.nightly != true
        shell: bash
        run: |
          rm -rf electron/icons/*
          cp electron/icons_dev/jan-beta-512x512.png electron/icons/512x512.png
          cp electron/icons_dev/jan-beta.ico electron/icons/icon.ico
          cp electron/icons_dev/jan-beta.png electron/icons/icon.png
          cp electron/icons_dev/jan-beta-tray@2x.png electron/icons/icon-tray@2x.png
          cp electron/icons_dev/jan-beta-tray.png electron/icons/icon-tray.png
      - name: Replace Icons for Nightly Build
        if: inputs.nightly == true && inputs.beta != true
        shell: bash
        run: |
          rm -rf electron/icons/*
          cp electron/icons_dev/jan-nightly-512x512.png electron/icons/512x512.png
          cp electron/icons_dev/jan-nightly.ico electron/icons/icon.ico
          cp electron/icons_dev/jan-nightly.png electron/icons/icon.png
          cp electron/icons_dev/jan-nightly-tray@2x.png electron/icons/icon-tray@2x.png
          cp electron/icons_dev/jan-nightly-tray.png electron/icons/icon-tray.png
      - name: Installing node
        uses: actions/setup-node@v1
        with:
          node-version: 20
      - name: Install jq
        uses: dcarbone/install-jq-action@v2.0.1
      - name: Update app version base public_provider
        if: inputs.public_provider != 'github'
        run: |
          echo "Version: ${{ inputs.new_version }}"
          # Update the version in electron/package.json
          jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json
          jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
          mv /tmp/package.json web/package.json
          jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/nightly", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-nightly", "channel": "latest"}]' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json
          cat electron/package.json
          chmod +x .github/scripts/rename-app.sh
          .github/scripts/rename-app.sh ./electron/package.json nightly
          chmod +x .github/scripts/rename-workspace.sh
          .github/scripts/rename-workspace.sh ./package.json nightly
          echo "------------------------"
          cat ./electron/package.json
          echo "------------------------"
      - name: Change App Name for beta version
        if: inputs.beta == true
        shell: bash
        run: |
          chmod +x .github/scripts/rename-app.sh
          .github/scripts/rename-app.sh ./electron/package.json beta
          chmod +x .github/scripts/rename-workspace.sh
          .github/scripts/rename-workspace.sh ./package.json beta
          echo "------------------------"
          cat ./electron/package.json
          echo "------------------------"
          cat ./package.json
          jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/beta", "channel": "beta"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-beta", "channel": "beta"}]' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json
          cat electron/package.json
      - name: Update app version base on tag
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
        run: |
          jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json
          jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
          mv /tmp/package.json web/package.json
        env:
          VERSION_TAG: ${{ inputs.new_version }}
      - name: Build and publish app to aws s3 r2 or github artifactory
        if: inputs.public_provider != 'github'
        run: |
          # check public_provider is true or not
          echo "public_provider is ${{ inputs.public_provider }}"
          if [ "${{ inputs.public_provider }}" == "none" ]; then
            make build
          else
            make build-and-publish
          fi
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
          AWS_EC2_METADATA_DISABLED: 'true'
          AWS_MAX_ATTEMPTS: '5'
          POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
          POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
          CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
      - name: Build and publish app to github
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == false
        run: |
          make build-and-publish
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
          POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
      - name: Build and publish app to github
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == true
        run: |
          make build-and-publish
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
          AWS_EC2_METADATA_DISABLED: 'true'
          AWS_MAX_ATTEMPTS: '5'
          POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
          POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
      - name: Upload Artifact .deb file
        if: inputs.public_provider != 'github'
        uses: actions/upload-artifact@v4
        with:
          name: jan-linux-amd64-${{ inputs.new_version }}-deb
          path: ./electron/dist/*.deb
      - name: Upload Artifact .AppImage file
        if: inputs.public_provider != 'github'
        uses: actions/upload-artifact@v4
        with:
          name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
          path: ./electron/dist/*.AppImage


@ -1,233 +0,0 @@
name: build-macos
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
aws_s3_prefix:
required: false
type: string
default: '/latest/'
beta:
required: false
type: boolean
default: false
nightly:
required: false
type: boolean
default: false
cortex_api_port:
required: false
type: string
default: null
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
CODE_SIGN_P12_BASE64:
required: false
CODE_SIGN_P12_PASSWORD:
required: false
APPLE_ID:
required: false
APPLE_APP_SPECIFIC_PASSWORD:
required: false
DEVELOPER_ID:
required: false
jobs:
build-macos:
runs-on: macos-latest
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.beta == true && inputs.nightly != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-beta-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-beta.ico electron/icons/icon.ico
cp electron/icons_dev/jan-beta.png electron/icons/icon.png
cp electron/icons_dev/jan-beta-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-beta-tray.png electron/icons/icon-tray.png
- name: Replace Icons for Nightly Build
if: inputs.nightly == true && inputs.beta != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-nightly-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-nightly.ico electron/icons/icon.ico
cp electron/icons_dev/jan-nightly.png electron/icons/icon.png
cp electron/icons_dev/jan-nightly-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-nightly-tray.png electron/icons/icon-tray.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Update app version based on latest release tag with build number
if: inputs.public_provider != 'github'
run: |
echo "Version: ${{ inputs.new_version }}"
# Update the version in electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/nightly", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-nightly", "channel": "latest"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
# cat electron/package.json
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json nightly
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json nightly
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
- name: Change App Name for beta version
if: inputs.beta == true
shell: bash
run: |
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json beta
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json beta
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/beta", "channel": "beta"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-beta", "channel": "beta"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
- name: Update app version base on tag
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
run: |
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
env:
VERSION_TAG: ${{ inputs.new_version }}
- name: Get Cer for code signing
run: base64 -d <<< "$CODE_SIGN_P12_BASE64" > /tmp/codesign.p12
shell: bash
env:
CODE_SIGN_P12_BASE64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
- uses: apple-actions/import-codesign-certs@v2
continue-on-error: true
with:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build and publish app to aws s3 r2 or github artifactory
if: inputs.public_provider != 'github'
run: |
# decide between build-only and build-and-publish based on public_provider
echo "public_provider is ${{ inputs.public_provider }}"
if [ "${{ inputs.public_provider }}" == "none" ]; then
make build
else
make build-and-publish
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: '/tmp/codesign.p12'
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
CSC_IDENTITY_AUTO_DISCOVERY: 'true'
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: '.'
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
- name: Build and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == false
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: '/tmp/codesign.p12'
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
CSC_IDENTITY_AUTO_DISCOVERY: 'true'
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: '.'
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Build and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == true
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CSC_LINK: '/tmp/codesign.p12'
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
CSC_IDENTITY_AUTO_DISCOVERY: 'true'
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: '.'
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-mac-universal-${{ inputs.new_version }}
path: ./electron/dist/*.dmg

@@ -1,229 +0,0 @@
name: build-windows-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
aws_s3_prefix:
required: false
type: string
default: '/latest/'
beta:
required: false
type: boolean
default: false
nightly:
required: false
type: boolean
default: false
cortex_api_port:
required: false
type: string
default: null
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
AZURE_KEY_VAULT_URI:
required: false
AZURE_CLIENT_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_SECRET:
required: false
AZURE_CERT_NAME:
required: false
jobs:
build-windows-x64:
runs-on: windows-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.beta == true && inputs.nightly != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-beta-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-beta.ico electron/icons/icon.ico
cp electron/icons_dev/jan-beta.png electron/icons/icon.png
cp electron/icons_dev/jan-beta-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-beta-tray.png electron/icons/icon-tray.png
- name: Replace Icons for Nightly Build
if: inputs.nightly == true && inputs.beta != true
shell: bash
run: |
rm -rf electron/icons/*
cp electron/icons_dev/jan-nightly-512x512.png electron/icons/512x512.png
cp electron/icons_dev/jan-nightly.ico electron/icons/icon.ico
cp electron/icons_dev/jan-nightly.png electron/icons/icon.png
cp electron/icons_dev/jan-nightly-tray@2x.png electron/icons/icon-tray@2x.png
cp electron/icons_dev/jan-nightly-tray.png electron/icons/icon-tray.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Update app version based on tag
if: inputs.public_provider != 'github'
id: version_update
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update the version in electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/nightly", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-nightly", "channel": "latest"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq '.build.win.sign = "./sign.js"' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json nightly
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json nightly
chmod +x .github/scripts/rename-uninstaller.sh
.github/scripts/rename-uninstaller.sh nightly
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
echo "------------------------"
- name: Change App Name for beta version
if: inputs.beta == true
shell: bash
run: |
chmod +x .github/scripts/rename-app.sh
.github/scripts/rename-app.sh ./electron/package.json beta
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json beta
chmod +x .github/scripts/rename-uninstaller.sh
.github/scripts/rename-uninstaller.sh beta
echo "------------------------"
cat ./electron/package.json
echo "------------------------"
cat ./package.json
echo "------------------------"
cat ./electron/scripts/uninstaller.nsh
jq '.build.publish = [{"provider": "generic", "url": "https://delta.jan.ai/beta", "channel": "beta"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "temp-beta", "channel": "beta"}]' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
cat electron/package.json
- name: Update app version based on tag
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
shell: bash
run: |
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
jq '.build.win.sign = "./sign.js"' electron/package.json > /tmp/package.json
mv /tmp/package.json electron/package.json
env:
VERSION_TAG: ${{ inputs.new_version }}
- name: Install AzureSignTool
run: |
dotnet tool install --global AzureSignTool
- name: Build and publish app to aws s3 r2 or github artifactory
shell: bash
if: inputs.public_provider != 'github'
run: |
# decide between build-only and build-and-publish based on public_provider
echo "public_provider is ${{ inputs.public_provider }}"
if [ "${{ inputs.public_provider }}" == "none" ]; then
make build
else
make build-and-publish
fi
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: homebrewltd
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
- name: Build app and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == false
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: homebrewltd
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Build app and publish app to github
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github' && inputs.beta == true
run: |
make build-and-publish
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
# AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AZURE_CERT_NAME: homebrewltd
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-win-x64-${{ inputs.new_version }}
path: ./electron/dist/*.exe

@@ -44,9 +44,12 @@ jobs:
exit 1
}
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
echo "Tag detected, set output follow tag"
echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}"
sanitized_tag="${{ steps.tag.outputs.tag }}"
# Remove the 'v' prefix if it exists
sanitized_tag="${sanitized_tag#v}"
echo "::set-output name=new_version::$sanitized_tag"
else
# Get the latest release tag from GitHub API
LATEST_TAG=$(get_latest_tag)
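The `${sanitized_tag#v}` expansion strips a single leading `v` and leaves the value untouched otherwise; a quick standalone check with illustrative tags:

    for tag in v0.5.16 0.5.16; do
      sanitized_tag="$tag"
      # Remove the 'v' prefix if it exists
      sanitized_tag="${sanitized_tag#v}"
      echo "$tag -> $sanitized_tag"   # both print 0.5.16
    done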

@@ -47,10 +47,10 @@ jobs:
with:
args: |
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
- Windows: https://delta.jan.ai/nightly/jan-nightly-win-x64-{{ VERSION }}.exe
- macOS Universal: https://delta.jan.ai/nightly/jan-nightly-mac-universal-{{ VERSION }}.dmg
- Linux Deb: https://delta.jan.ai/nightly/jan-nightly-linux-amd64-{{ VERSION }}.deb
- Linux AppImage: https://delta.jan.ai/nightly/jan-nightly-linux-x86_64-{{ VERSION }}.AppImage
- Windows: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_x64-setup.exe
- macOS Universal: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_universal.dmg
- Linux Deb: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.deb
- Linux AppImage: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.AppImage
- Github action run: https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }}
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}

@@ -0,0 +1,277 @@
name: tauri-build-linux-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ""
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
DEB_SIG:
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
APPIMAGE_SIG:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
jobs:
build-linux-x64:
runs-on: ubuntu-22.04
outputs:
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android/sdk/ndk
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo apt-get clean
echo "Disk space after cleanup:"
df -h
- name: Replace Icons for Beta and Nightly Builds
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Install Tauri dependencies
run: |
sudo apt update
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev
- name: Update app version based on public_provider
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
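For context, the updater endpoint configured above serves the manifest the Tauri updater polls. Per the Tauri v2 updater documentation its shape is roughly the sketch below; the signature field carries the contents of the .sig files this job collects later (all values here are illustrative placeholders):

    {
      "version": "0.5.16-728",
      "pub_date": "2025-04-28T00:00:00Z",
      "platforms": {
        "linux-x86_64": {
          "signature": "<contents of the .AppImage.sig file>",
          "url": "https://delta.jan.ai/nightly/Jan-nightly_0.5.16-728_amd64.AppImage"
        }
      }
    }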
- name: Build app
run: |
make build-tauri
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
path: ./src-tauri/target/release/bundle/deb/*.deb
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
## create latest-linux.yml for linux electron auto updater (no zip is produced on linux)
- name: Create latest-linux.yml for linux electron auto updater
id: packageinfo
run: |
cd ./src-tauri/target/release/bundle
if [ "${{ inputs.channel }}" != "stable" ]; then
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
else
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
fi
DEB_FILE_SIZE=$(stat -c%s deb/$DEB_FILE_NAME)
APPIMAGE_FILE_SIZE=$(stat -c%s appimage/$APPIMAGE_FILE_NAME)
echo "deb file size: $DEB_FILE_SIZE"
echo "appimage file size: $APPIMAGE_FILE_SIZE"
DEB_SHA512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py deb/$DEB_FILE_NAME)
APPIMAGE_SHA512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py appimage/$APPIMAGE_FILE_NAME)
echo "deb sha512 checksum: $DEB_SHA512_CHECKSUM"
echo "appimage sha512 checksum: $APPIMAGE_SHA512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest-linux.yml file
echo "version: ${{ inputs.new_version }}" > latest-linux.yml
echo "files:" >> latest-linux.yml
echo " - url: $DEB_FILE_NAME" >> latest-linux.yml
echo " sha512: $DEB_SHA512_CHECKSUM" >> latest-linux.yml
echo " size: $DEB_FILE_SIZE" >> latest-linux.yml
echo " - url: $APPIMAGE_FILE_NAME" >> latest-linux.yml
echo " sha512: $APPIMAGE_SHA512_CHECKSUM" >> latest-linux.yml
echo " size: $APPIMAGE_FILE_SIZE" >> latest-linux.yml
echo "path: $APPIMAGE_FILE_NAME" >> latest-linux.yml
echo "sha512: $APPIMAGE_SHA512_CHECKSUM" >> latest-linux.yml
echo "releaseDate: $CURRENT_TIME" >> latest-linux.yml
cat latest-linux.yml
cp latest-linux.yml beta-linux.yml
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle
# Upload for electron updater
aws s3 cp ./latest-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-linux.yml
aws s3 cp ./beta-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-linux.yml
# Upload for tauri updater
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
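After these copies the channel prefix of the bucket holds, for a hypothetical nightly build:

    temp-nightly/
    ├── latest-linux.yml
    ├── beta-linux.yml
    ├── Jan-nightly_0.5.16-728_amd64.AppImage
    └── Jan-nightly_0.5.16-728_amd64.deb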
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/latest-linux.yml
asset_name: latest-linux.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/beta-linux.yml
asset_name: beta-linux.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_content_type: application/octet-stream

@@ -0,0 +1,285 @@
name: tauri-build-macos
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ""
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
CODE_SIGN_P12_BASE64:
required: false
CODE_SIGN_P12_PASSWORD:
required: false
APPLE_ID:
required: false
APPLE_APP_SPECIFIC_PASSWORD:
required: false
DEVELOPER_ID:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
MAC_UNIVERSAL_SIG:
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
TAR_NAME:
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
jobs:
build-macos:
runs-on: macos-latest
outputs:
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta and Nightly Builds
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version based on latest release tag with build number
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Get key for notarize
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
shell: bash
env:
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
- uses: apple-actions/import-codesign-certs@v2
continue-on-error: true
with:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build app
run: |
rustup target add x86_64-apple-darwin
make build-tauri
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
APP_PATH: '.'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
APPLE_API_KEY_PATH: /tmp/notary-key.p8
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
path: |
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
## create zip file and latest-mac.yml for mac electron auto updater
- name: Create zip file and latest-mac.yml for mac electron auto updater
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
if [ "${{ inputs.channel }}" != "stable" ]; then
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
else
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
TAR_NAME=Jan.app.tar.gz
fi
FILE_SIZE=$(stat -f%z $FILE_NAME)
echo "size: $FILE_SIZE"
SHA512_CHECKSUM=$(python3 ../../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
echo "sha512: $SHA512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%SZ") # BSD date on macOS has no %N, so millisecond precision is dropped here
echo "releaseDate: $CURRENT_TIME"
# Create latest-mac.yml file
echo "version: ${{ inputs.new_version }}" > latest-mac.yml
echo "files:" >> latest-mac.yml
echo " - url: $FILE_NAME" >> latest-mac.yml
echo " sha512: $SHA512_CHECKSUM" >> latest-mac.yml
echo " size: $FILE_SIZE" >> latest-mac.yml
echo "path: $FILE_NAME" >> latest-mac.yml
echo "sha512: $SHA512_CHECKSUM" >> latest-mac.yml
echo "releaseDate: $CURRENT_TIME" >> latest-mac.yml
cat latest-mac.yml
cp latest-mac.yml beta-mac.yml
echo "::set-output name=MAC_UNIVERSAL_SIG::$MAC_UNIVERSAL_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=DMG_NAME::$DMG_NAME"
echo "::set-output name=TAR_NAME::$TAR_NAME"
id: metadata
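Note the platform-specific stat flags: this macOS job uses the BSD form, while the Linux job and the Windows job (Git Bash) use the GNU form; the two spellings are not interchangeable:

    stat -f%z file.dmg   # BSD stat (macOS): size in bytes
    stat -c%s file.dmg   # GNU stat (Linux / Git Bash): same query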
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle
# Upload for electron updater
aws s3 cp ./macos/latest-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-mac.yml
aws s3 cp ./macos/beta-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-mac.yml
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
# Upload for tauri updater
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/latest-mac.yml
asset_name: latest-mac.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/beta-mac.yml
asset_name: beta-mac.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/gzip
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
asset_content_type: application/gzip

@@ -0,0 +1,286 @@
name: tauri-build-windows-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: "refs/heads/main"
public_provider:
required: true
type: string
default: none
description: "none: build only, github: build and publish to github, aws s3: build and publish to aws s3"
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ""
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
AZURE_KEY_VAULT_URI:
required: false
AZURE_CLIENT_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_SECRET:
required: false
AZURE_CERT_NAME:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
WIN_SIG:
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
jobs:
build-windows-x64:
runs-on: windows-latest
outputs:
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta and Nightly Builds
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version based on tag
id: version_update
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
echo "------------------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
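Re-running the helper on the three documented shapes makes the mapping concrete; the values follow directly from the function above:

    generate_build_version "0.5.6"          # new_base_version=0.5.6, new_build_version=0.5.6.0
    generate_build_version "0.5.6-rc2-beta" # new_base_version=0.5.6, new_build_version=0.5.6.2
    generate_build_version "0.5.6-1213"     # new_base_version=0.5.6, new_build_version=0.5.6.1213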
- name: Install AzureSignTool
run: |
dotnet tool install --global --version 6.0.0 AzureSignTool
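The signing hook itself is not shown in this diff; with the environment below it presumably boils down to an AzureSignTool invocation along these lines (long-form flags from the AzureSignTool README, installer name hypothetical):

    azuresigntool sign \
      --azure-key-vault-url "$AZURE_KEY_VAULT_URI" \
      --azure-key-vault-client-id "$AZURE_CLIENT_ID" \
      --azure-key-vault-tenant-id "$AZURE_TENANT_ID" \
      --azure-key-vault-client-secret "$AZURE_CLIENT_SECRET" \
      --azure-key-vault-certificate "$AZURE_CERT_NAME" \
      --timestamp-rfc3161 http://timestamp.digicert.com \
      --file-digest sha256 \
      Jan_0.5.16_x64-setup.exe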
- name: Build app
shell: bash
run: |
make build-tauri
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: "true"
AWS_MAX_ATTEMPTS: "5"
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
## create latest.yml for windows electron auto updater
- name: Create latest.yml for windows electron auto updater
shell: bash
run: |
cd ./src-tauri/target/release/bundle/nsis
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
fi
FILE_SIZE=$(stat -c %s $FILE_NAME)
echo "size: $FILE_SIZE"
SHA512_CHECKSUM=$(python3 ../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
echo "sha512: $SHA512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest.yml file
echo "version: ${{ inputs.new_version }}" > latest.yml
echo "files:" >> latest.yml
echo " - url: $FILE_NAME" >> latest.yml
echo " sha512: $SHA512_CHECKSUM" >> latest.yml
echo " size: $FILE_SIZE" >> latest.yml
echo "path: $FILE_NAME" >> latest.yml
echo "sha512: $SHA512_CHECKSUM" >> latest.yml
echo "releaseDate: $CURRENT_TIME" >> latest.yml
cat latest.yml
cp latest.yml beta.yml
echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
id: metadata
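electron-builder treats the sha512 field as a base64-encoded SHA-512 digest, so electron-checksum.py (not included in this diff) is presumably equivalent to:

    openssl dgst -sha512 -binary "$FILE_NAME" | base64 -w 0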
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
shell: bash
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle/nsis
# Upload for electron updater
aws s3 cp ./latest.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest.yml
aws s3 cp ./beta.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta.yml
# Upload for tauri updater
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/latest.yml
asset_name: latest.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/beta.yml
asset_name: beta.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream

.gitignore
@@ -23,19 +23,6 @@ coverage
*.log
core/lib/**
# Nitro binary files
extensions/*-extension/bin/*/nitro
extensions/*-extension/bin/*/*.metal
extensions/*-extension/bin/*/*.exe
extensions/*-extension/bin/*/*.dll
extensions/*-extension/bin/*/*.exp
extensions/*-extension/bin/*/*.lib
extensions/*-extension/bin/saved-*
extensions/*-extension/bin/*.tar.gz
extensions/*-extension/bin/vulkaninfoSDK.exe
extensions/*-extension/bin/vulkaninfo
# Turborepo
.turbo
electron/test-data
@@ -51,3 +38,9 @@ electron/shared/**
# docs
docs/yarn.lock
electron/.version.bak
src-tauri/binaries/engines/cortex.llamacpp
src-tauri/resources/themes
src-tauri/Cargo.lock
src-tauri/icons
!src-tauri/icons/icon.png
src-tauri/gen/apple

@@ -39,6 +39,10 @@ endif
dev: check-file-counts
yarn dev
dev-tauri: check-file-counts
yarn install:cortex
yarn dev:tauri
# Linting
lint: check-file-counts
yarn lint
@@ -115,6 +119,9 @@ build-and-publish: check-file-counts
build: check-file-counts
yarn build
build-tauri: check-file-counts
yarn build-tauri
clean:
ifeq ($(OS),Windows_NT)
-powershell -Command "Get-ChildItem -Path . -Include node_modules, .next, dist, build, out, .turbo, .yarn -Recurse -Directory | Remove-Item -Recurse -Force"
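With these targets the local flow mirrors the CI jobs above, assuming the yarn scripts they call exist in the workspace:

    make dev-tauri    # install the cortex dependency, then start the Tauri dev build
    make build-tauri  # produce the release bundles the CI jobs upload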

@@ -1,3 +1,6 @@
/**
* @jest-environment jsdom
*/
import { openExternalUrl } from './core'
import { joinPath } from './core'
import { openFileExplorer } from './core'
@@ -25,7 +28,7 @@ describe('test core apis', () => {
},
}
const result = await joinPath(paths)
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith(paths)
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith({ args: paths })
expect(result).toBe('/path/one/path/two')
})
@@ -37,7 +40,7 @@ describe('test core apis', () => {
},
}
const result = await openFileExplorer(path)
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith(path)
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith({ path })
expect(result).toBe('opened')
})
@@ -51,20 +54,6 @@ describe('test core apis', () => {
expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled()
expect(result).toBe('/path/to/jan/data')
})
it('should execute function on main process', async () => {
const extension = 'testExtension'
const method = 'testMethod'
const args = ['arg1', 'arg2']
globalThis.core = {
api: {
invokeExtensionFunc: jest.fn().mockResolvedValue('result'),
},
}
const result = await executeOnMain(extension, method, ...args)
expect(globalThis.core.api.invokeExtensionFunc).toHaveBeenCalledWith(extension, method, ...args)
expect(result).toBe('result')
})
})
describe('dirName - just a pass thru api', () => {

@@ -13,8 +13,11 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom
extension,
method,
...args
) => globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
) => {
if ('electronAPI' in window && window.electronAPI)
return globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
return () => {}
}
/**
* Gets Jan's data folder path.
@@ -29,15 +32,15 @@ const getJanDataFolderPath = (): Promise<string> => globalThis.core.api?.getJanD
* @returns {Promise<any>} A promise that resolves when the file explorer is opened.
*/
const openFileExplorer: (path: string) => Promise<any> = (path) =>
globalThis.core.api?.openFileExplorer(path)
globalThis.core.api?.openFileExplorer({ path })
/**
* Joins multiple paths together.
* @param paths - The paths to join.
* @returns {Promise<string>} A promise that resolves with the joined path.
*/
const joinPath: (paths: string[]) => Promise<string> = (paths) =>
globalThis.core.api?.joinPath(paths)
const joinPath: (args: string[]) => Promise<string> = (args) =>
globalThis.core.api?.joinPath({ args })
/**
* Get dirname of a file path.

@@ -1,7 +1,5 @@
import { BaseExtension } from './extension'
import { SettingComponentProps } from '../types'
import { getJanDataFolderPath, joinPath } from './core'
import { fs } from './fs'
jest.mock('./core')
jest.mock('./fs')
@@ -90,18 +88,32 @@ describe('BaseExtension', () => {
{ key: 'setting2', controllerProps: { value: 'value2' } } as any,
]
;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension')
;(fs.existsSync as jest.Mock).mockResolvedValue(false)
;(fs.mkdir as jest.Mock).mockResolvedValue(undefined)
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
const localStorageMock = (() => {
let store: Record<string, string> = {}
return {
getItem: (key: string) => store[key] || null,
setItem: (key: string, value: string) => {
store[key] = value
},
removeItem: (key: string) => {
delete store[key]
},
clear: () => {
store = {}
},
}
})()
Object.defineProperty(global, 'localStorage', {
value: localStorageMock,
})
const mock = jest.spyOn(localStorage, 'setItem')
await baseExtension.registerSettings(settings)
expect(fs.mkdir).toHaveBeenCalledWith('/data/settings/TestExtension')
expect(fs.writeFileSync).toHaveBeenCalledWith(
'/data/settings/TestExtension',
JSON.stringify(settings, null, 2)
expect(mock).toHaveBeenCalledWith(
'TestExtension',
JSON.stringify(settings)
)
})
@@ -125,17 +137,15 @@ describe('BaseExtension', () => {
]
jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension/settings.json')
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
const mockSetItem = jest.spyOn(localStorage, 'setItem')
await baseExtension.updateSettings([
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any,
])
expect(fs.writeFileSync).toHaveBeenCalledWith(
'/data/settings/TestExtension/settings.json',
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }], null, 2)
expect(mockSetItem).toHaveBeenCalledWith(
'TestExtension',
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }])
)
})
})

@@ -1,7 +1,4 @@
import { Model, ModelEvent, SettingComponentProps } from '../types'
import { getJanDataFolderPath, joinPath } from './core'
import { events } from './events'
import { fs } from './fs'
import { Model, SettingComponentProps } from '../types'
import { ModelManager } from './models'
export enum ExtensionTypeEnum {
@@ -117,22 +114,13 @@ export abstract class BaseExtension implements ExtensionType {
return
}
const extensionSettingFolderPath = await joinPath([
await getJanDataFolderPath(),
'settings',
this.name,
])
settings.forEach((setting) => {
setting.extensionName = this.name
})
try {
if (!(await fs.existsSync(extensionSettingFolderPath)))
await fs.mkdir(extensionSettingFolderPath)
const settingFilePath = await joinPath([extensionSettingFolderPath, this.settingFileName])
const oldSettings = localStorage.getItem(this.name)
// Persists new settings
if (await fs.existsSync(settingFilePath)) {
const oldSettings = JSON.parse(await fs.readFileSync(settingFilePath, 'utf-8'))
if (oldSettings) {
settings.forEach((setting) => {
// Keep setting value
if (setting.controllerProps && Array.isArray(oldSettings))
@@ -141,7 +129,7 @@ export abstract class BaseExtension implements ExtensionType {
)?.controllerProps?.value
})
}
await fs.writeFileSync(settingFilePath, JSON.stringify(settings, null, 2))
localStorage.setItem(this.name, JSON.stringify(settings))
} catch (err) {
console.error(err)
}
@@ -180,21 +168,14 @@ export abstract class BaseExtension implements ExtensionType {
async getSettings(): Promise<SettingComponentProps[]> {
if (!this.name) return []
const settingPath = await joinPath([
await getJanDataFolderPath(),
this.settingFolderName,
this.name,
this.settingFileName,
])
try {
if (!(await fs.existsSync(settingPath))) return []
const content = await fs.readFileSync(settingPath, 'utf-8')
const settings: SettingComponentProps[] = JSON.parse(content)
return settings
const settingsString = localStorage.getItem(this.name);
if (!settingsString) return [];
const settings: SettingComponentProps[] = JSON.parse(settingsString);
return settings;
} catch (err) {
console.warn(err)
return []
console.warn(err);
return [];
}
}
@@ -220,20 +201,8 @@ export abstract class BaseExtension implements ExtensionType {
if (!updatedSettings.length) updatedSettings = componentProps as SettingComponentProps[]
const settingFolder = await joinPath([
await getJanDataFolderPath(),
this.settingFolderName,
this.name,
])
if (!(await fs.existsSync(settingFolder))) {
await fs.mkdir(settingFolder)
}
const settingPath = await joinPath([settingFolder, this.settingFileName])
await fs.writeFileSync(settingPath, JSON.stringify(updatedSettings, null, 2))
localStorage.setItem(this.name, JSON.stringify(updatedSettings));
updatedSettings.forEach((setting) => {
this.onSettingUpdate<typeof setting.controllerProps.value>(
setting.key,

@@ -40,12 +40,13 @@ export abstract class AIEngine extends BaseExtension {
* Stops the model.
*/
async unloadModel(model?: Model): Promise<any> {
if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
if (model?.engine && model.engine.toString() !== this.provider)
return Promise.resolve()
events.emit(ModelEvent.OnModelStopped, model ?? {})
return Promise.resolve()
}
/*
/**
* Inference request
*/
inference(data: MessageRequest) {}

@@ -76,7 +76,7 @@ export abstract class OAIEngine extends AIEngine {
const timestamp = Date.now() / 1000
const message: ThreadMessage = {
id: ulid(),
thread_id: data.threadId,
thread_id: data.thread?.id ?? data.threadId,
type: data.type,
assistant_id: data.assistantId,
role: ChatCompletionRole.Assistant,
@@ -104,6 +104,7 @@
messages: data.messages ?? [],
model: model.id,
stream: true,
tools: data.tools,
...model.parameters,
}
if (this.transformPayload) {

@@ -36,31 +36,31 @@ describe('fs module', () => {
it('should call readFileSync with correct arguments', () => {
const args = ['path/to/file']
fs.readFileSync(...args)
expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith({ args })
})
it('should call existsSync with correct arguments', () => {
const args = ['path/to/file']
fs.existsSync(...args)
expect(globalThis.core.api.existsSync).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.existsSync).toHaveBeenCalledWith({ args })
})
it('should call readdirSync with correct arguments', () => {
const args = ['path/to/directory']
fs.readdirSync(...args)
expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith({ args })
})
it('should call mkdir with correct arguments', () => {
const args = ['path/to/directory']
fs.mkdir(...args)
expect(globalThis.core.api.mkdir).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.mkdir).toHaveBeenCalledWith({ args })
})
it('should call rm with correct arguments', () => {
const args = ['path/to/directory']
fs.rm(...args)
expect(globalThis.core.api.rm).toHaveBeenCalledWith(...args, { recursive: true, force: true })
expect(globalThis.core.api.rm).toHaveBeenCalledWith({ args })
})
it('should call unlinkSync with correct arguments', () => {

@@ -19,29 +19,29 @@ const writeBlob: (path: string, data: string) => Promise<any> = (path, data) =>
* Reads the contents of a file at the specified path.
* @returns {Promise<any>} A Promise that resolves with the contents of the file.
*/
const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync(...args)
const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync({ args })
/**
* Check whether the file exists
* @param {string} path
* @returns {boolean} A boolean indicating whether the path is a file.
*/
const existsSync = (...args: any[]) => globalThis.core.api?.existsSync(...args)
const existsSync = (...args: any[]) => globalThis.core.api?.existsSync({ args })
/**
* List the directory files
* @returns {Promise<any>} A Promise that resolves with the contents of the directory.
*/
const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync(...args)
const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync({ args })
/**
* Creates a directory at the specified path.
* @returns {Promise<any>} A Promise that resolves when the directory is created successfully.
*/
const mkdir = (...args: any[]) => globalThis.core.api?.mkdir(...args)
const mkdir = (...args: any[]) => globalThis.core.api?.mkdir({ args })
/**
* Removes a directory at the specified path.
* @returns {Promise<any>} A Promise that resolves when the directory is removed successfully.
*/
const rm = (...args: any[]) => globalThis.core.api?.rm(...args, { recursive: true, force: true })
const rm = (...args: any[]) => globalThis.core.api?.rm({ args })
/**
* Deletes a file from the local file system.

@@ -3,7 +3,6 @@ import * as Events from './events'
import * as FileSystem from './fs'
import * as Extension from './extension'
import * as Extensions from './extensions'
import * as Tools from './tools'
import * as Models from './models'
describe('Module Tests', () => {
@@ -27,10 +26,6 @@ describe('Module Tests', () => {
expect(Extensions).toBeDefined()
})
it('should export all base tools', () => {
expect(Tools).toBeDefined()
})
it('should export all base tools', () => {
expect(Models).toBeDefined()
})

@@ -28,12 +28,6 @@ export * from './extension'
*/
export * from './extensions'
/**
* Export all base tools.
* @module
*/
export * from './tools'
/**
* Export all base models.
* @module

@@ -1,5 +0,0 @@
it('should not throw any errors when imported', () => {
expect(() => require('./index')).not.toThrow();
})

@@ -1,2 +0,0 @@
export * from './manager'
export * from './tool'

@@ -1,47 +0,0 @@
import { AssistantTool, MessageRequest } from '../../types'
import { InferenceTool } from './tool'
/**
* Manages the registration and retrieval of inference tools.
*/
export class ToolManager {
public tools = new Map<string, InferenceTool>()
/**
* Registers a tool.
* @param tool - The tool to register.
*/
register<T extends InferenceTool>(tool: T) {
this.tools.set(tool.name, tool)
}
/**
* Retrieves a tool by it's name.
* @param name - The name of the tool to retrieve.
* @returns The tool, if found.
*/
get<T extends InferenceTool>(name: string): T | undefined {
return this.tools.get(name) as T | undefined
}
/*
** Process the message request with the tools.
*/
process(request: MessageRequest, tools: AssistantTool[]): Promise<MessageRequest> {
return tools.reduce((prevPromise, currentTool) => {
return prevPromise.then((prevResult) => {
return currentTool.enabled
? this.get(currentTool.type)?.process(prevResult, currentTool) ??
Promise.resolve(prevResult)
: Promise.resolve(prevResult)
})
}, Promise.resolve(request))
}
/**
* The instance of the tool manager.
*/
static instance(): ToolManager {
return (window.core?.toolManager as ToolManager) ?? new ToolManager()
}
}

@@ -1,63 +0,0 @@
import { ToolManager } from '../../browser/tools/manager'
import { InferenceTool } from '../../browser/tools/tool'
import { AssistantTool, MessageRequest } from '../../types'
class MockInferenceTool implements InferenceTool {
name = 'mockTool'
process(request: MessageRequest, tool: AssistantTool): Promise<MessageRequest> {
return Promise.resolve(request)
}
}
it('should register a tool', () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
expect(manager.get(tool.name)).toBe(tool)
})
it('should retrieve a tool by its name', () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
const retrievedTool = manager.get(tool.name)
expect(retrievedTool).toBe(tool)
})
it('should return undefined for a non-existent tool', () => {
const manager = new ToolManager()
const retrievedTool = manager.get('nonExistentTool')
expect(retrievedTool).toBeUndefined()
})
it('should process the message request with enabled tools', async () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
const request: MessageRequest = { message: 'test' } as any
const tools: AssistantTool[] = [{ type: 'mockTool', enabled: true }] as any
const result = await manager.process(request, tools)
expect(result).toBe(request)
})
it('should skip processing for disabled tools', async () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
const request: MessageRequest = { message: 'test' } as any
const tools: AssistantTool[] = [{ type: 'mockTool', enabled: false }] as any
const result = await manager.process(request, tools)
expect(result).toBe(request)
})
it('should throw an error when process is called without implementation', () => {
class TestTool extends InferenceTool {
name = 'testTool'
}
const tool = new TestTool()
expect(() => tool.process({} as MessageRequest)).toThrowError()
})

View File

@ -1,12 +0,0 @@
import { AssistantTool, MessageRequest } from '../../types'
/**
* Represents a base inference tool.
*/
export abstract class InferenceTool {
abstract name: string
/**
* Process a message request and return the processed message request.
*/
abstract process(request: MessageRequest, tool?: AssistantTool): Promise<MessageRequest>
}

View File

@ -8,6 +8,7 @@ import {
normalizeFilePath,
getJanDataFolderPath,
} from '../../helper'
import { readdirSync, readFileSync } from 'fs'
export class App implements Processor {
observer?: Function
@ -25,8 +26,8 @@ export class App implements Processor {
/**
* Joins multiple paths together, with respect to the current OS.
*/
joinPath(args: any[]) {
return join(...args)
joinPath(args: any) {
return join(...('args' in args ? args.args : args))
}
/**
@ -69,10 +70,32 @@ export class App implements Processor {
writeLog(args)
}
/**
* Get app configurations.
*/
getAppConfigurations() {
return appConfiguration()
}
/**
* Get themes from the app data folder.
* @returns The names of the available theme folders.
*/
getThemes() {
const themesPath = join(getJanDataFolderPath(), 'themes')
return readdirSync(themesPath)
}
/**
* Read a theme's theme.json file.
* @param theme - The theme folder name.
* @returns The file contents as a UTF-8 string.
*/
readTheme({ theme }: { theme: string }) {
const themePath = join(getJanDataFolderPath(), 'themes', theme, 'theme.json')
return readFileSync(themePath, { encoding: 'utf-8' })
}
async updateAppConfiguration(args: any) {
await updateAppConfiguration(args)
}
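
Two behavioral notes on this revision: `joinPath` now tolerates both the legacy positional array and the new wrapped `{ args: [...] }` payload, and the theme handlers read directly from the Jan data folder. A small sketch of the two accepted `joinPath` shapes (a standalone restatement of the unwrapping logic, not the exported API):

```typescript
import { join } from 'path'

// Accept either a plain array or a wrapped { args: [...] } payload.
function joinPath(args: any): string {
  return join(...('args' in args ? args.args : args))
}

joinPath(['jan', 'themes'])           // legacy shape
joinPath({ args: ['jan', 'themes'] }) // wrapped invoke payload
// Both produce 'jan/themes' on POSIX systems.
```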

View File

@ -21,18 +21,21 @@ export class FileSystem implements Processor {
return import(FileSystem.moduleName).then((mdl) =>
mdl[route](
...args.map((arg: any, index: number) => {
if(index !== 0) {
const arg0 = args[0]
if ('args' in arg0) arg = arg0.args
if (Array.isArray(arg)) arg = arg[0]
if (index !== 0) {
return arg
}
if (index === 0 && typeof arg !== 'string') {
throw new Error(`Invalid argument ${JSON.stringify(args)}`)
}
const path =
(arg.startsWith(`file:/`) || arg.startsWith(`file:\\`))
? join(getJanDataFolderPath(), normalizeFilePath(arg))
: arg
arg.startsWith(`file:/`) || arg.startsWith(`file:\\`)
? join(getJanDataFolderPath(), normalizeFilePath(arg))
: arg
if(path.startsWith(`http://`) || path.startsWith(`https://`)) {
if (path.startsWith(`http://`) || path.startsWith(`https://`)) {
return path
}
const absolutePath = resolve(path)
@ -88,5 +91,4 @@ export class FileSystem implements Processor {
})
})
}
}
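
The normalization above condenses to one rule set: unwrap a wrapped first argument, rebase `file:` URLs onto the Jan data folder, forward `http(s)` URLs untouched, and resolve everything else to an absolute path. Restated as a standalone helper (assuming helpers shaped like the imported `getJanDataFolderPath` and `normalizeFilePath`):

```typescript
import { join, resolve } from 'path'

// Stand-ins for the helpers imported by the real processor.
declare function getJanDataFolderPath(): string
declare function normalizeFilePath(path: string): string

function normalizePathArg(arg: string): string {
  const path =
    arg.startsWith('file:/') || arg.startsWith('file:\\')
      ? join(getJanDataFolderPath(), normalizeFilePath(arg))
      : arg
  // Remote URLs are passed through; local paths become absolute.
  if (path.startsWith('http://') || path.startsWith('https://')) return path
  return resolve(path)
}
```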

View File

@ -94,8 +94,6 @@ export default class Extension {
`Package ${this.origin} does not contain a valid manifest: ${error}`
)
}
return true
}
/**

View File

@ -18,9 +18,7 @@ export const getAppConfigurations = (): AppConfiguration => {
if (!fs.existsSync(configurationFile)) {
// create default app config if we don't have one
console.debug(
`App config not found, creating default config at ${configurationFile}`
)
console.debug(`App config not found, creating default config at ${configurationFile}`)
fs.writeFileSync(configurationFile, JSON.stringify(appDefaultConfiguration))
return appDefaultConfiguration
}
@ -31,28 +29,23 @@ export const getAppConfigurations = (): AppConfiguration => {
)
return appConfigurations
} catch (err) {
console.error(
`Failed to read app config, return default config instead! Err: ${err}`
)
console.error(`Failed to read app config, return default config instead! Err: ${err}`)
return defaultAppConfig()
}
}
const getConfigurationFilePath = () =>
join(
global.core?.appPath() ||
process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
global.core?.appPath() || process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
configurationFileName
)
export const updateAppConfiguration = (
export const updateAppConfiguration = ({
configuration,
}: {
configuration: AppConfiguration
): Promise<void> => {
}): Promise<void> => {
const configurationFile = getConfigurationFilePath()
console.debug(
'updateAppConfiguration, configurationFile: ',
configurationFile
)
fs.writeFileSync(configurationFile, JSON.stringify(configuration))
return Promise.resolve()
@ -87,14 +80,11 @@ export const getJanExtensionsPath = (): string => {
*/
export const defaultAppConfig = (): AppConfiguration => {
const { app } = require('electron')
const defaultJanDataFolder = join(
app?.getPath('userData') ?? os?.homedir() ?? '',
'data'
)
const defaultJanDataFolder = join(app?.getPath('userData') ?? os?.homedir() ?? '', 'data')
return {
data_folder:
process.env.CI === 'e2e'
? (process.env.APP_CONFIG_PATH ?? resolve('./test-data'))
? process.env.APP_CONFIG_PATH ?? resolve('./test-data')
: defaultJanDataFolder,
quick_ask: false,
}
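
`updateAppConfiguration` now takes its configuration under a named key, matching the wrapped payloads unwrapped by the processors above. A call-site sketch (the import path is assumed, and the call is expected to run inside the main process):

```typescript
import { getAppConfigurations, updateAppConfiguration } from './config' // path assumed

const configuration = getAppConfigurations()
configuration.quick_ask = true
// New calling convention: a single destructured object argument.
updateAppConfiguration({ configuration }).then(() =>
  console.debug('app configuration saved')
)
```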

View File

@ -40,7 +40,7 @@ export enum NativeRoute {
/**
* App Route APIs
* @description Enum of all the routes exposed by the app
*/
*/
export enum AppRoute {
getAppConfigurations = 'getAppConfigurations',
updateAppConfiguration = 'updateAppConfiguration',
@ -51,6 +51,8 @@ export enum AppRoute {
log = 'log',
systemInformation = 'systemInformation',
showToast = 'showToast',
getThemes = 'getThemes',
readTheme = 'readTheme'
}
export enum AppEvent {
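
With `getThemes` and `readTheme` added to `AppRoute`, the renderer can reach the new theme handlers through the generated API bridge. A hedged usage sketch, assuming `window.core.api` exposes one method per route entry (as the extension code later in this change does for thread routes):

```typescript
async function loadFirstTheme(): Promise<unknown | undefined> {
  // getThemes lists the theme folder names in the Jan data folder.
  const themes: string[] = await window.core?.api?.getThemes()
  if (!themes?.length) return undefined
  // readTheme returns the raw theme.json contents as a string.
  const raw: string = await window.core?.api?.readTheme({ theme: themes[0] })
  return JSON.parse(raw)
}
```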

View File

@ -7,6 +7,7 @@ export enum ChatCompletionRole {
System = 'system',
Assistant = 'assistant',
User = 'user',
Tool = 'tool',
}
/**
@ -18,6 +19,9 @@ export type ChatCompletionMessage = {
content?: ChatCompletionMessageContent
/** The role of the author of this message. **/
role: ChatCompletionRole
type?: string
output?: string
tool_call_id?: string
}
export type ChatCompletionMessageContent =
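
The new `Tool` role and `tool_call_id` field support an OpenAI-style tool-calling round trip: the assistant emits a tool call, the client executes it, and the result is sent back as a `tool` message referencing the originating call. A hedged example of such a result message using the types defined above (the id and output are invented):

```typescript
const toolResult: ChatCompletionMessage = {
  role: ChatCompletionRole.Tool,
  // Links this result back to the assistant's tool call.
  tool_call_id: 'call_abc123',
  type: 'function',
  output: JSON.stringify({ temperature: 21, unit: 'celsius' }),
}
```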

View File

@ -36,6 +36,8 @@ export type ThreadMessage = {
type?: string
/** The error code which explain what error type. Used in conjunction with MessageStatus.Error */
error_code?: ErrorCode
tool_call_id?: string
}
/**
@ -43,6 +45,9 @@ export type ThreadMessage = {
* @data_transfer_object
*/
export type MessageRequest = {
/**
* The id of the message request.
*/
id?: string
/**
@ -71,6 +76,11 @@ export type MessageRequest = {
// TODO: deprecate threadId field
thread?: Thread
/**
* ChatCompletion tools
*/
tools?: MessageTool[]
/** Engine name to process */
engine?: string
@ -78,6 +88,24 @@ export type MessageRequest = {
type?: string
}
/**
* ChatCompletion Tool parameters
*/
export type MessageTool = {
type: string
function: MessageFunction
}
/**
* ChatCompletion Tool's function parameters
*/
export type MessageFunction = {
name: string
description?: string
parameters?: Record<string, unknown>
strict?: boolean
}
/**
* The status of the message.
* @data_transfer_object
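
`MessageTool` mirrors the ChatCompletion `tools` request parameter: a typed wrapper around a function declaration whose `parameters` field carries a JSON-schema object. A request advertising one hypothetical function might look like this (the function name and schema are invented):

```typescript
const tools: MessageTool[] = [
  {
    type: 'function',
    function: {
      name: 'get_weather', // hypothetical function
      description: 'Look up the current weather for a city',
      parameters: {
        type: 'object',
        properties: { city: { type: 'string' } },
        required: ['city'],
      },
      strict: true,
    },
  },
]
```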

View File

@ -8,17 +8,10 @@
"author": "Jan <service@jan.ai>",
"license": "AGPL-3.0",
"scripts": {
"clean:modules": "rimraf node_modules/pdf-parse/test && cd node_modules/pdf-parse/lib/pdf.js && rimraf v1.9.426 v1.10.88 v2.0.550",
"build-universal-hnswlib": "[ \"$IS_TEST\" = \"true\" ] && echo \"Skip universal build\" || (cd node_modules/hnswlib-node && arch -x86_64 npx node-gyp rebuild --arch=x64 && mv build/Release/addon.node ./addon-amd64.node && node-gyp rebuild --arch=arm64 && mv build/Release/addon.node ./addon-arm64.node && lipo -create -output build/Release/addon.node ./addon-arm64.node ./addon-amd64.node && rm ./addon-arm64.node && rm ./addon-amd64.node)",
"build": "yarn clean:modules && rolldown -c rolldown.config.mjs",
"build:publish:linux": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install",
"build:publish:darwin": "rimraf *.tgz --glob || true && yarn build-universal-hnswlib && yarn build && ../../.github/scripts/auto-sign.sh && npm pack && cpx *.tgz ../../pre-install",
"build:publish:win32": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install",
"build:publish": "run-script-os",
"build:dev": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install"
"build": "rolldown -c rolldown.config.mjs",
"build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install"
},
"devDependencies": {
"@types/pdf-parse": "^1.1.4",
"cpx": "^1.5.0",
"rimraf": "^3.0.2",
"rolldown": "1.0.0-beta.1",
@ -27,11 +20,6 @@
},
"dependencies": {
"@janhq/core": "../../core/package.tgz",
"@langchain/community": "0.0.13",
"hnswlib-node": "^1.4.2",
"langchain": "^0.0.214",
"node-gyp": "^11.0.0",
"pdf-parse": "^1.1.1",
"ts-loader": "^9.5.0"
},
"files": [
@ -40,8 +28,7 @@
"README.md"
],
"bundleDependencies": [
"@janhq/core",
"hnswlib-node"
"@janhq/core"
],
"installConfig": {
"hoistingLimits": "workspaces"

View File

@ -13,22 +13,5 @@ export default defineConfig([
NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
VERSION: JSON.stringify(pkgJson.version),
},
},
{
input: 'src/node/index.ts',
external: ['@janhq/core/node', 'path', 'hnswlib-node'],
output: {
format: 'cjs',
file: 'dist/node/index.js',
sourcemap: false,
inlineDynamicImports: true,
},
resolve: {
extensions: ['.js', '.ts'],
},
define: {
CORTEX_API_URL: JSON.stringify(`http://127.0.0.1:${process.env.CORTEX_API_PORT ?? "39291"}`),
},
platform: 'node',
},
}
])

View File

@ -1,126 +1,20 @@
import {
fs,
Assistant,
events,
joinPath,
AssistantExtension,
AssistantEvent,
ToolManager,
} from '@janhq/core'
import { RetrievalTool } from './tools/retrieval'
import { Assistant, AssistantExtension } from '@janhq/core'
export default class JanAssistantExtension extends AssistantExtension {
private static readonly _homeDir = 'file://assistants'
async onLoad() {
// Register the retrieval tool
ToolManager.instance().register(new RetrievalTool())
// making the assistant directory
const assistantDirExist = await fs.existsSync(
JanAssistantExtension._homeDir
)
if (
localStorage.getItem(`${this.name}-version`) !== VERSION ||
!assistantDirExist
) {
if (!assistantDirExist) await fs.mkdir(JanAssistantExtension._homeDir)
// Write assistant metadata
await this.createJanAssistant()
// Finished migration
localStorage.setItem(`${this.name}-version`, VERSION)
// Update the assistant list
events.emit(AssistantEvent.OnAssistantsUpdate, {})
}
}
async onLoad() {}
/**
* Called when the extension is unloaded.
*/
onUnload(): void {}
async createAssistant(assistant: Assistant): Promise<void> {
const assistantDir = await joinPath([
JanAssistantExtension._homeDir,
assistant.id,
])
if (!(await fs.existsSync(assistantDir))) await fs.mkdir(assistantDir)
// store the assistant metadata json
const assistantMetadataPath = await joinPath([
assistantDir,
'assistant.json',
])
try {
await fs.writeFileSync(
assistantMetadataPath,
JSON.stringify(assistant, null, 2)
)
} catch (err) {
console.error(err)
}
}
async getAssistants(): Promise<Assistant[]> {
try {
// get all the assistant directories
// get all the assistant metadata json
const results: Assistant[] = []
const allFileName: string[] = await fs.readdirSync(
JanAssistantExtension._homeDir
)
for (const fileName of allFileName) {
const filePath = await joinPath([
JanAssistantExtension._homeDir,
fileName,
])
if (!(await fs.fileStat(filePath))?.isDirectory) continue
const jsonFiles: string[] = (await fs.readdirSync(filePath)).filter(
(file: string) => file === 'assistant.json'
)
if (jsonFiles.length !== 1) {
// does not contain exactly one assistant.json -> ignore
continue
}
const content = await fs.readFileSync(
await joinPath([filePath, jsonFiles[0]]),
'utf-8'
)
const assistant: Assistant =
typeof content === 'object' ? content : JSON.parse(content)
results.push(assistant)
}
return results
} catch (err) {
console.debug(err)
return [this.defaultAssistant]
}
return [this.defaultAssistant]
}
async deleteAssistant(assistant: Assistant): Promise<void> {
if (assistant.id === 'jan') {
return Promise.reject('Cannot delete Jan Assistant')
}
// remove the directory
const assistantDir = await joinPath([
JanAssistantExtension._homeDir,
assistant.id,
])
return fs.rm(assistantDir)
}
private async createJanAssistant(): Promise<void> {
await this.createAssistant(this.defaultAssistant)
}
/** DEPRECATED */
async createAssistant(assistant: Assistant): Promise<void> {}
async deleteAssistant(assistant: Assistant): Promise<void> {}
private defaultAssistant: Assistant = {
avatar: '',

View File

@ -1,45 +0,0 @@
import { getJanDataFolderPath } from '@janhq/core/node'
import { retrieval } from './retrieval'
import path from 'path'
export function toolRetrievalUpdateTextSplitter(
chunkSize: number,
chunkOverlap: number
) {
retrieval.updateTextSplitter(chunkSize, chunkOverlap)
}
export async function toolRetrievalIngestNewDocument(
thread: string,
file: string,
model: string,
engine: string,
useTimeWeighted: boolean
) {
const threadPath = path.join(getJanDataFolderPath(), 'threads', thread)
const filePath = path.join(getJanDataFolderPath(), 'files', file)
retrieval.updateEmbeddingEngine(model, engine)
return retrieval
.ingestAgentKnowledge(filePath, `${threadPath}/memory`, useTimeWeighted)
.catch((err) => {
console.error(err)
})
}
export async function toolRetrievalLoadThreadMemory(threadId: string) {
return retrieval
.loadRetrievalAgent(
path.join(getJanDataFolderPath(), 'threads', threadId, 'memory')
)
.catch((err) => {
console.error(err)
})
}
export async function toolRetrievalQueryResult(
query: string,
useTimeWeighted: boolean = false
) {
return retrieval.generateResult(query, useTimeWeighted).catch((err) => {
console.error(err)
})
}

View File

@ -1,121 +0,0 @@
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'
import { formatDocumentsAsString } from 'langchain/util/document'
import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
import { TimeWeightedVectorStoreRetriever } from 'langchain/retrievers/time_weighted'
import { MemoryVectorStore } from 'langchain/vectorstores/memory'
import { HNSWLib } from 'langchain/vectorstores/hnswlib'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
export class Retrieval {
public chunkSize: number = 100
public chunkOverlap?: number = 0
private retriever: any
private embeddingModel?: OpenAIEmbeddings = undefined
private textSplitter?: RecursiveCharacterTextSplitter
// to support time-weighted retrieval
private timeWeightedVectorStore: MemoryVectorStore
private timeWeightedretriever: any | TimeWeightedVectorStoreRetriever
constructor(chunkSize: number = 4000, chunkOverlap: number = 200) {
this.updateTextSplitter(chunkSize, chunkOverlap)
this.initialize()
}
private async initialize() {
const apiKey = await window.core?.api.appToken() ?? 'cortex.cpp'
// declare time-weighted retriever and storage
this.timeWeightedVectorStore = new MemoryVectorStore(
new OpenAIEmbeddings(
{ openAIApiKey: apiKey },
{ basePath: `${CORTEX_API_URL}/v1` }
)
)
this.timeWeightedretriever = new TimeWeightedVectorStoreRetriever({
vectorStore: this.timeWeightedVectorStore,
memoryStream: [],
searchKwargs: 2,
})
}
public updateTextSplitter(chunkSize: number, chunkOverlap: number): void {
this.chunkSize = chunkSize
this.chunkOverlap = chunkOverlap
this.textSplitter = new RecursiveCharacterTextSplitter({
chunkSize: chunkSize,
chunkOverlap: chunkOverlap,
})
}
public async updateEmbeddingEngine(model: string, engine: string) {
const apiKey = await window.core?.api.appToken() ?? 'cortex.cpp'
this.embeddingModel = new OpenAIEmbeddings(
{ openAIApiKey: apiKey, model },
// TODO: Raw settings
{ basePath: `${CORTEX_API_URL}/v1` }
)
// update time-weighted embedding model
this.timeWeightedVectorStore.embeddings = this.embeddingModel
}
public ingestAgentKnowledge = async (
filePath: string,
memoryPath: string,
useTimeWeighted: boolean
): Promise<any> => {
const loader = new PDFLoader(filePath, {
splitPages: true,
})
if (!this.embeddingModel) return Promise.reject()
const doc = await loader.load()
const docs = await this.textSplitter!.splitDocuments(doc)
const vectorStore = await HNSWLib.fromDocuments(docs, this.embeddingModel)
// add documents with metadata by using the time-weighted retriever in order to support time-weighted retrieval
if (useTimeWeighted && this.timeWeightedretriever) {
await (
this.timeWeightedretriever as TimeWeightedVectorStoreRetriever
).addDocuments(docs)
}
return vectorStore.save(memoryPath)
}
public loadRetrievalAgent = async (memoryPath: string): Promise<void> => {
if (!this.embeddingModel) return Promise.reject()
const vectorStore = await HNSWLib.load(memoryPath, this.embeddingModel)
this.retriever = vectorStore.asRetriever(2)
return Promise.resolve()
}
public generateResult = async (
query: string,
useTimeWeighted: boolean
): Promise<string> => {
if (useTimeWeighted) {
if (!this.timeWeightedretriever) {
return Promise.resolve(' ')
}
// use invoke because getRelevantDocuments is deprecated
const relevantDocs = await this.timeWeightedretriever.invoke(query)
const serializedDoc = formatDocumentsAsString(relevantDocs)
return Promise.resolve(serializedDoc)
}
if (!this.retriever) {
return Promise.resolve(' ')
}
// should use invoke(query) because getRelevantDocuments is deprecated
const relevantDocs = await this.retriever.getRelevantDocuments(query)
const serializedDoc = formatDocumentsAsString(relevantDocs)
return Promise.resolve(serializedDoc)
}
}
export const retrieval = new Retrieval()
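
Taken together, the removed pipeline worked in three steps: point the embedder at a model, chunk and index a document into an HNSW store persisted under the thread's memory path, then load that store and query it. A condensed usage sketch of the exported singleton (the model name and paths are invented):

```typescript
import { retrieval } from './retrieval'

async function demo(): Promise<void> {
  // 1. Select the embedding model served behind CORTEX_API_URL.
  await retrieval.updateEmbeddingEngine('nomic-embed-text', 'cortex')
  // 2. Chunk and index a PDF; the index is persisted to the memory path.
  await retrieval.ingestAgentKnowledge('/tmp/report.pdf', '/tmp/thread-1/memory', false)
  // 3. Load the persisted index and retrieve context for a query.
  await retrieval.loadRetrievalAgent('/tmp/thread-1/memory')
  const context = await retrieval.generateResult('What are the key findings?', false)
  console.debug(context)
}
```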

View File

@ -1,118 +0,0 @@
import {
AssistantTool,
executeOnMain,
fs,
InferenceTool,
joinPath,
MessageRequest,
} from '@janhq/core'
export class RetrievalTool extends InferenceTool {
private _threadDir = 'file://threads'
private retrievalThreadId: string | undefined = undefined
name: string = 'retrieval'
async process(
data: MessageRequest,
tool?: AssistantTool
): Promise<MessageRequest> {
if (!data.model || !data.messages) {
return Promise.resolve(data)
}
const latestMessage = data.messages[data.messages.length - 1]
// 1. Ingest the document if needed
if (
latestMessage &&
latestMessage.content &&
typeof latestMessage.content !== 'string' &&
latestMessage.content.length > 1
) {
const docFile = latestMessage.content[1]?.doc_url?.url
if (docFile) {
await executeOnMain(
NODE,
'toolRetrievalIngestNewDocument',
data.thread?.id,
docFile,
data.model?.id,
data.model?.engine,
tool?.useTimeWeightedRetriever ?? false
)
} else {
return Promise.resolve(data)
}
} else if (
// Check whether we need to ingest document or not
// Otherwise wrong context will be sent
!(await fs.existsSync(
await joinPath([this._threadDir, data.threadId, 'memory'])
))
) {
// No document ingested, reroute the result to inference engine
return Promise.resolve(data)
}
// 2. Load agent on thread changed
if (this.retrievalThreadId !== data.threadId) {
await executeOnMain(NODE, 'toolRetrievalLoadThreadMemory', data.threadId)
this.retrievalThreadId = data.threadId
// Update the text splitter
await executeOnMain(
NODE,
'toolRetrievalUpdateTextSplitter',
tool?.settings?.chunk_size ?? 4000,
tool?.settings?.chunk_overlap ?? 200
)
}
// 3. Using the retrieval template with the result and query
if (latestMessage.content) {
const prompt =
typeof latestMessage.content === 'string'
? latestMessage.content
: latestMessage.content[0].text
// Retrieve the result
const retrievalResult = await executeOnMain(
NODE,
'toolRetrievalQueryResult',
prompt,
tool?.useTimeWeightedRetriever ?? false
)
console.debug('toolRetrievalQueryResult', retrievalResult)
// Update message content
if (retrievalResult)
data.messages[data.messages.length - 1].content =
tool?.settings?.retrieval_template
?.replace('{CONTEXT}', retrievalResult)
.replace('{QUESTION}', prompt)
}
// 4. Reroute the result to inference engine
return Promise.resolve(this.normalize(data))
}
// Filter out all the messages that are not text
// TODO: Remove this once engines can handle multiple content types
normalize(request: MessageRequest): MessageRequest {
request.messages = request.messages?.map((message) => {
if (
message.content &&
typeof message.content !== 'string' &&
(message.content.length ?? 0) > 0
) {
return {
...message,
content: [message.content[0]],
}
}
return message
})
return request
}
}

View File

@ -23,9 +23,7 @@
"typescript": "^5.7.2"
},
"dependencies": {
"@janhq/core": "../../core/package.tgz",
"ky": "^1.7.2",
"p-queue": "^8.0.1"
"@janhq/core": "../../core/package.tgz"
},
"engines": {
"node": ">=18.0.0"

View File

@ -4,45 +4,17 @@ import {
ThreadAssistantInfo,
ThreadMessage,
} from '@janhq/core'
import ky, { KyInstance } from 'ky'
import PQueue from 'p-queue'
type ThreadList = {
data: Thread[]
}
type MessageList = {
data: ThreadMessage[]
}
/**
* CortexConversationalExtension is a ConversationalExtension implementation that provides
* functionality for managing threads.
*/
export default class CortexConversationalExtension extends ConversationalExtension {
queue = new PQueue({ concurrency: 1 })
api?: KyInstance
/**
* Get the API instance
* @returns
*/
async apiInstance(): Promise<KyInstance> {
if(this.api) return this.api
const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp'
this.api = ky.extend({
prefixUrl: API_URL,
headers: {
Authorization: `Bearer ${apiKey}`,
},
})
return this.api
}
/**
* Called when the extension is loaded.
*/
async onLoad() {
this.queue.add(() => this.healthz())
// this.queue.add(() => this.healthz())
}
/**
@ -54,14 +26,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* Returns a Promise that resolves to an array of Conversation objects.
*/
async listThreads(): Promise<Thread[]> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get('v1/threads?limit=-1')
.json<ThreadList>()
.then((e) => e.data)
)
) as Promise<Thread[]>
return window.core.api.listThreads()
}
/**
@ -69,11 +34,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @param thread The Thread object to save.
*/
async createThread(thread: Thread): Promise<Thread> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api.post('v1/threads', { json: thread }).json<Thread>()
)
) as Promise<Thread>
return window.core.api.createThread({ thread })
}
/**
@ -81,13 +42,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @param thread The Thread object to save.
*/
async modifyThread(thread: Thread): Promise<void> {
return this.queue
.add(() =>
this.apiInstance().then((api) =>
api.patch(`v1/threads/${thread.id}`, { json: thread })
)
)
.then()
return window.core.api.modifyThread({ thread })
}
/**
@ -95,11 +50,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @param threadId The ID of the thread to delete.
*/
async deleteThread(threadId: string): Promise<void> {
return this.queue
.add(() =>
this.apiInstance().then((api) => api.delete(`v1/threads/${threadId}`))
)
.then()
return window.core.api.deleteThread({ threadId })
}
/**
@ -108,15 +59,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @returns A Promise that resolves when the message has been added.
*/
async createMessage(message: ThreadMessage): Promise<ThreadMessage> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post(`v1/threads/${message.thread_id}/messages`, {
json: message,
})
.json<ThreadMessage>()
)
) as Promise<ThreadMessage>
return window.core.api.createMessage({ message })
}
/**
@ -125,15 +68,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @returns
*/
async modifyMessage(message: ThreadMessage): Promise<ThreadMessage> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.patch(`v1/threads/${message.thread_id}/messages/${message.id}`, {
json: message,
})
.json<ThreadMessage>()
)
) as Promise<ThreadMessage>
return window.core.api.modifyMessage({ message })
}
/**
@ -143,13 +78,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @returns A Promise that resolves when the message has been successfully deleted.
*/
async deleteMessage(threadId: string, messageId: string): Promise<void> {
return this.queue
.add(() =>
this.apiInstance().then((api) =>
api.delete(`v1/threads/${threadId}/messages/${messageId}`)
)
)
.then()
return window.core.api.deleteMessage({ threadId, messageId })
}
/**
@ -158,14 +87,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* @returns A Promise that resolves to an array of ThreadMessage objects.
*/
async listMessages(threadId: string): Promise<ThreadMessage[]> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/threads/${threadId}/messages?order=asc&limit=-1`)
.json<MessageList>()
.then((e) => e.data)
)
) as Promise<ThreadMessage[]>
return window.core.api.listMessages({ threadId })
}
/**
@ -175,13 +97,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
* the details of the assistant associated with the specified thread.
*/
async getThreadAssistant(threadId: string): Promise<ThreadAssistantInfo> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/assistants/${threadId}?limit=-1`)
.json<ThreadAssistantInfo>()
)
) as Promise<ThreadAssistantInfo>
return window.core.api.getThreadAssistant({ threadId })
}
/**
* Creates a new assistant for the specified thread.
@ -193,13 +109,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi
threadId: string,
assistant: ThreadAssistantInfo
): Promise<ThreadAssistantInfo> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post(`v1/assistants/${threadId}`, { json: assistant })
.json<ThreadAssistantInfo>()
)
) as Promise<ThreadAssistantInfo>
return window.core.api.createThreadAssistant(threadId, assistant)
}
/**
@ -212,26 +122,6 @@ export default class CortexConversationalExtension extends ConversationalExtensi
threadId: string,
assistant: ThreadAssistantInfo
): Promise<ThreadAssistantInfo> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.patch(`v1/assistants/${threadId}`, { json: assistant })
.json<ThreadAssistantInfo>()
)
) as Promise<ThreadAssistantInfo>
}
/**
* Do health check on cortex.cpp
* @returns
*/
async healthz(): Promise<void> {
return this.apiInstance()
.then((api) =>
api.get('healthz', {
retry: { limit: 20, delay: () => 500, methods: ['get'] },
})
)
.then(() => {})
return window.core.api.modifyThreadAssistant({ threadId, assistant })
}
}

View File

@ -16,7 +16,6 @@ import {
EngineEvent,
} from '@janhq/core'
import ky, { HTTPError, KyInstance } from 'ky'
import PQueue from 'p-queue'
import { EngineError } from './error'
import { getJanDataFolderPath } from '@janhq/core'
import { engineVariant } from './utils'
@ -29,21 +28,22 @@ interface ModelList {
* functionality for managing engines.
*/
export default class JanEngineManagementExtension extends EngineManagementExtension {
queue = new PQueue({ concurrency: 1 })
api?: KyInstance
/**
* Get the API instance
* @returns
*/
async apiInstance(): Promise<KyInstance> {
if(this.api) return this.api
const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp'
if (this.api) return this.api
const apiKey = (await window.core?.api.appToken())
this.api = ky.extend({
prefixUrl: API_URL,
headers: {
Authorization: `Bearer ${apiKey}`,
},
headers: apiKey
? {
Authorization: `Bearer ${apiKey}`,
}
: {},
retry: 10,
})
return this.api
}
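
This lazily-memoized client recurs across the extensions in this change: build the ky instance once, attach the Authorization header only when an app token exists, and rely on `retry: 10` to absorb the startup window that the removed healthz/PQueue gating used to cover. Distilled into a standalone helper (the prefix URL is a placeholder):

```typescript
import ky, { KyInstance } from 'ky'

let api: KyInstance | undefined

async function apiInstance(): Promise<KyInstance> {
  if (api) return api // memoized after the first call
  const apiKey = await window.core?.api.appToken()
  api = ky.extend({
    prefixUrl: 'http://127.0.0.1:39291', // placeholder for API_URL
    // Send Authorization only when a token is actually available.
    headers: apiKey ? { Authorization: `Bearer ${apiKey}` } : {},
    retry: 10, // stands in for the removed healthz readiness gate
  })
  return api
}
```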
@ -53,8 +53,6 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
async onLoad() {
// Symlink Engines Directory
await executeOnMain(NODE, 'symlinkEngines')
// Run Healthcheck
this.queue.add(() => this.healthz())
// Update default local engine
this.updateDefaultEngine()
@ -74,13 +72,11 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves to the list of engines.
*/
async getEngines(): Promise<Engines> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get('v1/engines')
.json<Engines>()
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.get('v1/engines')
.json<Engines>()
.then((e) => e)
) as Promise<Engines>
}
@ -104,13 +100,11 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves to an array of installed engine variants.
*/
async getInstalledEngines(name: InferenceEngine): Promise<EngineVariant[]> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}`)
.json<EngineVariant[]>()
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}`)
.json<EngineVariant[]>()
.then((e) => e)
) as Promise<EngineVariant[]>
}
@ -125,15 +119,13 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
version: string,
platform?: string
) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}/releases/${version}`)
.json<EngineReleased[]>()
.then((e) =>
platform ? e.filter((r) => r.name.includes(platform)) : e
)
)
return this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}/releases/${version}`)
.json<EngineReleased[]>()
.then((e) =>
platform ? e.filter((r) => r.name.includes(platform)) : e
)
) as Promise<EngineReleased[]>
}
@ -143,15 +135,13 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves to an array of the latest released engine builds.
*/
async getLatestReleasedEngine(name: InferenceEngine, platform?: string) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}/releases/latest`)
.json<EngineReleased[]>()
.then((e) =>
platform ? e.filter((r) => r.name.includes(platform)) : e
)
)
return this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}/releases/latest`)
.json<EngineReleased[]>()
.then((e) =>
platform ? e.filter((r) => r.name.includes(platform)) : e
)
) as Promise<EngineReleased[]>
}
@ -160,12 +150,10 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves when the engine is installed.
*/
async installEngine(name: string, engineConfig: EngineConfig) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post(`v1/engines/${name}/install`, { json: engineConfig })
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.post(`v1/engines/${name}/install`, { json: engineConfig })
.then((e) => e)
) as Promise<{ messages: string }>
}
@ -195,18 +183,16 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
if (engineConfig.metadata && !engineConfig.metadata?.header_template)
engineConfig.metadata.header_template = DEFAULT_REQUEST_HEADERS_TRANSFORM
return this.queue.add(() =>
this.apiInstance().then((api) =>
api.post('v1/engines', { json: engineConfig }).then((e) => {
if (persistModels && engineConfig.metadata?.get_models_url) {
// Pull /models from remote models endpoint
return this.populateRemoteModels(engineConfig)
.then(() => e)
.catch(() => e)
}
return e
})
)
return this.apiInstance().then((api) =>
api.post('v1/engines', { json: engineConfig }).then((e) => {
if (persistModels && engineConfig.metadata?.get_models_url) {
// Pull /models from remote models endpoint
return this.populateRemoteModels(engineConfig)
.then(() => e)
.catch(() => e)
}
return e
})
) as Promise<{ messages: string }>
}
@ -215,12 +201,10 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves when the engine is uninstalled.
*/
async uninstallEngine(name: InferenceEngine, engineConfig: EngineConfig) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.delete(`v1/engines/${name}/install`, { json: engineConfig })
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.delete(`v1/engines/${name}/install`, { json: engineConfig })
.then((e) => e)
) as Promise<{ messages: string }>
}
@ -229,25 +213,22 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @param model - Remote model object.
*/
async addRemoteModel(model: Model) {
return this.queue.add(() =>
this.apiInstance()
.then((api) =>
api
.post('v1/models/add', {
json: {
inference_params: {
max_tokens: 4096,
temperature: 0.7,
top_p: 0.95,
stream: true,
frequency_penalty: 0,
presence_penalty: 0,
},
...model,
},
})
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.post('v1/models/add', {
json: {
inference_params: {
max_tokens: 4096,
temperature: 0.7,
top_p: 0.95,
stream: true,
frequency_penalty: 0,
presence_penalty: 0,
},
...model,
},
})
.then((e) => e)
.then(() => {})
)
}
@ -257,13 +238,11 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves to the default engine variant.
*/
async getDefaultEngineVariant(name: InferenceEngine) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}/default`)
.json<{ messages: string }>()
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.get(`v1/engines/${name}/default`)
.json<{ messages: string }>()
.then((e) => e)
) as Promise<DefaultEngineVariant>
}
@ -276,12 +255,10 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
name: InferenceEngine,
engineConfig: EngineConfig
) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post(`v1/engines/${name}/default`, { json: engineConfig })
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.post(`v1/engines/${name}/default`, { json: engineConfig })
.then((e) => e)
) as Promise<{ messages: string }>
}
@ -289,31 +266,13 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
* @returns A Promise that resolves when the engine is updated.
*/
async updateEngine(name: InferenceEngine, engineConfig?: EngineConfig) {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post(`v1/engines/${name}/update`, { json: engineConfig })
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.post(`v1/engines/${name}/update`, { json: engineConfig })
.then((e) => e)
) as Promise<{ messages: string }>
}
/**
* Do health check on cortex.cpp
* @returns
*/
async healthz(): Promise<void> {
return this.apiInstance()
.then((api) =>
api.get('healthz', {
retry: { limit: 20, delay: () => 500, methods: ['get'] },
})
)
.then(() => {
this.queue.concurrency = Infinity
})
}
/**
* Update default local engine
* This is to use built-in engine variant in case there is no default engine set
@ -428,8 +387,6 @@ export default class JanEngineManagementExtension extends EngineManagementExtens
*/
migrate = async () => {
// Ensure health check is done
await this.queue.onEmpty()
const version = await this.getSetting<string>('version', '0.0.0')
const engines = await this.getEngines()
if (version < VERSION) {

View File

@ -1,21 +1,15 @@
import { HardwareManagementExtension, HardwareInformation } from '@janhq/core'
import ky, { KyInstance } from 'ky'
import PQueue from 'p-queue'
/**
* JSONHardwareManagementExtension is a HardwareManagementExtension implementation that provides
* functionality for managing hardware.
*/
export default class JSONHardwareManagementExtension extends HardwareManagementExtension {
queue = new PQueue({ concurrency: 1 })
/**
* Called when the extension is loaded.
*/
async onLoad() {
// Run Healthcheck
this.queue.add(() => this.healthz())
}
async onLoad() {}
api?: KyInstance
/**
@ -23,13 +17,16 @@ export default class JSONHardwareManagementExtension extends HardwareManagementE
* @returns
*/
async apiInstance(): Promise<KyInstance> {
if(this.api) return this.api
const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp'
if (this.api) return this.api
const apiKey = (await window.core?.api.appToken())
this.api = ky.extend({
prefixUrl: API_URL,
headers: {
Authorization: `Bearer ${apiKey}`,
},
headers: apiKey
? {
Authorization: `Bearer ${apiKey}`,
}
: {},
retry: 10,
})
return this.api
}
@ -39,31 +36,15 @@ export default class JSONHardwareManagementExtension extends HardwareManagementE
*/
onUnload() {}
/**
* Do health check on cortex.cpp
* @returns
*/
async healthz(): Promise<void> {
return this.apiInstance().then((api) =>
api
.get('healthz', {
retry: { limit: 20, delay: () => 500, methods: ['get'] },
})
.then(() => {})
)
}
/**
* @returns A Promise that resolves to an object of hardware.
*/
async getHardware(): Promise<HardwareInformation> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get('v1/hardware')
.json<HardwareInformation>()
.then((e) => e)
)
return this.apiInstance().then((api) =>
api
.get('v1/hardware')
.json<HardwareInformation>()
.then((e) => e)
) as Promise<HardwareInformation>
}
@ -74,10 +55,8 @@ export default class JSONHardwareManagementExtension extends HardwareManagementE
message: string
activated_gpus: number[]
}> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api.post('v1/hardware/activate', { json: data }).then((e) => e)
)
return this.apiInstance().then((api) =>
api.post('v1/hardware/activate', { json: data }).then((e) => e)
) as Promise<{
message: string
activated_gpus: number[]

View File

@ -16,7 +16,6 @@ import {
events,
ModelEvent,
} from '@janhq/core'
import PQueue from 'p-queue'
import ky, { KyInstance } from 'ky'
/**
@ -48,8 +47,6 @@ export enum Settings {
export default class JanInferenceCortexExtension extends LocalOAIEngine {
nodeModule: string = 'node'
queue = new PQueue({ concurrency: 1 })
provider: string = InferenceEngine.cortex
shouldReconnect = true
@ -81,13 +78,16 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
* @returns
*/
async apiInstance(): Promise<KyInstance> {
if(this.api) return this.api
const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp'
if (this.api) return this.api
const apiKey = await window.core?.api.appToken()
this.api = ky.extend({
prefixUrl: CORTEX_API_URL,
headers: {
Authorization: `Bearer ${apiKey}`,
},
headers: apiKey
? {
Authorization: `Bearer ${apiKey}`,
}
: {},
retry: 10,
})
return this.api
}
@ -129,10 +129,8 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
)
if (!Number.isNaN(threads_number)) this.cpu_threads = threads_number
// Run the process watchdog
// const systemInfo = await systemInformation()
this.queue.add(() => executeOnMain(NODE, 'run'))
this.queue.add(() => this.healthz())
await executeOnMain(NODE, 'run')
this.subscribeToEvents()
window.addEventListener('beforeunload', () => {
@ -179,35 +177,33 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
this.abortControllers.set(model.id, controller)
return await this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post('v1/models/start', {
json: {
...extractModelLoadParams(model.settings),
model: model.id,
engine:
model.engine === InferenceEngine.nitro // Legacy model cache
? InferenceEngine.cortex_llamacpp
: model.engine,
cont_batching: this.cont_batching,
n_parallel: this.n_parallel,
caching_enabled: this.caching_enabled,
flash_attn: this.flash_attn,
cache_type: this.cache_type,
use_mmap: this.use_mmap,
...(this.cpu_threads ? { cpu_threads: this.cpu_threads } : {}),
},
timeout: false,
signal,
})
.json()
.catch(async (e) => {
throw (await e.response?.json()) ?? e
})
.finally(() => this.abortControllers.delete(model.id))
.then()
)
return await this.apiInstance().then((api) =>
api
.post('v1/models/start', {
json: {
...extractModelLoadParams(model.settings),
model: model.id,
engine:
model.engine === InferenceEngine.nitro // Legacy model cache
? InferenceEngine.cortex_llamacpp
: model.engine,
cont_batching: this.cont_batching,
n_parallel: this.n_parallel,
caching_enabled: this.caching_enabled,
flash_attn: this.flash_attn,
cache_type: this.cache_type,
use_mmap: this.use_mmap,
...(this.cpu_threads ? { cpu_threads: this.cpu_threads } : {}),
},
timeout: false,
signal,
})
.json()
.catch(async (e) => {
throw (await e.response?.json()) ?? e
})
.finally(() => this.abortControllers.delete(model.id))
.then()
)
}
@ -225,24 +221,6 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
)
}
/**
* Do health check on cortex.cpp
* @returns
*/
private async healthz(): Promise<void> {
return this.apiInstance().then((api) =>
api
.get('healthz', {
retry: {
limit: 20,
delay: () => 500,
methods: ['get'],
},
})
.then(() => {})
)
}
/**
* Clean cortex processes
* @returns
@ -266,76 +244,64 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
* Subscribe to cortex.cpp websocket events
*/
private subscribeToEvents() {
this.queue.add(
() =>
new Promise<void>((resolve) => {
this.socket = new WebSocket(`${CORTEX_SOCKET_URL}/events`)
console.log('Subscribing to events...')
this.socket = new WebSocket(`${CORTEX_SOCKET_URL}/events`)
this.socket.addEventListener('message', (event) => {
const data = JSON.parse(event.data)
this.socket.addEventListener('message', (event) => {
const data = JSON.parse(event.data)
const transferred = data.task.items.reduce(
(acc: number, cur: any) => acc + cur.downloadedBytes,
0
)
const total = data.task.items.reduce(
(acc: number, cur: any) => acc + cur.bytes,
0
)
const percent = total > 0 ? transferred / total : 0
const transferred = data.task.items.reduce(
(acc: number, cur: any) => acc + cur.downloadedBytes,
0
)
const total = data.task.items.reduce(
(acc: number, cur: any) => acc + cur.bytes,
0
)
const percent = total > 0 ? transferred / total : 0
events.emit(
DownloadTypes[data.type as keyof typeof DownloadTypes],
{
modelId: data.task.id,
percent: percent,
size: {
transferred: transferred,
total: total,
},
downloadType: data.task.type,
}
)
events.emit(DownloadTypes[data.type as keyof typeof DownloadTypes], {
modelId: data.task.id,
percent: percent,
size: {
transferred: transferred,
total: total,
},
downloadType: data.task.type,
})
if (data.task.type === 'Engine') {
events.emit(EngineEvent.OnEngineUpdate, {
type: DownloadTypes[data.type as keyof typeof DownloadTypes],
percent: percent,
id: data.task.id,
})
} else {
if (data.type === DownloadTypes.DownloadSuccess) {
// Delay for the state update from cortex.cpp
// Just to be sure
setTimeout(() => {
events.emit(ModelEvent.OnModelsUpdate, {
fetch: true,
})
}, 500)
}
}
})
/**
* This is to handle the server segfault issue
*/
this.socket.onclose = (event) => {
console.log('WebSocket closed:', event)
// Notify app to update model running state
events.emit(ModelEvent.OnModelStopped, {})
// Reconnect to the /events websocket
if (this.shouldReconnect) {
console.log(`Attempting to reconnect...`)
setTimeout(() => this.subscribeToEvents(), 1000)
}
// Queue up health check
this.queue.add(() => this.healthz())
}
resolve()
if (data.task.type === 'Engine') {
events.emit(EngineEvent.OnEngineUpdate, {
type: DownloadTypes[data.type as keyof typeof DownloadTypes],
percent: percent,
id: data.task.id,
})
)
} else {
if (data.type === DownloadTypes.DownloadSuccess) {
// Delay for the state update from cortex.cpp
// Just to be sure
setTimeout(() => {
events.emit(ModelEvent.OnModelsUpdate, {
fetch: true,
})
}, 500)
}
}
})
/**
* This is to handle the server segfault issue
*/
this.socket.onclose = (event) => {
console.log('WebSocket closed:', event)
// Notify app to update model running state
events.emit(ModelEvent.OnModelStopped, {})
// Reconnect to the /events websocket
if (this.shouldReconnect) {
console.log(`Attempting to reconnect...`)
setTimeout(() => this.subscribeToEvents(), 1000)
}
}
}
}
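
The rewritten subscription derives overall download progress by summing per-item byte counters from the cortex.cpp task payload. The arithmetic in isolation (the payload shape is inferred from the handler above):

```typescript
type DownloadItem = { downloadedBytes: number; bytes: number }

function downloadPercent(items: DownloadItem[]): number {
  const transferred = items.reduce((acc, cur) => acc + cur.downloadedBytes, 0)
  const total = items.reduce((acc, cur) => acc + cur.bytes, 0)
  // Guard against division by zero before the task reports sizes.
  return total > 0 ? transferred / total : 0
}

downloadPercent([
  { downloadedBytes: 512, bytes: 1024 },
  { downloadedBytes: 256, bytes: 1024 },
]) // 0.375
```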

View File

@ -12,7 +12,6 @@ import {
} from '@janhq/core'
import { scanModelsFolder } from './legacy/model-json'
import { deleteModelFiles } from './legacy/delete'
import PQueue from 'p-queue'
import ky, { KyInstance } from 'ky'
/**
@ -31,21 +30,22 @@ type Data<T> = {
* An extension for models
*/
export default class JanModelExtension extends ModelExtension {
queue = new PQueue({ concurrency: 1 })
api?: KyInstance
/**
* Get the API instance
* @returns
*/
async apiInstance(): Promise<KyInstance> {
if(this.api) return this.api
const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp'
if (this.api) return this.api
const apiKey = (await window.core?.api.appToken())
this.api = ky.extend({
prefixUrl: CORTEX_API_URL,
headers: {
Authorization: `Bearer ${apiKey}`,
},
headers: apiKey
? {
Authorization: `Bearer ${apiKey}`,
}
: {},
retry: 10
})
return this.api
}
@ -53,8 +53,6 @@ export default class JanModelExtension extends ModelExtension {
* Called when the extension is loaded.
*/
async onLoad() {
this.queue.add(() => this.healthz())
this.registerSettings(SETTINGS)
// Configure huggingface token if available
@ -97,16 +95,14 @@ export default class JanModelExtension extends ModelExtension {
/**
* Sending POST to /models/pull/{id} endpoint to pull the model
*/
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post('v1/models/pull', { json: { model, id, name }, timeout: false })
.json()
.catch(async (e) => {
throw (await e.response?.json()) ?? e
})
.then()
)
return this.apiInstance().then((api) =>
api
.post('v1/models/pull', { json: { model, id, name }, timeout: false })
.json()
.catch(async (e) => {
throw (await e.response?.json()) ?? e
})
.then()
)
}
@ -120,13 +116,11 @@ export default class JanModelExtension extends ModelExtension {
/**
* Sending DELETE to /models/pull/{id} endpoint to cancel a model pull
*/
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.delete('v1/models/pull', { json: { taskId: model } })
.json()
.then()
)
return this.apiInstance().then((api) =>
api
.delete('v1/models/pull', { json: { taskId: model } })
.json()
.then()
)
}
@ -136,12 +130,8 @@ export default class JanModelExtension extends ModelExtension {
* @returns A Promise that resolves when the model is deleted.
*/
async deleteModel(model: string): Promise<void> {
return this.queue
.add(() =>
this.apiInstance().then((api) =>
api.delete(`v1/models/${model}`).json().then()
)
)
return this.apiInstance()
.then((api) => api.delete(`v1/models/${model}`).json().then())
.catch((e) => console.debug(e))
.finally(async () => {
// Delete legacy model files
@ -241,17 +231,15 @@ export default class JanModelExtension extends ModelExtension {
* @param model - The metadata of the model
*/
async updateModel(model: Partial<Model>): Promise<Model> {
return this.queue
.add(() =>
this.apiInstance().then((api) =>
api
.patch(`v1/models/${model.id}`, {
json: { ...model },
timeout: false,
})
.json()
.then()
)
return this.apiInstance()
.then((api) =>
api
.patch(`v1/models/${model.id}`, {
json: { ...model },
timeout: false,
})
.json()
.then()
)
.then(() => this.getModel(model.id))
}
@ -261,13 +249,11 @@ export default class JanModelExtension extends ModelExtension {
* @param model - The ID of the model
*/
async getModel(model: string): Promise<Model> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.get(`v1/models/${model}`)
.json()
.then((e) => this.transformModel(e))
)
return this.apiInstance().then((api) =>
api
.get(`v1/models/${model}`)
.json()
.then((e) => this.transformModel(e))
) as Promise<Model>
}
@ -282,17 +268,15 @@ export default class JanModelExtension extends ModelExtension {
name?: string,
option?: OptionType
): Promise<void> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api
.post('v1/models/import', {
json: { model, modelPath, name, option },
timeout: false,
})
.json()
.catch((e) => console.debug(e)) // Ignore error
.then()
)
return this.apiInstance().then((api) =>
api
.post('v1/models/import', {
json: { model, modelPath, name, option },
timeout: false,
})
.json()
.catch((e) => console.debug(e)) // Ignore error
.then()
)
}
@ -302,12 +286,8 @@ export default class JanModelExtension extends ModelExtension {
* @param model
*/
async getSources(): Promise<ModelSource[]> {
const sources = await this.queue
.add(() =>
this.apiInstance().then((api) =>
api.get('v1/models/sources').json<Data<ModelSource>>()
)
)
const sources = await this.apiInstance()
.then((api) => api.get('v1/models/sources').json<Data<ModelSource>>())
.then((e) => (typeof e === 'object' ? (e.data as ModelSource[]) : []))
.catch(() => [])
return sources.concat(
@ -320,14 +300,12 @@ export default class JanModelExtension extends ModelExtension {
* @param source
*/
async addSource(source: string): Promise<any> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api.post('v1/models/sources', {
json: {
source,
},
})
)
return this.apiInstance().then((api) =>
api.post('v1/models/sources', {
json: {
source,
},
})
)
}
@ -336,15 +314,13 @@ export default class JanModelExtension extends ModelExtension {
* @param source
*/
async deleteSource(source: string): Promise<any> {
return this.queue.add(() =>
this.apiInstance().then((api) =>
api.delete('v1/models/sources', {
json: {
source,
},
timeout: false,
})
)
return this.apiInstance().then((api) =>
api.delete('v1/models/sources', {
json: {
source,
},
timeout: false,
})
)
}
// END - Model Sources
@ -354,10 +330,8 @@ export default class JanModelExtension extends ModelExtension {
* @param model
*/
async isModelLoaded(model: string): Promise<boolean> {
return this.queue
.add(() =>
this.apiInstance().then((api) => api.get(`v1/models/status/${model}`))
)
return this.apiInstance()
.then((api) => api.get(`v1/models/status/${model}`))
.then((e) => true)
.catch(() => false)
}
@ -375,12 +349,8 @@ export default class JanModelExtension extends ModelExtension {
* @returns
*/
async fetchModels(): Promise<Model[]> {
return this.queue
.add(() =>
this.apiInstance().then((api) =>
api.get('v1/models?limit=-1').json<Data<Model>>()
)
)
return this.apiInstance()
.then((api) => api.get('v1/models?limit=-1').json<Data<Model>>())
.then((e) =>
typeof e === 'object' ? e.data.map((e) => this.transformModel(e)) : []
)
@ -418,33 +388,9 @@ export default class JanModelExtension extends ModelExtension {
private async updateCortexConfig(body: {
[key: string]: any
}): Promise<void> {
return this.queue
.add(() =>
this.apiInstance().then((api) =>
api.patch('v1/configs', { json: body }).then(() => {})
)
)
.catch((e) => console.debug(e))
}
/**
* Do health check on cortex.cpp
* @returns
*/
private healthz(): Promise<void> {
return this.apiInstance()
.then((api) =>
api.get('healthz', {
retry: {
limit: 20,
delay: () => 500,
methods: ['get'],
},
})
)
.then(() => {
this.queue.concurrency = Infinity
})
.then((api) => api.patch('v1/configs', { json: body }).then(() => {}))
.catch((e) => console.debug(e))
}
/**
@ -453,25 +399,23 @@ export default class JanModelExtension extends ModelExtension {
private fetchCortexsoModels = async () => {
const models = await this.fetchModels()
return this.queue.add(() =>
this.apiInstance()
.then((api) =>
api
.get('v1/models/hub?author=cortexso&tag=cortex.cpp')
.json<Data<string>>()
.then((e) => {
e.data?.forEach((model) => {
if (
!models.some(
(e) => 'modelSource' in e && e.modelSource === model
)
return this.apiInstance()
.then((api) =>
api
.get('v1/models/hub?author=cortexso&tag=cortex.cpp')
.json<Data<string>>()
.then((e) => {
e.data?.forEach((model) => {
if (
!models.some(
(e) => 'modelSource' in e && e.modelSource === model
)
this.addSource(model).catch((e) => console.debug(e))
})
)
this.addSource(model).catch((e) => console.debug(e))
})
)
.catch((e) => console.debug(e))
)
})
)
.catch((e) => console.debug(e))
}
// END: - Private API
}

View File

@ -17,10 +17,20 @@
"test": "yarn workspace jan test:e2e",
"test-local": "yarn lint && yarn build:test && yarn test",
"copy:assets": "cpx \"pre-install/*.tgz\" \"electron/pre-install/\" && cpx \"themes/**\" \"electron/themes\"",
"copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"themes/**\" \"src-tauri/resources/themes\"",
"dev:electron": "yarn copy:assets && yarn workspace jan dev",
"dev:web:standalone": "concurrently \"yarn workspace @janhq/web dev\" \"wait-on http://localhost:3000 && rsync -av --prune-empty-dirs --include '*/' --include 'dist/***' --include 'package.json' --include 'tsconfig.json' --exclude '*' ./extensions/ web/.next/static/extensions/\"",
"dev:web": "yarn workspace @janhq/web dev",
"dev:server": "yarn workspace @janhq/server dev",
"dev": "concurrently -n \"NEXT,ELECTRON\" -c \"yellow,blue\" --kill-others \"yarn dev:web\" \"yarn dev:electron\"",
"install:cortex:linux:darwin": "cd src-tauri/binaries && ./download.sh",
"install:cortex:win32": "cd src-tauri/binaries && download.bat",
"install:cortex": "run-script-os",
"dev:tauri": "yarn build:icon && yarn copy:assets:tauri && tauri dev",
"build:tauri:linux:win32": "yarn install:cortex && yarn build:icon && yarn copy:assets:tauri && yarn tauri build --verbose",
"build:tauri:darwin": "yarn install:cortex && yarn build:icon && yarn copy:assets:tauri && yarn tauri build --verbose --target universal-apple-darwin",
"build:tauri": "run-script-os",
"build:icon": "tauri icon ./src-tauri/icons/icon.png",
"build:server": "cd server && yarn build",
"build:core": "cd core && yarn build && yarn pack",
"build:web": "yarn workspace @janhq/web build && cpx \"web/out/**\" \"electron/renderer/\"",
@ -29,18 +39,22 @@
"build:extensions": "rimraf ./pre-install/*.tgz || true && yarn workspace @janhq/core build && cd extensions && yarn install && yarn workspaces foreach -Apt run build:publish",
"build:test": "yarn copy:assets && yarn workspace @janhq/web build && cpx \"web/out/**\" \"electron/renderer/\" && yarn workspace jan build:test",
"build": "yarn build:web && yarn build:electron",
"build-tauri": "yarn build:web && yarn build:tauri",
"build:publish": "yarn copy:assets && yarn build:web && yarn workspace jan build:publish",
"dev:joi": "yarn workspace @janhq/joi install && yarn workspace @janhq/joi dev",
"build:joi": "yarn workspace @janhq/joi build",
"prepare": "husky"
},
"devDependencies": {
"@tauri-apps/cli": "^2.2.5",
"concurrently": "^9.1.0",
"cpx": "^1.5.0",
"cross-env": "^7.0.3",
"husky": "^9.1.5",
"jest": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
"rimraf": "^3.0.2",
"run-script-os": "^1.1.6",
"wait-on": "^7.0.1"
},
"version": "0.0.0",

src-tauri/.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
# Generated by Cargo
# will have compiled files and executables
/target/
/gen/schemas
binaries
!binaries/download.sh
!binaries/download.bat

src-tauri/Cargo.toml Normal file
View File

@ -0,0 +1,45 @@
[package]
name = "Jan"
version = "0.5.16"
description = "Use offline LLMs with your own data. Run open source models like Llama2 or Falcon on your internal computers/servers."
authors = ["Jan <service@jan.ai>"]
license = "MIT"
repository = "https://github.com/menloresearch/jan"
edition = "2021"
rust-version = "1.77.2"
[lib]
name = "app_lib"
crate-type = ["staticlib", "cdylib", "rlib"]
[build-dependencies]
tauri-build = { version = "2.0.2", features = [] }
[dependencies]
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
log = "0.4"
tauri = { version = "2.1.0", features = [ "protocol-asset", "macos-private-api",
"test",
] }
tauri-plugin-log = "2.0.0-rc"
tauri-plugin-shell = "2.2.0"
flate2 = "1.0"
tar = "0.4"
rand = "0.8"
tauri-plugin-http = { version = "2", features = ["unsafe-headers"] }
tauri-plugin-store = "2"
hyper = { version = "0.14", features = ["server"] }
reqwest = { version = "0.11", features = ["json"] }
tokio = { version = "1", features = ["full"] }
rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = [
"client",
"transport-sse",
"transport-child-process",
"tower",
] }
uuid = { version = "1.7", features = ["v4"] }
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
tauri-plugin-updater = "2"
once_cell = "1.18"

BIN
src-tauri/app-icon.png Normal file

Binary file not shown.

Size: 38 KiB

View File

@ -0,0 +1,43 @@
@echo off
set CORTEX_VERSION=1.0.13-rc1
set ENGINE_VERSION=0.1.55
set ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/cortex.llamacpp/releases/download/v%ENGINE_VERSION%/cortex.llamacpp-%ENGINE_VERSION%-windows-amd64
set CUDA_DOWNLOAD_URL=https://github.com/menloresearch/cortex.llamacpp/releases/download/v%ENGINE_VERSION%
@REM set SUBFOLDERS=windows-amd64-noavx-cuda-12-0 windows-amd64-noavx-cuda-11-7 windows-amd64-avx2-cuda-12-0 windows-amd64-avx2-cuda-11-7 windows-amd64-noavx windows-amd64-avx windows-amd64-avx2 windows-amd64-avx512 windows-amd64-vulkan
set BIN_PATH="./"
set DOWNLOAD_TOOL=..\..\extensions\inference-cortex-extension\node_modules\.bin\download
@REM Download cortex.llamacpp binaries
call %DOWNLOAD_TOOL% -e --strip 1 -o %BIN_PATH% https://github.com/menloresearch/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz
@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx2-cuda-12-0.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-avx2-cuda-12-0/v%ENGINE_VERSION%
@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx2-cuda-11-7.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-avx2-cuda-11-7/v%ENGINE_VERSION%
@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-noavx-cuda-12-0.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-noavx-cuda-12-0/v%ENGINE_VERSION%
@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-noavx-cuda-11-7.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-noavx-cuda-11-7/v%ENGINE_VERSION%
call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-noavx.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-noavx/v%ENGINE_VERSION%
call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-avx/v%ENGINE_VERSION%
call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx2.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-avx2/v%ENGINE_VERSION%
call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx512.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-avx512/v%ENGINE_VERSION%
call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-vulkan.tar.gz -e --strip 1 -o ./engines/cortex.llamacpp/windows-amd64-vulkan/v%ENGINE_VERSION%
@REM call %DOWNLOAD_TOOL% %CUDA_DOWNLOAD_URL%/cuda-12-0-windows-amd64.tar.gz -e --strip 1 -o %BIN_PATH%
@REM call %DOWNLOAD_TOOL% %CUDA_DOWNLOAD_URL%/cuda-11-7-windows-amd64.tar.gz -e --strip 1 -o %BIN_PATH%
move %BIN_PATH%cortex-server-beta.exe %BIN_PATH%cortex-server.exe
copy %BIN_PATH%cortex-server.exe %BIN_PATH%cortex-server-x86_64-pc-windows-msvc.exe
del %BIN_PATH%cortex-beta.exe
del %BIN_PATH%cortex.exe
@REM Loop through each folder and move DLLs (excluding engine.dll)
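@REM NOTE: SUBFOLDERS is only assigned in the commented-out line near the top of
@REM this script, so the loop below is currently a no-op unless that variable is
@REM re-enabled or provided by the environment.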
for %%F in (%SUBFOLDERS%) do (
echo Processing folder: .\engines\cortex.llamacpp\%%F\v%ENGINE_VERSION%
@REM Move all .dll files except engine.dll
for %%D in (.\engines\cortex.llamacpp\%%F\v%ENGINE_VERSION%\*.dll) do (
if /I not "%%~nxD"=="engine.dll" (
move "%%D" "%BIN_PATH%"
)
)
)
echo DLL files moved successfully.

72
src-tauri/binaries/download.sh Executable file
@@ -0,0 +1,72 @@
#!/bin/bash
download() {
URL="$1"
EXTRA_ARGS=("${@:2}")
# The last extra argument is the output directory.
OUTPUT_DIR="${EXTRA_ARGS[${#EXTRA_ARGS[@]}-1]}"
mkdir -p "$OUTPUT_DIR"
echo "Downloading $URL to $OUTPUT_DIR using curl..."
curl -L "$URL" -o "$OUTPUT_DIR/$(basename "$URL")"
tar -xzf "$OUTPUT_DIR/$(basename "$URL")" -C "$OUTPUT_DIR" --strip-components 1
rm "$OUTPUT_DIR/$(basename "$URL")"
}
# Read CORTEX_VERSION
CORTEX_VERSION=1.0.13-rc1
ENGINE_VERSION=0.1.55
CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download"
ENGINE_DOWNLOAD_URL="https://github.com/menloresearch/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}"
CUDA_DOWNLOAD_URL="https://github.com/menloresearch/cortex.llamacpp/releases/download/v${ENGINE_VERSION}"
BIN_PATH=./
SHARED_PATH="."
# Detect platform
OS_TYPE=$(uname)
if ls ./cortex-server* 1> /dev/null 2>&1; then
echo "cortex-server file with prefix already exists. Exiting."
exit 0
fi
if [ "$OS_TYPE" == "Linux" ]; then
# Linux downloads
download "${CORTEX_RELEASE_URL}/v${CORTEX_VERSION}/cortex-${CORTEX_VERSION}-linux-amd64.tar.gz" "${BIN_PATH}"
mv ./cortex-server-beta ./cortex-server
rm -rf ./cortex
rm -rf ./cortex-beta
chmod +x "./cortex-server"
cp ./cortex-server ./cortex-server-x86_64-unknown-linux-gnu
# Download engines for Linux
download "${ENGINE_DOWNLOAD_URL}-linux-amd64-noavx.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-noavx/v${ENGINE_VERSION}"
download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx/v${ENGINE_VERSION}"
download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx2.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx2/v${ENGINE_VERSION}"
download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx512.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx512/v${ENGINE_VERSION}"
# download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx2-cuda-12-0.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx2-cuda-12-0/v${ENGINE_VERSION}"
# download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx2-cuda-11-7.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx2-cuda-11-7/v${ENGINE_VERSION}"
# download "${ENGINE_DOWNLOAD_URL}-linux-amd64-noavx-cuda-12-0.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-noavx-cuda-12-0/v${ENGINE_VERSION}"
# download "${ENGINE_DOWNLOAD_URL}-linux-amd64-noavx-cuda-11-7.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-noavx-cuda-11-7/v${ENGINE_VERSION}"
download "${ENGINE_DOWNLOAD_URL}-linux-amd64-vulkan.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-vulkan/v${ENGINE_VERSION}"
# download "${CUDA_DOWNLOAD_URL}/cuda-12-0-linux-amd64.tar.gz" "${BIN_PATH}"
# download "${CUDA_DOWNLOAD_URL}/cuda-11-7-linux-amd64.tar.gz" "${BIN_PATH}"
elif [ "$OS_TYPE" == "Darwin" ]; then
# macOS downloads
download "${CORTEX_RELEASE_URL}/v${CORTEX_VERSION}/cortex-${CORTEX_VERSION}-mac-universal.tar.gz" "${BIN_PATH}"
mv ./cortex-server-beta ./cortex-server
rm -rf ./cortex
rm -rf ./cortex-beta
chmod +x "./cortex-server"
mv ./cortex-server ./cortex-server-universal-apple-darwin
cp ./cortex-server-universal-apple-darwin ./cortex-server-aarch64-apple-darwin
cp ./cortex-server-universal-apple-darwin ./cortex-server-x86_64-apple-darwin
# Download engines for macOS
download "${ENGINE_DOWNLOAD_URL}-mac-arm64.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/mac-arm64/v${ENGINE_VERSION}"
download "${ENGINE_DOWNLOAD_URL}-mac-amd64.tar.gz" "${SHARED_PATH}/engines/cortex.llamacpp/mac-amd64/v${ENGINE_VERSION}"
else
echo "Unsupported operating system: $OS_TYPE"
exit 1
fi

3
src-tauri/build.rs Normal file
@@ -0,0 +1,3 @@
fn main() {
tauri_build::build()
}

56
src-tauri/capabilities/default.json Normal file
@@ -0,0 +1,56 @@
{
"$schema": "../gen/schemas/desktop-schema.json",
"identifier": "default",
"description": "enables the default permissions",
"windows": ["main"],
"remote": {
"urls": ["http://*"]
},
"permissions": [
"core:default",
"core:window:allow-start-dragging",
"shell:allow-spawn",
"shell:allow-open",
"log:default",
{
"identifier": "http:default",
"allow": [
{
"url": "https://*:*"
},
{
"url": "http://*:*"
}
],
"deny": []
},
{
"identifier": "shell:allow-execute",
"allow": [
{
"args": [
"--start-server",
{
"validator": "\\S+"
},
"--port",
{
"validator": "\\S+"
},
"--config_file_path",
{
"validator": "\\S+"
},
"--data_folder_path",
{
"validator": "\\S+"
}
],
"name": "binaries/cortex-server",
"sidecar": true
}
]
},
"store:default"
]
}
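
The `shell:allow-execute` entry above whitelists exactly the argument shape used to launch the cortex-server sidecar; each `\\S+` validator admits a single non-whitespace value. A minimal sketch of a matching invocation through tauri-plugin-shell (the real call site is `setup_sidecar` in src-tauri/src/core/setup.rs further down; `app` is assumed to be an `&App` or `AppHandle`):

use tauri_plugin_shell::ShellExt;

let sidecar = app
    .shell()
    .sidecar("cortex-server") // resolves binaries/cortex-server-<target-triple>
    .expect("sidecar not configured")
    .args(["--start-server", "--port", "39291"]);
let (_events, _child) = sidecar.spawn().expect("failed to spawn sidecar");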

BIN
src-tauri/icons/icon.png Normal file (binary image, not shown; 38 KiB)

23
src-tauri/latest.json Normal file
@@ -0,0 +1,23 @@
{
"version": "",
"notes": "",
"pub_date": "",
"platforms": {
"linux-x86_64": {
"signature": "",
"url": ""
},
"windows-x86_64": {
"signature": "",
"url": ""
},
"darwin-aarch64": {
"signature": "",
"url": ""
},
"darwin-x86_64": {
"signature": "",
"url": ""
}
}
}
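
This is a tauri-plugin-updater manifest template; the empty fields are presumably populated by CI at release time (the release workflow is not part of this diff). A filled platform entry would take roughly this shape, with purely illustrative values:

"windows-x86_64": {
  "signature": "dW50cnVzdGVkIGNvbW1lbnQ6IHNpZ25hdHVyZSAuLi4=",
  "url": "https://github.com/menloresearch/jan/releases/download/v0.5.16/Jan_0.5.16_x64-setup.exe"
}

Here `signature` stands in for the updater signature generated with the Tauri signing key and `url` for the uploaded installer; both values are hypothetical.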

12
src-tauri/sign.ps1 Normal file
@@ -0,0 +1,12 @@
param (
[string]$Target
)
AzureSignTool.exe sign `
-tr http://timestamp.digicert.com `
-kvu $env:AZURE_KEY_VAULT_URI `
-kvi $env:AZURE_CLIENT_ID `
-kvt $env:AZURE_TENANT_ID `
-kvs $env:AZURE_CLIENT_SECRET `
-kvc $env:AZURE_CERT_NAME `
-v $Target

395
src-tauri/src/core/cmd.rs Normal file
@@ -0,0 +1,395 @@
use rmcp::model::{CallToolRequestParam, CallToolResult, Tool};
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use std::{fs, path::PathBuf};
use tauri::{AppHandle, Manager, Runtime, State};
use super::{server, setup, state::AppState};
const CONFIGURATION_FILE_NAME: &str = "settings.json";
const DEFAULT_MCP_CONFIG: &str = r#"{
"mcpServers": {}
}"#;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct AppConfiguration {
pub data_folder: String,
// Add other fields as needed
}
impl AppConfiguration {
pub fn default() -> Self {
Self {
data_folder: String::from("./data"), // Set a default value for the data_folder
// Add other fields with default values as needed
}
}
}
#[tauri::command]
pub fn get_app_configurations<R: Runtime>(app_handle: tauri::AppHandle<R>) -> AppConfiguration {
let mut app_default_configuration = AppConfiguration::default();
if std::env::var("CI").unwrap_or_default() == "e2e" {
return app_default_configuration;
}
let configuration_file = get_configuration_file_path(app_handle.clone());
let default_data_folder = default_data_folder_path(app_handle.clone());
if !configuration_file.exists() {
log::info!(
"App config not found, creating default config at {:?}",
configuration_file
);
app_default_configuration.data_folder = default_data_folder;
if let Err(err) = fs::write(
&configuration_file,
serde_json::to_string(&app_default_configuration).unwrap(),
) {
log::error!("Failed to create default config: {}", err);
}
return app_default_configuration;
}
match fs::read_to_string(&configuration_file) {
Ok(content) => match serde_json::from_str::<AppConfiguration>(&content) {
Ok(app_configurations) => app_configurations,
Err(err) => {
log::error!(
"Failed to parse app config, returning default config instead. Error: {}",
err
);
app_default_configuration
}
},
Err(err) => {
log::error!(
"Failed to read app config, returning default config instead. Error: {}",
err
);
app_default_configuration
}
}
}
#[tauri::command]
pub fn update_app_configuration(
app_handle: tauri::AppHandle,
configuration: AppConfiguration,
) -> Result<(), String> {
let configuration_file = get_configuration_file_path(app_handle);
log::info!(
"update_app_configuration, configuration_file: {:?}",
configuration_file
);
fs::write(
configuration_file,
serde_json::to_string(&configuration).map_err(|e| e.to_string())?,
)
.map_err(|e| e.to_string())
}
#[tauri::command]
pub fn get_jan_data_folder_path<R: Runtime>(app_handle: tauri::AppHandle<R>) -> PathBuf {
if cfg!(test) {
return PathBuf::from("./data");
}
let app_configurations = get_app_configurations(app_handle);
log::info!("data_folder: {}", app_configurations.data_folder);
PathBuf::from(app_configurations.data_folder)
}
#[tauri::command]
pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> PathBuf {
get_jan_data_folder_path(app_handle).join("extensions")
}
#[tauri::command]
pub fn get_themes(app_handle: tauri::AppHandle) -> Vec<String> {
let mut themes = vec![];
let themes_path = get_jan_data_folder_path(app_handle).join("themes");
if themes_path.exists() {
for entry in fs::read_dir(themes_path).unwrap() {
let entry = entry.unwrap();
if entry.path().is_dir() {
if let Some(name) = entry.file_name().to_str() {
themes.push(name.to_string());
}
}
}
}
themes
}
#[tauri::command]
pub fn read_theme(app_handle: tauri::AppHandle, theme_name: String) -> Result<String, String> {
let themes_path = get_jan_data_folder_path(app_handle)
.join("themes")
.join(theme_name.clone())
.join("theme.json");
if themes_path.exists() {
let content = fs::read_to_string(themes_path).map_err(|e| e.to_string())?;
Ok(content)
} else {
Err(format!("Theme {} not found", theme_name.clone()))
}
}
#[tauri::command]
pub fn get_configuration_file_path<R: Runtime>(app_handle: tauri::AppHandle<R>) -> PathBuf {
let app_path = app_handle.path().app_data_dir().unwrap_or_else(|err| {
log::error!(
"Failed to get app data directory: {}. Using home directory instead.",
err
);
let home_dir = std::env::var(if cfg!(target_os = "windows") {
"USERPROFILE"
} else {
"HOME"
})
.expect("Failed to determine the home directory");
PathBuf::from(home_dir)
});
let package_name = env!("CARGO_PKG_NAME");
log::info!("Package name: {}", package_name);
let old_data_dir = app_path
.clone()
.parent()
.unwrap_or(&app_path.join("../"))
.join(package_name);
if old_data_dir.exists() {
return old_data_dir.join(CONFIGURATION_FILE_NAME);
} else {
return app_path.join(CONFIGURATION_FILE_NAME);
}
}
#[tauri::command]
pub fn default_data_folder_path<R: Runtime>(app_handle: tauri::AppHandle<R>) -> String {
return app_handle
.path()
.app_data_dir()
.unwrap()
.to_str()
.unwrap()
.to_string();
}
#[tauri::command]
pub fn relaunch(app: AppHandle) {
app.restart()
}
#[tauri::command]
pub fn open_app_directory(app: AppHandle) {
let app_path = app.path().app_data_dir().unwrap();
if cfg!(target_os = "windows") {
std::process::Command::new("explorer")
.arg(app_path)
.spawn()
.expect("Failed to open app directory");
} else if cfg!(target_os = "macos") {
std::process::Command::new("open")
.arg(app_path)
.spawn()
.expect("Failed to open app directory");
} else {
std::process::Command::new("xdg-open")
.arg(app_path)
.spawn()
.expect("Failed to open app directory");
}
}
#[tauri::command]
pub fn open_file_explorer(path: String) {
let path = PathBuf::from(path);
if cfg!(target_os = "windows") {
std::process::Command::new("explorer")
.arg(path)
.spawn()
.expect("Failed to open file explorer");
} else if cfg!(target_os = "macos") {
std::process::Command::new("open")
.arg(path)
.spawn()
.expect("Failed to open file explorer");
} else {
std::process::Command::new("xdg-open")
.arg(path)
.spawn()
.expect("Failed to open file explorer");
}
}
#[tauri::command]
pub fn install_extensions(app: AppHandle) {
if let Err(err) = setup::install_extensions(app, true) {
log::error!("Failed to install extensions: {}", err);
}
}
#[tauri::command]
pub fn get_active_extensions(app: AppHandle) -> Vec<serde_json::Value> {
let mut path = get_jan_extensions_path(app);
path.push("extensions.json");
log::info!("get jan extensions, path: {:?}", path);
let contents = fs::read_to_string(path);
let contents: Vec<serde_json::Value> = match contents {
Ok(data) => match serde_json::from_str::<Vec<serde_json::Value>>(&data) {
Ok(exts) => exts
.into_iter()
.map(|ext| {
serde_json::json!({
"url": ext["url"],
"name": ext["name"],
"productName": ext["productName"],
"active": ext["_active"],
"description": ext["description"],
"version": ext["version"]
})
})
.collect(),
Err(_) => vec![],
},
Err(_) => vec![],
};
return contents;
}
#[tauri::command]
pub fn get_user_home_path(app: AppHandle) -> String {
return get_app_configurations(app.clone()).data_folder;
}
#[tauri::command]
pub fn app_token(state: State<'_, AppState>) -> Option<String> {
state.app_token.clone()
}
#[tauri::command]
pub async fn start_server(
app: AppHandle,
host: String,
port: u16,
prefix: String,
) -> Result<bool, String> {
server::start_server(host, port, prefix, app_token(app.state()).unwrap())
.await
.map_err(|e| e.to_string())?;
Ok(true)
}
#[tauri::command]
pub async fn stop_server() -> Result<(), String> {
server::stop_server().await.map_err(|e| e.to_string())?;
Ok(())
}
/// Retrieves all available tools from all MCP servers
///
/// # Arguments
/// * `state` - Application state containing MCP server connections
///
/// # Returns
/// * `Result<Vec<Tool>, String>` - A vector of all tools if successful, or an error message if failed
///
/// This function:
/// 1. Locks the MCP servers mutex to access server connections
/// 2. Iterates through all connected servers
/// 3. Gets the list of tools from each server
/// 4. Combines all tools into a single vector
/// 5. Returns the combined list of all available tools
#[tauri::command]
pub async fn get_tools(state: State<'_, AppState>) -> Result<Vec<Tool>, String> {
let servers = state.mcp_servers.lock().await;
let mut all_tools: Vec<Tool> = Vec::new();
for (_, service) in servers.iter() {
// List tools
let tools = service.list_all_tools().await.map_err(|e| e.to_string())?;
for tool in tools {
all_tools.push(tool);
}
}
Ok(all_tools)
}
/// Calls a tool on an MCP server by name with optional arguments
///
/// # Arguments
/// * `state` - Application state containing MCP server connections
/// * `tool_name` - Name of the tool to call
/// * `arguments` - Optional map of argument names to values
///
/// # Returns
/// * `Result<CallToolResult, String>` - Result of the tool call if successful, or error message if failed
///
/// This function:
/// 1. Locks the MCP servers mutex to access server connections
/// 2. Searches through all servers for one containing the named tool
/// 3. When found, calls the tool on that server with the provided arguments
/// 4. Returns error if no server has the requested tool
#[tauri::command]
pub async fn call_tool(
state: State<'_, AppState>,
tool_name: String,
arguments: Option<Map<String, Value>>,
) -> Result<CallToolResult, String> {
let servers = state.mcp_servers.lock().await;
// Iterate through servers and find the first one that contains the tool
for (_, service) in servers.iter() {
if let Ok(tools) = service.list_all_tools().await {
if tools.iter().any(|t| t.name == tool_name) {
return service
.call_tool(CallToolRequestParam {
name: tool_name.into(),
arguments,
})
.await
.map_err(|e| e.to_string());
}
}
}
Err(format!("Tool {} not found", tool_name))
}
#[tauri::command]
pub async fn get_mcp_configs(app: AppHandle) -> Result<String, String> {
let mut path = get_jan_data_folder_path(app);
path.push("mcp_config.json");
log::info!("read mcp configs, path: {:?}", path);
// Create default empty config if file doesn't exist
if !path.exists() {
log::info!("mcp_config.json not found, creating default empty config");
fs::write(&path, DEFAULT_MCP_CONFIG)
.map_err(|e| format!("Failed to create default MCP config: {}", e))?;
}
let contents = fs::read_to_string(path).map_err(|e| e.to_string())?;
return Ok(contents);
}
#[tauri::command]
pub async fn save_mcp_configs(app: AppHandle, configs: String) -> Result<(), String> {
let mut path = get_jan_data_folder_path(app);
path.push("mcp_config.json");
log::info!("save mcp configs, path: {:?}", path);
fs::write(path, configs).map_err(|e| e.to_string())
}
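
The `#[tauri::command]` functions above only become invokable from the webview once registered with the builder. A sketch of that registration, assuming it lives in the crate's entry point (the actual wiring is not shown in this diff):

tauri::Builder::default()
    .invoke_handler(tauri::generate_handler![
        get_app_configurations,
        update_app_configuration,
        get_tools,
        call_tool,
        start_server,
        stop_server,
        // ...plus the remaining commands from this module
    ])
    .run(tauri::generate_context!())
    .expect("error while running tauri application");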

198
src-tauri/src/core/fs.rs Normal file
@@ -0,0 +1,198 @@
// WARNING: These APIs will be deprecated soon due to removing FS API access from frontend.
// It's added to ensure the legacy implementation from frontend still functions before removal.
use crate::core::cmd::get_jan_data_folder_path;
use std::fs;
use std::path::PathBuf;
use tauri::Runtime;
#[tauri::command]
pub fn rm<R: Runtime>(app_handle: tauri::AppHandle<R>, args: Vec<String>) -> Result<(), String> {
if args.is_empty() || args[0].is_empty() {
return Err("rm error: Invalid argument".to_string());
}
let path = resolve_path(app_handle, &args[0]);
fs::remove_dir_all(&path).map_err(|e| e.to_string())
}
#[tauri::command]
pub fn mkdir<R: Runtime>(app_handle: tauri::AppHandle<R>, args: Vec<String>) -> Result<(), String> {
if args.is_empty() || args[0].is_empty() {
return Err("mkdir error: Invalid argument".to_string());
}
let path = resolve_path(app_handle, &args[0]);
fs::create_dir_all(&path).map_err(|e| e.to_string())
}
#[tauri::command]
pub fn join_path<R: Runtime>(
app_handle: tauri::AppHandle<R>,
args: Vec<String>,
) -> Result<String, String> {
if args.is_empty() {
return Err("join_path error: Invalid argument".to_string());
}
let path = resolve_path(app_handle, &args[0]);
let joined_path = path.join(args[1..].join("/"));
Ok(joined_path.to_string_lossy().to_string())
}
#[tauri::command]
pub fn exists_sync<R: Runtime>(
app_handle: tauri::AppHandle<R>,
args: Vec<String>,
) -> Result<bool, String> {
if args.is_empty() || args[0].is_empty() {
return Err("exist_sync error: Invalid argument".to_string());
}
let path = resolve_path(app_handle, &args[0]);
Ok(path.exists())
}
#[tauri::command]
pub fn read_file_sync<R: Runtime>(
app_handle: tauri::AppHandle<R>,
args: Vec<String>,
) -> Result<String, String> {
if args.is_empty() || args[0].is_empty() {
return Err("read_file_sync error: Invalid argument".to_string());
}
let path = resolve_path(app_handle, &args[0]);
fs::read_to_string(&path).map_err(|e| e.to_string())
}
#[tauri::command]
pub fn readdir_sync<R: Runtime>(
app_handle: tauri::AppHandle<R>,
args: Vec<String>,
) -> Result<Vec<String>, String> {
if args.is_empty() || args[0].is_empty() {
return Err("read_dir_sync error: Invalid argument".to_string());
}
let path = resolve_path(app_handle, &args[0]);
log::error!("Reading directory: {:?}", path);
let entries = fs::read_dir(&path).map_err(|e| e.to_string())?;
let paths: Vec<String> = entries
.filter_map(|entry| entry.ok())
.map(|entry| entry.path().to_string_lossy().to_string())
.collect();
Ok(paths)
}
fn normalize_file_path(path: &str) -> String {
path.replace("file:/", "").replace("file:\\", "")
}
fn resolve_path<R: Runtime>(app_handle: tauri::AppHandle<R>, path: &str) -> PathBuf {
let path = if path.starts_with("file:/") || path.starts_with("file:\\") {
let normalized = normalize_file_path(path);
let relative_normalized = normalized.strip_prefix("/").unwrap_or(&normalized);
get_jan_data_folder_path(app_handle).join(relative_normalized)
} else {
PathBuf::from(path)
};
if path.starts_with("http://") || path.starts_with("https://") {
path
} else {
path.canonicalize().unwrap_or(path)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::{self, File};
use std::io::Write;
use serde_json::to_string;
use tauri::test::mock_app;
#[test]
fn test_rm() {
let app = mock_app();
let path = "test_rm_dir";
fs::create_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap();
let args = vec![format!("file://{}", path).to_string()];
let result = rm(app.handle().clone(), args);
assert!(result.is_ok());
assert!(!get_jan_data_folder_path(app.handle().clone())
.join(path)
.exists());
}
#[test]
fn test_mkdir() {
let app = mock_app();
let path = "test_mkdir_dir";
let args = vec![format!("file://{}", path).to_string()];
let result = mkdir(app.handle().clone(), args);
assert!(result.is_ok());
assert!(get_jan_data_folder_path(app.handle().clone())
.join(path)
.exists());
fs::remove_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap();
}
#[test]
fn test_join_path() {
let app = mock_app();
let path = "file://test_dir";
let args = vec![path.to_string(), "test_file".to_string()];
let result = join_path(app.handle().clone(), args).unwrap();
assert_eq!(
result,
get_jan_data_folder_path(app.handle().clone())
.join("test_dir/test_file")
.to_string_lossy()
.to_string()
);
}
#[test]
fn test_exists_sync() {
let app = mock_app();
let path = "file://test_exists_sync_file";
let dir_path = get_jan_data_folder_path(app.handle().clone());
fs::create_dir_all(&dir_path).unwrap();
let file_path = dir_path.join("test_exists_sync_file");
File::create(&file_path).unwrap();
let args: Vec<String> = vec![path.to_string()];
let result = exists_sync(app.handle().clone(), args).unwrap();
assert!(result);
fs::remove_file(file_path).unwrap();
}
#[test]
fn test_read_file_sync() {
let app = mock_app();
let path = "file://test_read_file_sync_file";
let dir_path = get_jan_data_folder_path(app.handle().clone());
fs::create_dir_all(&dir_path).unwrap();
let file_path = dir_path.join("test_read_file_sync_file");
let mut file = File::create(&file_path).unwrap();
file.write_all(b"test content").unwrap();
let args = vec![path.to_string()];
let result = read_file_sync(app.handle().clone(), args).unwrap();
assert_eq!(result, "test content".to_string());
fs::remove_file(file_path).unwrap();
}
#[test]
fn test_readdir_sync() {
let app = mock_app();
let path = "file://test_readdir_sync_dir";
let dir_path = get_jan_data_folder_path(app.handle().clone()).join(path);
fs::create_dir_all(&dir_path).unwrap();
File::create(dir_path.join("file1.txt")).unwrap();
File::create(dir_path.join("file2.txt")).unwrap();
let args = vec![dir_path.to_string_lossy().to_string()];
let result = readdir_sync(app.handle().clone(), args).unwrap();
assert_eq!(result.len(), 2);
fs::remove_dir_all(dir_path).unwrap();
}
}
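
The `file://` prefix handled by `resolve_path` is a Jan-internal convention rather than a real URL scheme: the prefix is stripped and the remainder is re-rooted under the Jan data folder, while plain paths pass through untouched. Illustrative mappings, assuming a data folder of /home/user/jan (hypothetical path):

// "file://threads/t1/messages.jsonl" -> /home/user/jan/threads/t1/messages.jsonl
// "file://mcp_config.json"           -> /home/user/jan/mcp_config.json
// "/tmp/absolute"                    -> /tmp/absolute (no prefix, passed through)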

141
src-tauri/src/core/mcp.rs Normal file
@@ -0,0 +1,141 @@
use std::{collections::HashMap, sync::Arc};
use rmcp::{service::RunningService, transport::TokioChildProcess, RoleClient, ServiceExt};
use serde_json::Value;
use tauri::{AppHandle, State};
use tokio::{process::Command, sync::Mutex};
use super::{cmd::get_jan_data_folder_path, state::AppState};
/// Runs MCP commands by reading configuration from a JSON file and initializing servers
///
/// # Arguments
/// * `app_path` - Path to the application directory containing mcp_config.json
/// * `servers_state` - Shared state containing running MCP services
///
/// # Returns
/// * `Ok(())` if servers were initialized successfully
/// * `Err(String)` if there was an error reading config or starting servers
pub async fn run_mcp_commands(
app_path: String,
servers_state: Arc<Mutex<HashMap<String, RunningService<RoleClient, ()>>>>,
) -> Result<(), String> {
log::info!(
"Load MCP configs from {}",
app_path.clone() + "/mcp_config.json"
);
// let mut client_list = HashMap::new();
let config_content = std::fs::read_to_string(app_path.clone() + "/mcp_config.json")
.map_err(|e| format!("Failed to read config file: {}", e))?;
let mcp_servers: serde_json::Value = serde_json::from_str(&config_content)
.map_err(|e| format!("Failed to parse config: {}", e))?;
if let Some(server_map) = mcp_servers.get("mcpServers").and_then(Value::as_object) {
log::info!("MCP Servers: {server_map:#?}");
for (name, config) in server_map {
if let Some((command, args, envs)) = extract_command_args(config) {
let mut cmd = Command::new(command);
args.iter().filter_map(Value::as_str).for_each(|arg| {
cmd.arg(arg);
});
envs.iter().for_each(|(k, v)| {
if let Some(v_str) = v.as_str() {
cmd.env(k, v_str);
}
});
let service =
().serve(TokioChildProcess::new(&mut cmd).map_err(|e| e.to_string())?)
.await
.map_err(|e| e.to_string())?;
servers_state.lock().await.insert(name.clone(), service);
}
}
}
// Log peer info for each connected server (no awaits occur while this lock is held)
let servers_map = servers_state.lock().await;
for (_, service) in servers_map.iter() {
// Initialize
let _server_info = service.peer_info();
log::info!("Connected to server: {_server_info:#?}");
}
Ok(())
}
fn extract_command_args(
config: &Value,
) -> Option<(String, Vec<Value>, serde_json::Map<String, Value>)> {
let obj = config.as_object()?;
let command = obj.get("command")?.as_str()?.to_string();
let args = obj.get("args")?.as_array()?.clone();
let envs = obj
.get("env")
.unwrap_or(&Value::Object(serde_json::Map::new()))
.as_object()?
.clone();
Some((command, args, envs))
}
#[tauri::command]
pub async fn restart_mcp_servers(
app: AppHandle,
state: State<'_, AppState>,
) -> Result<(), String> {
let app_path = get_jan_data_folder_path(app.clone());
let app_path_str = app_path.to_str().unwrap().to_string();
let servers = state.mcp_servers.clone();
// Stop the servers
stop_mcp_servers(state.mcp_servers.clone()).await?;
// Restart the servers
run_mcp_commands(app_path_str, servers).await
}
pub async fn stop_mcp_servers(
servers_state: Arc<Mutex<HashMap<String, RunningService<RoleClient, ()>>>>,
) -> Result<(), String> {
let mut servers_map = servers_state.lock().await;
let keys: Vec<String> = servers_map.keys().cloned().collect();
for key in keys {
if let Some(service) = servers_map.remove(&key) {
service.cancel().await.map_err(|e| e.to_string())?;
}
}
drop(servers_map); // Release the lock after stopping
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;
use std::fs::File;
use std::io::Write;
use std::sync::Arc;
use tokio::sync::Mutex;
#[tokio::test]
async fn test_run_mcp_commands() {
// Create a mock mcp_config.json file
let config_path = "mcp_config.json";
let mut file = File::create(config_path).expect("Failed to create config file");
file.write_all(b"{\"mcpServers\":{}}")
.expect("Failed to write to config file");
// Call the run_mcp_commands function
let app_path = ".".to_string();
let servers_state: Arc<Mutex<HashMap<String, RunningService<RoleClient, ()>>>> =
Arc::new(Mutex::new(HashMap::new()));
let result = run_mcp_commands(app_path, servers_state).await;
// Assert that the function returns Ok(())
assert!(result.is_ok());
// Clean up the mock config file
std::fs::remove_file(config_path).expect("Failed to remove config file");
}
}
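
`extract_command_args` expects every entry under `mcpServers` to provide a `command` string, an `args` array, and optionally an `env` map. A hypothetical mcp_config.json illustrating that shape (the server name and package here are examples, not defaults shipped with Jan):

{
  "mcpServers": {
    "everything": {
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-everything"],
      "env": {}
    }
  }
}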

8
src-tauri/src/core/mod.rs Normal file
@@ -0,0 +1,8 @@
pub mod cmd;
pub mod fs;
pub mod mcp;
pub mod server;
pub mod setup;
pub mod state;
pub mod threads;
pub mod utils;

203
src-tauri/src/core/server.rs Normal file
@@ -0,0 +1,203 @@
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request, Response, Server, StatusCode};
use reqwest::Client;
use std::convert::Infallible;
use std::net::SocketAddr;
use std::sync::LazyLock;
use tokio::sync::Mutex;
use tokio::task::JoinHandle;
/// Server handle type for managing the proxy server lifecycle
type ServerHandle = JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>>;
/// Global singleton for the current server instance
static SERVER_HANDLE: LazyLock<Mutex<Option<ServerHandle>>> = LazyLock::new(|| Mutex::new(None));
/// Configuration for the proxy server
#[derive(Clone)]
struct ProxyConfig {
upstream: String,
prefix: String,
auth_token: String,
}
/// Removes a prefix from a path, ensuring proper formatting
fn remove_prefix(path: &str, prefix: &str) -> String {
log::debug!("Processing path: {}, removing prefix: {}", path, prefix);
if !prefix.is_empty() && path.starts_with(prefix) {
let result = path[prefix.len()..].to_string();
if result.is_empty() {
"/".to_string()
} else {
result
}
} else {
path.to_string()
}
}
/// Determines the final destination path based on the original request path
fn get_destination_path(original_path: &str, prefix: &str) -> String {
let removed_prefix_path = remove_prefix(original_path, prefix);
// Special paths don't need the /v1 prefix
if !original_path.contains(prefix)
|| removed_prefix_path.contains("/healthz")
|| removed_prefix_path.contains("/process")
{
original_path.to_string()
} else {
format!("/v1{}", removed_prefix_path)
}
}
/// Creates the full upstream URL for the proxied request
fn build_upstream_url(upstream: &str, path: &str) -> String {
let upstream_clean = upstream.trim_end_matches('/');
let path_clean = path.trim_start_matches('/');
format!("{}/{}", upstream_clean, path_clean)
}
/// Handles the proxy request logic
async fn proxy_request(
req: Request<Body>,
client: Client,
config: ProxyConfig,
) -> Result<Response<Body>, hyper::Error> {
let original_path = req.uri().path();
let path = get_destination_path(original_path, &config.prefix);
// Block access to /configs endpoint
if path.contains("/configs") {
return Ok(Response::builder()
.status(StatusCode::NOT_FOUND)
.body(Body::from("Not Found"))
.unwrap());
}
// Build the outbound request
let upstream_url = build_upstream_url(&config.upstream, &path);
log::debug!("Proxying request to: {}", upstream_url);
let mut outbound_req = client.request(req.method().clone(), &upstream_url);
// Copy original headers
for (name, value) in req.headers() {
if name != hyper::header::HOST {
// Skip host header
outbound_req = outbound_req.header(name, value);
}
}
// Add authorization header
outbound_req = outbound_req.header("Authorization", format!("Bearer {}", config.auth_token));
// Send the request and handle the response
match outbound_req.body(req.into_body()).send().await {
Ok(response) => {
let status = response.status();
log::debug!("Received response with status: {}", status);
let mut builder = Response::builder().status(status);
// Copy response headers
for (name, value) in response.headers() {
builder = builder.header(name, value);
}
// Read response body
match response.bytes().await {
Ok(bytes) => Ok(builder.body(Body::from(bytes)).unwrap()),
Err(e) => {
log::error!("Failed to read response body: {}", e);
Ok(Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(Body::from("Error reading upstream response"))
.unwrap())
}
}
}
Err(e) => {
log::error!("Proxy request failed: {}", e);
Ok(Response::builder()
.status(StatusCode::BAD_GATEWAY)
.body(Body::from(format!("Upstream error: {}", e)))
.unwrap())
}
}
}
/// Starts the proxy server
pub async fn start_server(
host: String,
port: u16,
prefix: String,
auth_token: String,
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
// Check if server is already running
let mut handle_guard = SERVER_HANDLE.lock().await;
if handle_guard.is_some() {
return Err("Server is already running".into());
}
// Create server address
let addr: SocketAddr = format!("{}:{}", host, port)
.parse()
.map_err(|e| format!("Invalid address: {}", e))?;
// Configure proxy settings
let config = ProxyConfig {
upstream: "http://127.0.0.1:39291".to_string(),
prefix,
auth_token,
};
// Create HTTP client
let client = Client::builder()
.timeout(std::time::Duration::from_secs(30))
.build()?;
// Create service handler
let make_svc = make_service_fn(move |_conn| {
let client = client.clone();
let config = config.clone();
async move {
Ok::<_, Infallible>(service_fn(move |req| {
proxy_request(req, client.clone(), config.clone())
}))
}
});
// Create and start the server
let server = Server::bind(&addr).serve(make_svc);
log::info!("Proxy server started on http://{}", addr);
// Spawn server task
let server_handle = tokio::spawn(async move {
if let Err(e) = server.await {
log::error!("Server error: {}", e);
return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>);
}
Ok(())
});
*handle_guard = Some(server_handle);
Ok(true)
}
/// Stops the currently running proxy server
pub async fn stop_server() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let mut handle_guard = SERVER_HANDLE.lock().await;
if let Some(handle) = handle_guard.take() {
handle.abort();
log::info!("Proxy server stopped");
} else {
log::debug!("No server was running");
}
Ok(())
}
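
The routing helpers above are pure functions, so their behavior can be pinned down directly. A test sketch consistent with the code (not part of the diff):

#[cfg(test)]
mod routing_tests {
    use super::*;

    #[test]
    fn maps_paths_like_the_proxy_does() {
        assert_eq!(remove_prefix("/v1/models", "/v1"), "/models");
        // Prefixed API paths are rewritten under /v1 for the upstream server.
        assert_eq!(get_destination_path("/v1/models", "/v1"), "/v1/models");
        // Health checks bypass the /v1 rewrite.
        assert_eq!(get_destination_path("/healthz", "/v1"), "/healthz");
        assert_eq!(
            build_upstream_url("http://127.0.0.1:39291/", "v1/models"),
            "http://127.0.0.1:39291/v1/models"
        );
    }
}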

301
src-tauri/src/core/setup.rs Normal file
@@ -0,0 +1,301 @@
use flate2::read::GzDecoder;
use std::{
fs::{self, File},
io::Read,
path::PathBuf,
sync::{Arc, Mutex},
};
use tar::Archive;
use tauri::{App, Listener, Manager};
use tauri_plugin_shell::process::CommandEvent;
use tauri_plugin_shell::ShellExt;
use tauri_plugin_store::StoreExt;
// MCP
use super::{
cmd::{get_jan_data_folder_path, get_jan_extensions_path},
mcp::run_mcp_commands,
state::AppState,
};
pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> {
let mut store_path = get_jan_data_folder_path(app.clone());
store_path.push("store.json");
let store = app.store(store_path).expect("Store not initialized");
let stored_version = store
.get("version")
.and_then(|v| v.as_str().map(String::from))
.unwrap_or_default();
let app_version = app
.config()
.version
.clone()
.unwrap_or_else(|| "".to_string());
if !force && stored_version == app_version {
return Ok(());
}
let extensions_path = get_jan_extensions_path(app.clone());
let pre_install_path = app
.path()
.resource_dir()
.unwrap()
.join("resources")
.join("pre-install");
// Attempt to remove extensions folder
if extensions_path.exists() {
fs::remove_dir_all(&extensions_path).unwrap_or_else(|_| {
log::info!("Failed to remove existing extensions folder, it may not exist.");
});
}
if !force {
return Ok(());
};
// Attempt to create it again
if !extensions_path.exists() {
fs::create_dir_all(&extensions_path).map_err(|e| e.to_string())?;
}
let extensions_json_path = extensions_path.join("extensions.json");
let mut extensions_list = if extensions_json_path.exists() {
let existing_data =
fs::read_to_string(&extensions_json_path).unwrap_or_else(|_| "[]".to_string());
serde_json::from_str::<Vec<serde_json::Value>>(&existing_data).unwrap_or_else(|_| vec![])
} else {
vec![]
};
for entry in fs::read_dir(&pre_install_path).map_err(|e| e.to_string())? {
let entry = entry.map_err(|e| e.to_string())?;
let path = entry.path();
if path.extension().map_or(false, |ext| ext == "tgz") {
log::info!("Installing extension from {:?}", path);
let tar_gz = File::open(&path).map_err(|e| e.to_string())?;
let gz_decoder = GzDecoder::new(tar_gz);
let mut archive = Archive::new(gz_decoder);
let mut extension_name = None;
let mut extension_manifest = None;
extract_extension_manifest(&mut archive)
.map_err(|e| e.to_string())
.and_then(|manifest| match manifest {
Some(manifest) => {
extension_name = manifest["name"].as_str().map(|s| s.to_string());
extension_manifest = Some(manifest);
Ok(())
}
None => Err("Manifest is None".to_string()),
})?;
let extension_name = extension_name.ok_or("package.json not found in archive")?;
let extension_dir = extensions_path.join(extension_name.clone());
fs::create_dir_all(&extension_dir).map_err(|e| e.to_string())?;
let tar_gz = File::open(&path).map_err(|e| e.to_string())?;
let gz_decoder = GzDecoder::new(tar_gz);
let mut archive = Archive::new(gz_decoder);
for entry in archive.entries().map_err(|e| e.to_string())? {
let mut entry = entry.map_err(|e| e.to_string())?;
let file_path = entry.path().map_err(|e| e.to_string())?;
let components: Vec<_> = file_path.components().collect();
if components.len() > 1 {
let relative_path: PathBuf = components[1..].iter().collect();
let target_path = extension_dir.join(relative_path);
if let Some(parent) = target_path.parent() {
fs::create_dir_all(parent).map_err(|e| e.to_string())?;
}
let _result = entry.unpack(&target_path).map_err(|e| e.to_string())?;
}
}
let main_entry = extension_manifest
.as_ref()
.and_then(|manifest| manifest["main"].as_str())
.unwrap_or("index.js");
let url = extension_dir.join(main_entry).to_string_lossy().to_string();
let new_extension = serde_json::json!({
"url": url,
"name": extension_name.clone(),
"origin": extension_dir.to_string_lossy(),
"active": true,
"description": extension_manifest
.as_ref()
.and_then(|manifest| manifest["description"].as_str())
.unwrap_or(""),
"version": extension_manifest
.as_ref()
.and_then(|manifest| manifest["version"].as_str())
.unwrap_or(""),
"productName": extension_manifest
.as_ref()
.and_then(|manifest| manifest["productName"].as_str())
.unwrap_or(""),
});
extensions_list.push(new_extension);
log::info!("Installed extension to {:?}", extension_dir);
}
}
fs::write(
&extensions_json_path,
serde_json::to_string_pretty(&extensions_list).map_err(|e| e.to_string())?,
)
.map_err(|e| e.to_string())?;
// Store the new app version
store.set("version", serde_json::json!(app_version));
store.save().expect("Failed to save store");
Ok(())
}
fn extract_extension_manifest<R: Read>(
archive: &mut Archive<R>,
) -> Result<Option<serde_json::Value>, String> {
let entry = archive
.entries()
.map_err(|e| e.to_string())?
.filter_map(|e| e.ok()) // Ignore errors in individual entries
.find(|entry| {
if let Ok(file_path) = entry.path() {
let path_str = file_path.to_string_lossy();
path_str == "package/package.json" || path_str == "package.json"
} else {
false
}
});
if let Some(mut entry) = entry {
let mut content = String::new();
entry
.read_to_string(&mut content)
.map_err(|e| e.to_string())?;
let package_json: serde_json::Value =
serde_json::from_str(&content).map_err(|e| e.to_string())?;
return Ok(Some(package_json));
}
Ok(None)
}
pub fn setup_mcp(app: &App) {
let app_path = get_jan_data_folder_path(app.handle().clone());
let state = app.state::<AppState>().inner();
let app_path_str = app_path.to_str().unwrap().to_string();
let servers = state.mcp_servers.clone();
tauri::async_runtime::spawn(async move {
if let Err(e) = run_mcp_commands(app_path_str, servers).await {
log::error!("Failed to run mcp commands: {}", e);
}
});
}
pub fn setup_sidecar(app: &App) -> Result<(), String> {
// Setup sidecar
let app_state = app.state::<AppState>();
let app_data_dir = get_jan_data_folder_path(app.handle().clone());
let mut sidecar_command = app.shell().sidecar("cortex-server").unwrap().args([
"--start-server",
"--port",
"39291",
"--config_file_path",
app_data_dir.join(".janrc").to_str().unwrap(),
"--data_folder_path",
app_data_dir.to_str().unwrap(),
"--cors",
"ON",
"--allowed_origins",
// TODO(sang) '*' is only for testing purpose, will remove it later
"http://localhost:3000,tauri://localhost,*",
"config",
"--api_keys",
app_state.inner().app_token.as_deref().unwrap_or(""),
]);
#[cfg(target_os = "windows")]
{
sidecar_command = sidecar_command.env("PATH", {
let app_data_dir = app.app_handle().path().app_data_dir().unwrap();
let dest = app_data_dir.to_str().unwrap();
let path = std::env::var("PATH").unwrap_or_default();
format!("{}{}{}", path, std::path::MAIN_SEPARATOR, dest)
});
}
#[cfg(not(target_os = "windows"))]
{
sidecar_command = sidecar_command.env("LD_LIBRARY_PATH", {
let app_data_dir = app.app_handle().path().app_data_dir().unwrap();
let dest = app_data_dir.to_str().unwrap();
let ld_library_path = std::env::var("LD_LIBRARY_PATH").unwrap_or_default();
format!("{}{}{}", ld_library_path, std::path::MAIN_SEPARATOR, dest)
});
}
let (mut rx, _child) = sidecar_command.spawn().expect("Failed to spawn sidecar");
let child = Arc::new(Mutex::new(Some(_child)));
let child_clone = child.clone();
tauri::async_runtime::spawn(async move {
// read events such as stdout
while let Some(event) = rx.recv().await {
if let CommandEvent::Stdout(line_bytes) = event {
let line = String::from_utf8_lossy(&line_bytes);
log::info!("Outputs: {:?}", line)
}
}
});
app.handle().listen("kill-sidecar", move |_| {
let mut child_guard = child_clone.lock().unwrap();
if let Some(actual_child) = child_guard.take() {
actual_child.kill().unwrap();
}
});
Ok(())
}
fn copy_dir_all(src: PathBuf, dst: PathBuf) -> Result<(), String> {
fs::create_dir_all(&dst).map_err(|e| e.to_string())?;
log::info!("Copying from {:?} to {:?}", src, dst);
for entry in fs::read_dir(src).map_err(|e| e.to_string())? {
let entry = entry.map_err(|e| e.to_string())?;
let ty = entry.file_type().map_err(|e| e.to_string())?;
if ty.is_dir() {
copy_dir_all(entry.path(), dst.join(entry.file_name())).map_err(|e| e.to_string())?;
} else {
fs::copy(entry.path(), dst.join(entry.file_name())).map_err(|e| e.to_string())?;
}
}
Ok(())
}
pub fn setup_engine_binaries(app: &App) -> Result<(), String> {
// Copy engine binaries to app_data
let app_data_dir = get_jan_data_folder_path(app.handle().clone());
let binaries_dir = app.handle().path().resource_dir().unwrap().join("binaries");
let themes_dir = app
.handle()
.path()
.resource_dir()
.unwrap()
.join("resources");
if let Err(e) = copy_dir_all(binaries_dir, app_data_dir.clone()) {
log::error!("Failed to copy binaries: {}", e);
}
if let Err(e) = copy_dir_all(themes_dir, app_data_dir.clone()) {
log::error!("Failed to copy themes: {}", e);
}
Ok(())
}
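
These setup routines are one-shot startup hooks. A sketch of how they would be wired into the builder (assumed wiring; the actual entry point is not in this diff). The `?` works here because `Box<dyn Error>` converts from the `String` errors these functions return:

tauri::Builder::default()
    .setup(|app| {
        setup::setup_mcp(app);              // spawn MCP servers asynchronously
        setup::setup_sidecar(app)?;         // launch cortex-server, wire log capture
        setup::setup_engine_binaries(app)?; // copy bundled binaries/themes to app data
        Ok(())
    });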

18
src-tauri/src/core/state.rs Normal file
@@ -0,0 +1,18 @@
use std::{collections::HashMap, sync::Arc};
use rand::{distributions::Alphanumeric, Rng};
use rmcp::{service::RunningService, RoleClient};
use tokio::sync::Mutex;
#[derive(Default)]
pub struct AppState {
pub app_token: Option<String>,
pub mcp_servers: Arc<Mutex<HashMap<String, RunningService<RoleClient, ()>>>>,
}
pub fn generate_app_token() -> String {
rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(32)
.map(char::from)
.collect()
}
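
`AppState` is meant to be registered as Tauri managed state so commands can borrow it via `State<'_, AppState>`. A sketch of that registration (assumed to happen before `run()`):

tauri::Builder::default()
    .manage(AppState {
        app_token: Some(generate_app_token()),
        ..Default::default() // mcp_servers starts out empty
    });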

613
src-tauri/src/core/threads.rs Normal file
@@ -0,0 +1,613 @@
/*!
Thread and Message Persistence Module
This module provides all logic for managing threads and their messages, including creation, modification, deletion, and listing.
Messages for each thread are persisted in a JSONL file (messages.jsonl) per thread directory.
**Concurrency and Consistency Guarantee:**
- All operations that write or modify messages for a thread are protected by a global, per-thread asynchronous lock.
- This design ensures that only one operation can write to a thread's messages.jsonl file at a time, preventing race conditions.
- As a result, the messages.jsonl file for each thread is always consistent and never corrupted, even under concurrent access.
*/
use serde::{Deserialize, Serialize};
use std::fs::{self, File};
use std::io::{BufRead, BufReader, Write};
use tauri::command;
use tauri::Runtime;
use uuid::Uuid;
// For async file write serialization
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::Mutex;
// Global per-thread locks for message file writes
static MESSAGE_LOCKS: Lazy<Mutex<HashMap<String, Arc<Mutex<()>>>>> =
Lazy::new(|| Mutex::new(HashMap::new()));
use super::utils::{
ensure_data_dirs, ensure_thread_dir_exists, get_data_dir, get_messages_path, get_thread_dir,
get_thread_metadata_path, THREADS_FILE,
};
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Thread {
pub id: String,
pub object: String,
pub title: String,
pub assistants: Vec<ThreadAssistantInfo>,
pub created: i64,
pub updated: i64,
pub metadata: Option<serde_json::Value>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ThreadMessage {
pub id: String,
pub object: String,
pub thread_id: String,
pub assistant_id: Option<String>,
pub attachments: Option<Vec<Attachment>>,
pub role: String,
pub content: Vec<ThreadContent>,
pub status: String,
pub created_at: i64,
pub completed_at: i64,
pub metadata: Option<serde_json::Value>,
pub type_: Option<String>,
pub error_code: Option<String>,
pub tool_call_id: Option<String>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Attachment {
pub file_id: Option<String>,
pub tools: Option<Vec<Tool>>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(tag = "type")]
pub enum Tool {
#[serde(rename = "file_search")]
FileSearch,
#[serde(rename = "code_interpreter")]
CodeInterpreter,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ThreadContent {
pub type_: String,
pub text: Option<ContentValue>,
pub image_url: Option<ImageContentValue>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ContentValue {
pub value: String,
pub annotations: Vec<String>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ImageContentValue {
pub detail: Option<String>,
pub url: Option<String>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ThreadAssistantInfo {
pub assistant_id: String,
pub assistant_name: String,
pub model: ModelInfo,
pub instructions: Option<String>,
pub tools: Option<Vec<AssistantTool>>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ModelInfo {
pub id: String,
pub name: String,
pub settings: serde_json::Value,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(tag = "type")]
pub enum AssistantTool {
#[serde(rename = "code_interpreter")]
CodeInterpreter,
#[serde(rename = "retrieval")]
Retrieval,
#[serde(rename = "function")]
Function {
name: String,
description: Option<String>,
parameters: Option<serde_json::Value>,
},
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ThreadState {
pub has_more: bool,
pub waiting_for_response: bool,
pub error: Option<String>,
pub last_message: Option<String>,
}
/// Lists all threads by reading their metadata from the threads directory.
/// Returns a vector of thread metadata as JSON values.
#[command]
pub async fn list_threads<R: Runtime>(
app_handle: tauri::AppHandle<R>,
) -> Result<Vec<serde_json::Value>, String> {
ensure_data_dirs(app_handle.clone())?;
let data_dir = get_data_dir(app_handle.clone());
let mut threads = Vec::new();
if !data_dir.exists() {
return Ok(threads);
}
for entry in fs::read_dir(&data_dir).map_err(|e| e.to_string())? {
let entry = entry.map_err(|e| e.to_string())?;
let path = entry.path();
if path.is_dir() {
let thread_metadata_path = path.join(THREADS_FILE);
if thread_metadata_path.exists() {
let data = fs::read_to_string(&thread_metadata_path).map_err(|e| e.to_string())?;
match serde_json::from_str(&data) {
Ok(thread) => threads.push(thread),
Err(e) => {
println!("Failed to parse thread file: {}", e);
continue; // skip invalid thread files
}
}
}
}
}
Ok(threads)
}
/// Creates a new thread, assigns it a unique ID, and persists its metadata.
/// Ensures the thread directory exists and writes thread.json.
#[command]
pub async fn create_thread<R: Runtime>(
app_handle: tauri::AppHandle<R>,
mut thread: serde_json::Value,
) -> Result<serde_json::Value, String> {
ensure_data_dirs(app_handle.clone())?;
let uuid = Uuid::new_v4().to_string();
thread["id"] = serde_json::Value::String(uuid.clone());
let thread_dir = get_thread_dir(app_handle.clone(), &uuid);
if !thread_dir.exists() {
fs::create_dir_all(&thread_dir).map_err(|e| e.to_string())?;
}
let path = get_thread_metadata_path(app_handle.clone(), &uuid);
let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
fs::write(path, data).map_err(|e| e.to_string())?;
Ok(thread)
}
/// Modifies an existing thread's metadata by overwriting its thread.json file.
/// Returns an error if the thread directory does not exist.
#[command]
pub async fn modify_thread<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread: serde_json::Value,
) -> Result<(), String> {
let thread_id = thread
.get("id")
.and_then(|id| id.as_str())
.ok_or("Missing thread id")?;
let thread_dir = get_thread_dir(app_handle.clone(), thread_id);
if !thread_dir.exists() {
return Err("Thread directory does not exist".to_string());
}
let path = get_thread_metadata_path(app_handle.clone(), thread_id);
let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
fs::write(path, data).map_err(|e| e.to_string())?;
Ok(())
}
/// Deletes a thread and all its associated files by removing its directory.
#[command]
pub async fn delete_thread<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
) -> Result<(), String> {
let thread_dir = get_thread_dir(app_handle.clone(), &thread_id);
if thread_dir.exists() {
fs::remove_dir_all(thread_dir).map_err(|e| e.to_string())?;
}
Ok(())
}
/// Lists all messages for a given thread by reading and parsing its messages.jsonl file.
/// Returns a vector of message JSON values.
#[command]
pub async fn list_messages<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
) -> Result<Vec<serde_json::Value>, String> {
let path = get_messages_path(app_handle, &thread_id);
if !path.exists() {
return Ok(vec![]);
}
let file = File::open(&path).map_err(|e| {
eprintln!("Error opening file {}: {}", path.display(), e);
e.to_string()
})?;
let reader = BufReader::new(file);
let mut messages = Vec::new();
for line in reader.lines() {
let line = line.map_err(|e| {
eprintln!("Error reading line from file {}: {}", path.display(), e);
e.to_string()
})?;
let message: serde_json::Value = serde_json::from_str(&line).map_err(|e| {
eprintln!(
"Error parsing JSON from line in file {}: {}",
path.display(),
e
);
e.to_string()
})?;
messages.push(message);
}
Ok(messages)
}
/// Appends a new message to a thread's messages.jsonl file.
/// Uses a per-thread async lock to prevent race conditions and ensure file consistency.
#[command]
pub async fn create_message<R: Runtime>(
app_handle: tauri::AppHandle<R>,
mut message: serde_json::Value,
) -> Result<serde_json::Value, String> {
let thread_id = {
let id = message
.get("thread_id")
.and_then(|v| v.as_str())
.ok_or("Missing thread_id")?;
id.to_string()
};
ensure_thread_dir_exists(app_handle.clone(), &thread_id)?;
let path = get_messages_path(app_handle.clone(), &thread_id);
if message.get("id").is_none() {
let uuid = Uuid::new_v4().to_string();
message["id"] = serde_json::Value::String(uuid);
}
// Acquire per-thread lock before writing
{
let mut locks = MESSAGE_LOCKS.lock().await;
let lock = locks
.entry(thread_id.to_string())
.or_insert_with(|| Arc::new(Mutex::new(())))
.clone();
drop(locks); // Release the map lock before awaiting the file lock
let _guard = lock.lock().await;
let mut file: File = fs::OpenOptions::new()
.create(true)
.append(true)
.open(path)
.map_err(|e| e.to_string())?;
let data = serde_json::to_string(&message).map_err(|e| e.to_string())?;
writeln!(file, "{}", data).map_err(|e| e.to_string())?;
}
Ok(message)
}
/// Modifies an existing message in a thread's messages.jsonl file.
/// Uses a per-thread async lock to prevent race conditions and ensure file consistency.
/// Rewrites the entire messages.jsonl file for the thread.
#[command]
pub async fn modify_message<R: Runtime>(
app_handle: tauri::AppHandle<R>,
message: serde_json::Value,
) -> Result<serde_json::Value, String> {
let thread_id = message
.get("thread_id")
.and_then(|v| v.as_str())
.ok_or("Missing thread_id")?;
let message_id = message
.get("id")
.and_then(|v| v.as_str())
.ok_or("Missing message id")?;
// Acquire per-thread lock before modifying
{
let mut locks = MESSAGE_LOCKS.lock().await;
let lock = locks
.entry(thread_id.to_string())
.or_insert_with(|| Arc::new(Mutex::new(())))
.clone();
drop(locks); // Release the map lock before awaiting the file lock
let _guard = lock.lock().await;
let mut messages = list_messages(app_handle.clone(), thread_id.to_string()).await?;
if let Some(index) = messages
.iter()
.position(|m| m.get("id").and_then(|v| v.as_str()) == Some(message_id))
{
messages[index] = message.clone();
// Rewrite all messages
let path = get_messages_path(app_handle.clone(), thread_id);
let mut file = File::create(path).map_err(|e| e.to_string())?;
for msg in messages {
let data = serde_json::to_string(&msg).map_err(|e| e.to_string())?;
writeln!(file, "{}", data).map_err(|e| e.to_string())?;
}
}
}
Ok(message)
}
/// Deletes a message from a thread's messages.jsonl file by message ID.
/// Rewrites the entire messages.jsonl file for the thread.
/// Uses a per-thread async lock to prevent race conditions and ensure file consistency.
#[command]
pub async fn delete_message<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
message_id: String,
) -> Result<(), String> {
// Acquire per-thread lock before modifying
{
let mut locks = MESSAGE_LOCKS.lock().await;
let lock = locks
.entry(thread_id.to_string())
.or_insert_with(|| Arc::new(Mutex::new(())))
.clone();
drop(locks); // Release the map lock before awaiting the file lock
let _guard = lock.lock().await;
let mut messages = list_messages(app_handle.clone(), thread_id.clone()).await?;
messages.retain(|m| m.get("id").and_then(|v| v.as_str()) != Some(message_id.as_str()));
// Rewrite remaining messages
let path = get_messages_path(app_handle.clone(), &thread_id);
let mut file = File::create(path).map_err(|e| e.to_string())?;
for msg in messages {
let data = serde_json::to_string(&msg).map_err(|e| e.to_string())?;
writeln!(file, "{}", data).map_err(|e| e.to_string())?;
}
}
Ok(())
}
/// Retrieves the first assistant associated with a thread.
/// Returns an error if the thread or assistant is not found.
#[command]
pub async fn get_thread_assistant<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
) -> Result<serde_json::Value, String> {
let path = get_thread_metadata_path(app_handle, &thread_id);
if !path.exists() {
return Err("Thread not found".to_string());
}
let data = fs::read_to_string(&path).map_err(|e| e.to_string())?;
let thread: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?;
if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) {
if let Some(first) = assistants.get(0) {
Ok(first.clone())
} else {
Err("Assistant not found".to_string())
}
} else {
Err("Assistant not found".to_string())
}
}
/// Adds a new assistant to a thread's metadata.
/// Updates thread.json with the new assistant information.
#[command]
pub async fn create_thread_assistant<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
assistant: serde_json::Value,
) -> Result<serde_json::Value, String> {
let path = get_thread_metadata_path(app_handle.clone(), &thread_id);
if !path.exists() {
return Err("Thread not found".to_string());
}
let mut thread: serde_json::Value = {
let data = fs::read_to_string(&path).map_err(|e| e.to_string())?;
serde_json::from_str(&data).map_err(|e| e.to_string())?
};
if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) {
assistants.push(assistant.clone());
} else {
thread["assistants"] = serde_json::Value::Array(vec![assistant.clone()]);
}
let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
fs::write(&path, data).map_err(|e| e.to_string())?;
Ok(assistant)
}
/// Modifies an existing assistant's information in a thread's metadata.
/// Updates thread.json with the modified assistant data.
#[command]
pub async fn modify_thread_assistant<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
assistant: serde_json::Value,
) -> Result<serde_json::Value, String> {
let path = get_thread_metadata_path(app_handle.clone(), &thread_id);
if !path.exists() {
return Err("Thread not found".to_string());
}
let mut thread: serde_json::Value = {
let data = fs::read_to_string(&path).map_err(|e| e.to_string())?;
serde_json::from_str(&data).map_err(|e| e.to_string())?
};
let assistant_id = assistant
.get("assistant_id")
.and_then(|v| v.as_str())
.ok_or("Missing assistant_id")?;
if let Some(assistants) = thread
.get_mut("assistants")
.and_then(|a: &mut serde_json::Value| a.as_array_mut())
{
if let Some(index) = assistants
.iter()
.position(|a| a.get("assistant_id").and_then(|v| v.as_str()) == Some(assistant_id))
{
assistants[index] = assistant.clone();
let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
fs::write(&path, data).map_err(|e| e.to_string())?;
}
}
Ok(assistant)
}
#[cfg(test)]
mod tests {
use crate::core::cmd::get_jan_data_folder_path;
use super::*;
use serde_json::json;
use std::fs;
use std::path::PathBuf;
use tauri::test::{mock_app, MockRuntime};
// Helper to create a mock app handle with a temp data dir
fn mock_app_with_temp_data_dir() -> (tauri::App<MockRuntime>, PathBuf) {
let app = mock_app();
let data_dir = get_jan_data_folder_path(app.handle().clone());
println!("Mock app data dir: {}", data_dir.display());
// Patch get_data_dir to use temp dir (requires get_data_dir to be overridable or injectable)
// For now, we assume get_data_dir uses tauri::api::path::app_data_dir(&app_handle)
// and that we can set the environment variable to redirect it.
(app, data_dir)
}
#[tokio::test]
async fn test_create_and_list_threads() {
let (app, data_dir) = mock_app_with_temp_data_dir();
// Create a thread
let thread = json!({
"object": "thread",
"title": "Test Thread",
"assistants": [],
"created": 1234567890,
"updated": 1234567890,
"metadata": null
});
let created = create_thread(app.handle().clone(), thread.clone())
.await
.unwrap();
assert_eq!(created["title"], "Test Thread");
// List threads
let threads = list_threads(app.handle().clone()).await.unwrap();
assert!(!threads.is_empty());
// Clean up
fs::remove_dir_all(data_dir).unwrap();
}
#[tokio::test]
async fn test_create_and_list_messages() {
let (app, data_dir) = mock_app_with_temp_data_dir();
// Create a thread first
let thread = json!({
"object": "thread",
"title": "Msg Thread",
"assistants": [],
"created": 123,
"updated": 123,
"metadata": null
});
let created = create_thread(app.handle().clone(), thread.clone())
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap().to_string();
// Create a message
let message = json!({
"object": "message",
"thread_id": thread_id,
"assistant_id": null,
"attachments": null,
"role": "user",
"content": [],
"status": "sent",
"created_at": 123,
"completed_at": 123,
"metadata": null,
"type_": null,
"error_code": null,
"tool_call_id": null
});
let created_msg = create_message(app.handle().clone(), message).await.unwrap();
assert_eq!(created_msg["role"], "user");
// List messages
let messages = list_messages(app.handle().clone(), thread_id.clone())
.await
.unwrap();
assert!(!messages.is_empty());
assert_eq!(messages[0]["role"], "user");
// Clean up
fs::remove_dir_all(data_dir).unwrap();
}
#[tokio::test]
async fn test_create_and_get_thread_assistant() {
let (app, data_dir) = mock_app_with_temp_data_dir();
// Create a thread
let thread = json!({
"object": "thread",
"title": "Assistant Thread",
"assistants": [],
"created": 1,
"updated": 1,
"metadata": null
});
let created = create_thread(app.handle().clone(), thread.clone())
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap().to_string();
// Add assistant
let assistant = json!({
"id": "assistant-1",
"assistant_name": "Test Assistant",
"model": {
"id": "model-1",
"name": "Test Model",
"settings": json!({})
},
"instructions": null,
"tools": null
});
let _ = create_thread_assistant(app.handle().clone(), thread_id.clone(), assistant.clone())
.await
.unwrap();
// Get assistant
let got = get_thread_assistant(app.handle().clone(), thread_id.clone())
.await
.unwrap();
assert_eq!(got["assistant_name"], "Test Assistant");
// Clean up
fs::remove_dir_all(data_dir).unwrap();
}
}


@ -0,0 +1,48 @@
use std::fs;
use std::path::PathBuf;
use tauri::Runtime;
use super::cmd::get_jan_data_folder_path;
pub const THREADS_DIR: &str = "threads";
pub const THREADS_FILE: &str = "thread.json";
pub const MESSAGES_FILE: &str = "messages.jsonl";
pub fn get_data_dir<R: Runtime>(app_handle: tauri::AppHandle<R>) -> PathBuf {
get_jan_data_folder_path(app_handle).join(THREADS_DIR)
}
pub fn get_thread_dir<R: Runtime>(app_handle: tauri::AppHandle<R>, thread_id: &str) -> PathBuf {
get_data_dir(app_handle).join(thread_id)
}
pub fn get_thread_metadata_path<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: &str,
) -> PathBuf {
get_thread_dir(app_handle, thread_id).join(THREADS_FILE)
}
pub fn get_messages_path<R: Runtime>(app_handle: tauri::AppHandle<R>, thread_id: &str) -> PathBuf {
get_thread_dir(app_handle, thread_id).join(MESSAGES_FILE)
}
pub fn ensure_data_dirs<R: Runtime>(app_handle: tauri::AppHandle<R>) -> Result<(), String> {
let data_dir = get_data_dir(app_handle.clone());
if !data_dir.exists() {
fs::create_dir_all(&data_dir).map_err(|e| e.to_string())?;
}
Ok(())
}
pub fn ensure_thread_dir_exists<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: &str,
) -> Result<(), String> {
ensure_data_dirs(app_handle.clone())?;
let thread_dir = get_thread_dir(app_handle, thread_id);
if !thread_dir.exists() {
fs::create_dir(&thread_dir).map_err(|e| e.to_string())?;
}
Ok(())
}
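
Taken together, these helpers fix the on-disk layout of thread data; a sketch (the root comes from get_jan_data_folder_path):

<jan-data-folder>/
  threads/                  <- THREADS_DIR
    <thread_id>/
      thread.json           <- THREADS_FILE, metadata incl. assistants
      messages.jsonl        <- MESSAGES_FILE, one message per line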

src-tauri/src/lib.rs

@ -0,0 +1,95 @@
mod core;
use core::{
cmd::get_jan_data_folder_path,
setup::{self, setup_engine_binaries, setup_mcp, setup_sidecar},
state::{generate_app_token, AppState},
};
use std::{collections::HashMap, sync::Arc};
use tauri::Emitter;
use tokio::sync::Mutex;
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
tauri::Builder::default()
.plugin(tauri_plugin_http::init())
.plugin(tauri_plugin_store::Builder::new().build())
.plugin(tauri_plugin_shell::init())
.invoke_handler(tauri::generate_handler![
// FS commands - to be deprecated soon
core::fs::join_path,
core::fs::mkdir,
core::fs::exists_sync,
core::fs::readdir_sync,
core::fs::read_file_sync,
core::fs::rm,
// App commands
core::cmd::get_themes,
core::cmd::get_app_configurations,
core::cmd::get_active_extensions,
core::cmd::get_user_home_path,
core::cmd::update_app_configuration,
core::cmd::get_jan_data_folder_path,
core::cmd::get_jan_extensions_path,
core::cmd::relaunch,
core::cmd::open_app_directory,
core::cmd::open_file_explorer,
core::cmd::install_extensions,
core::cmd::read_theme,
core::cmd::app_token,
core::cmd::start_server,
core::cmd::stop_server,
core::cmd::save_mcp_configs,
core::cmd::get_mcp_configs,
// MCP commands
core::cmd::get_tools,
core::cmd::call_tool,
core::mcp::restart_mcp_servers,
// Threads
core::threads::list_threads,
core::threads::create_thread,
core::threads::modify_thread,
core::threads::delete_thread,
core::threads::list_messages,
core::threads::create_message,
core::threads::modify_message,
core::threads::delete_message,
core::threads::get_thread_assistant,
core::threads::create_thread_assistant,
core::threads::modify_thread_assistant
])
.manage(AppState {
app_token: Some(generate_app_token()),
mcp_servers: Arc::new(Mutex::new(HashMap::new())),
})
.setup(|app| {
app.handle().plugin(
tauri_plugin_log::Builder::default()
.targets([if cfg!(debug_assertions) {
tauri_plugin_log::Target::new(tauri_plugin_log::TargetKind::Stdout)
} else {
tauri_plugin_log::Target::new(tauri_plugin_log::TargetKind::Folder {
path: get_jan_data_folder_path(app.handle().clone()).join("logs"),
file_name: Some("app".to_string()),
})
}])
.build(),
)?;
// Install extensions
if let Err(e) = setup::install_extensions(app.handle().clone(), false) {
log::error!("Failed to install extensions: {}", e);
}
setup_mcp(app);
setup_sidecar(app).expect("Failed to setup sidecar");
setup_engine_binaries(app).expect("Failed to setup engine binaries");
Ok(())
})
.on_window_event(|window, event| match event {
tauri::WindowEvent::CloseRequested { .. } => {
window.emit("kill-sidecar", ()).unwrap();
}
_ => {}
})
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
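
The CloseRequested handler above only emits a kill-sidecar event; whoever holds the sidecar has to subscribe to it. A minimal renderer-side sketch, assuming the Tauri v2 event API:

import { listen } from '@tauri-apps/api/event'

// Hypothetical listener: the backend emits `kill-sidecar` when a
// window close is requested (see on_window_event above).
const unlisten = await listen('kill-sidecar', () => {
  // tear down any sidecar-bound resources here (app-specific)
})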

src-tauri/src/main.rs

@ -0,0 +1,6 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
fn main() {
app_lib::run();
}

File diff suppressed because it is too large

src-tauri/tauri.conf.json

@ -0,0 +1,80 @@
{
"$schema": "https://schema.tauri.app/config/2",
"productName": "Jan",
"version": "0.5.16",
"identifier": "jan.ai.app",
"build": {
"frontendDist": "../web/out",
"devUrl": "http://localhost:3000",
"beforeDevCommand": "cross-env IS_TAURI=true yarn dev:web",
"beforeBuildCommand": "cross-env IS_TAURI=true yarn build:web"
},
"app": {
"macOSPrivateApi": true,
"windows": [
{
"title": "Jan",
"width": 1024,
"height": 768,
"resizable": true,
"fullscreen": false,
"hiddenTitle": true,
"transparent": true,
"titleBarStyle": "Overlay",
"windowEffects": {
"effects": ["fullScreenUI", "mica", "blur", "acrylic"],
"state": "active"
}
}
],
"security": {
"csp": {
"default-src": "'self' customprotocol: asset: http://localhost:* http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:*",
"connect-src": "ipc: http://ipc.localhost http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:* https://registry.npmjs.org",
"font-src": ["https://fonts.gstatic.com blob: data:"],
"img-src": "'self' asset: http://asset.localhost blob: data:",
"style-src": "'unsafe-inline' 'self' https://fonts.googleapis.com",
"script-src": "'self' asset: $APPDATA/**.* http://asset.localhost"
},
"assetProtocol": {
"enable": true,
"scope": {
"requireLiteralLeadingDot": false,
"allow": ["**/*"]
}
}
}
},
"plugins": {
"updater": {
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDJFNDEzMEVCMUEzNUFENDQKUldSRXJUVWE2ekJCTGc1Mm1BVXgrWmtES3huUlBFR0lCdG5qbWFvMzgyNDhGN3VTTko5Q1NtTW0K",
"endpoints": [
"https://github.com/menloresearch/jan/releases/latest/download/latest.json"
],
"windows": {
"installMode": "passive"
}
}
},
"bundle": {
"active": true,
"targets": ["nsis", "app", "dmg", "deb", "appimage"],
"createUpdaterArtifacts": false,
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
],
"resources": [
"binaries/engines/**/*",
"resources/themes/**/*",
"resources/pre-install/**/*"
],
"externalBin": ["binaries/cortex-server"],
"windows": {
"signCommand": "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"
}
}
}


@ -91,7 +91,7 @@ const SystemMonitor = () => {
<div className="unset-drag flex cursor-pointer items-center gap-x-2">
<div
className="flex cursor-pointer items-center gap-x-1 rounded px-1 py-0.5 hover:bg-[hsla(var(--secondary-bg))]"
onClick={() => onRevealInFinder('Logs')}
onClick={() => onRevealInFinder('logs')}
>
<FolderOpenIcon size={12} /> App Log
</div>


@ -70,9 +70,13 @@ const TopPanel = () => {
reduceTransparent &&
'border-b border-[hsla(var(--app-border))] bg-[hsla(var(--top-panel-bg))]'
)}
data-tauri-drag-region
>
{!isMac && <LogoMark width={24} height={24} className="-ml-1 mr-2" />}
<div className="flex w-full items-center justify-between text-[hsla(var(--text-secondary))]">
<div
className="flex w-full items-center justify-between text-[hsla(var(--text-secondary))]"
data-tauri-drag-region
>
<div className="unset-drag flex cursor-pointer gap-x-0.5">
{!isMac && (
<Button


@ -1,7 +1,8 @@
import React, { useEffect, useState } from 'react'
import React, { useEffect, useRef, useState } from 'react'
import { Button, Modal } from '@janhq/joi'
import { check, Update } from '@tauri-apps/plugin-updater'
import { useAtom } from 'jotai'
import { useGetLatestRelease } from '@/hooks/useGetLatestRelease'
@ -16,6 +17,7 @@ const ModalAppUpdaterChangelog = () => {
const [appUpdateAvailable, setAppUpdateAvailable] = useAtom(
appUpdateAvailableAtom
)
const updaterRef = useRef<Update | null>(null)
const [open, setOpen] = useState(appUpdateAvailable)
@ -26,6 +28,17 @@ const ModalAppUpdaterChangelog = () => {
const beta = VERSION.includes('beta')
const nightly = VERSION.includes('-')
const checkForUpdate = async () => {
const update = await check()
if (update) {
setAppUpdateAvailable(true)
updaterRef.current = update
}
}
useEffect(() => {
checkForUpdate()
}, [])
const { release } = useGetLatestRelease(beta ? true : false)
return (
@ -73,8 +86,8 @@ const ModalAppUpdaterChangelog = () => {
</Button>
<Button
autoFocus
onClick={() => {
window.core?.api?.appUpdateDownload()
onClick={async () => {
await updaterRef.current?.downloadAndInstall((event) => {})
setOpen(false)
setAppUpdateAvailable(false)
}}
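
The callback passed to downloadAndInstall above is left empty; the updater plugin reports download phases through it. A hedged sketch of what progress handling could look like, using the event shape from @tauri-apps/plugin-updater:

import { check } from '@tauri-apps/plugin-updater'

// Hypothetical progress reporting for the update download.
const update = await check()
if (update) {
  await update.downloadAndInstall((event) => {
    switch (event.event) {
      case 'Started':
        console.log(`downloading ${event.data.contentLength ?? 0} bytes`)
        break
      case 'Progress':
        console.log(`received chunk of ${event.data.chunkLength} bytes`)
        break
      case 'Finished':
        console.log('download complete, ready to restart')
        break
    }
  })
}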


@ -70,7 +70,7 @@ describe('AppLogs Component', () => {
const openButton = screen.getByText('Open')
userEvent.click(openButton)
expect(mockOnRevealInFinder).toHaveBeenCalledWith('Logs')
expect(mockOnRevealInFinder).toHaveBeenCalledWith('logs')
})
})


@ -41,7 +41,7 @@ const AppLogs = () => {
<Button
theme="ghost"
variant="outline"
onClick={() => onRevealInFinder('Logs')}
onClick={() => onRevealInFinder('logs')}
>
<div className="flex items-center space-x-2">
<>


@ -41,7 +41,7 @@ const CortexLogs = () => {
<Button
theme="ghost"
variant="outline"
onClick={() => onRevealInFinder('Logs')}
onClick={() => onRevealInFinder('logs')}
>
<div className="flex items-center space-x-2">
<>


@ -232,26 +232,6 @@ const ModelDropdown = ({
stopModel()
if (activeThread) {
// Change assistant tools based on whether the model supports RAG
updateThreadMetadata({
...activeThread,
assistants: [
{
...activeAssistant,
tools: [
{
type: 'retrieval',
enabled: model?.engine === InferenceEngine.cortex,
settings: {
...(activeAssistant.tools &&
activeAssistant.tools[0]?.settings),
},
},
],
},
],
})
const contextLength = model?.settings.ctx_len
? Math.min(8192, model?.settings.ctx_len ?? 8192)
: undefined
@ -273,11 +253,25 @@ const ModelDropdown = ({
// Update model parameter to the thread file
if (model)
updateModelParameter(activeThread, {
params: modelParams,
modelId: model.id,
engine: model.engine,
})
updateModelParameter(
activeThread,
{
params: modelParams,
modelId: model.id,
engine: model.engine,
},
// Update tools
[
{
type: 'retrieval',
enabled: model?.engine === InferenceEngine.cortex,
settings: {
...(activeAssistant.tools &&
activeAssistant.tools[0]?.settings),
},
},
]
)
}
},
[


@ -83,7 +83,7 @@ const ModelSearch = ({ onSearchLocal }: Props) => {
value={searchText}
clearable={searchText.length > 0}
onClear={onClear}
className="border-0 bg-[hsla(var(--app-bg))]"
className="bg-[hsla(var(--app-bg))]"
onClick={() => {
onSearchLocal?.(inputRef.current?.value ?? '')
}}


@ -24,7 +24,9 @@ export const CoreConfigurator = ({ children }: PropsWithChildren) => {
setTimeout(async () => {
if (!isCoreExtensionInstalled()) {
setSettingUp(true)
await setupBaseExtensions()
await new Promise((resolve) => setTimeout(resolve, 500))
setupBaseExtensions()
return
}


@ -114,7 +114,7 @@ export default function ModelHandler() {
const onNewMessageResponse = useCallback(
async (message: ThreadMessage) => {
if (message.type === MessageRequestType.Thread) {
if (message.type !== MessageRequestType.Summary) {
addNewMessage(message)
}
},
@ -129,35 +129,20 @@ export default function ModelHandler() {
const updateThreadTitle = useCallback(
(message: ThreadMessage) => {
// Update only when it's finished
if (message.status !== MessageStatus.Ready) {
return
}
if (message.status !== MessageStatus.Ready) return
const thread = threadsRef.current?.find((e) => e.id == message.thread_id)
if (!thread) {
console.warn(
`Failed to update title for thread ${message.thread_id}: Thread not found!`
)
return
}
let messageContent = message.content[0]?.text?.value
if (!messageContent) {
console.warn(
`Failed to update title for thread ${message.thread_id}: Responded content is null!`
)
return
}
if (!thread || !messageContent) return
// Newline characters must not appear in the title,
// and non-alphanumeric characters should be removed
if (messageContent.includes('\n')) {
if (messageContent.includes('\n'))
messageContent = messageContent.replace(/\n/g, ' ')
}
const match = messageContent.match(/<\/think>(.*)$/)
if (match) {
messageContent = match[1]
}
if (match) messageContent = match[1]
// Remove non-alphanumeric characters
const cleanedMessageContent = messageContent
.replace(/[^\p{L}\s]+/gu, '')
@ -193,18 +178,13 @@ export default function ModelHandler() {
const updateThreadMessage = useCallback(
(message: ThreadMessage) => {
if (
messageGenerationSubscriber.current &&
message.thread_id === activeThreadRef.current?.id &&
!messageGenerationSubscriber.current!.thread_id
) {
updateMessage(
message.id,
message.thread_id,
message.content,
message.status
)
}
updateMessage(
message.id,
message.thread_id,
message.content,
message.metadata,
message.status
)
if (message.status === MessageStatus.Pending) {
if (message.content.length) {
@ -236,82 +216,66 @@ export default function ModelHandler() {
model: activeModelRef.current?.name,
}
})
return
} else if (
message.status === MessageStatus.Error &&
activeModelRef.current?.engine &&
engines &&
isLocalEngine(engines, activeModelRef.current.engine)
) {
;(async () => {
if (
!(await extensionManager
.get<ModelExtension>(ExtensionTypeEnum.Model)
?.isModelLoaded(activeModelRef.current?.id as string))
) {
setActiveModel(undefined)
setStateModel({ state: 'start', loading: false, model: undefined })
}
})()
}
// Mark the thread as not waiting for response
updateThreadWaiting(message.thread_id, false)
} else {
// Mark the thread as not waiting for response
updateThreadWaiting(message.thread_id, false)
setIsGeneratingResponse(false)
setIsGeneratingResponse(false)
const thread = threadsRef.current?.find((e) => e.id == message.thread_id)
if (!thread) return
const thread = threadsRef.current?.find(
(e) => e.id == message.thread_id
)
if (!thread) return
const messageContent = message.content[0]?.text?.value
const messageContent = message.content[0]?.text?.value
const metadata = {
...thread.metadata,
...(messageContent && { lastMessage: messageContent }),
updated_at: Date.now(),
}
const metadata = {
...thread.metadata,
...(messageContent && { lastMessage: messageContent }),
updated_at: Date.now(),
}
updateThread({
...thread,
metadata,
})
extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.modifyThread({
updateThread({
...thread,
metadata,
})
// Update message's metadata with token usage
message.metadata = {
...message.metadata,
token_speed: tokenSpeedRef.current?.tokenSpeed,
model: activeModelRef.current?.name,
}
extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.modifyThread({
...thread,
metadata,
})
if (message.status === MessageStatus.Error) {
// Update message's metadata with token usage
message.metadata = {
...message.metadata,
error: message.content[0]?.text?.value,
error_code: message.error_code,
token_speed: tokenSpeedRef.current?.tokenSpeed,
model: activeModelRef.current?.name,
}
}
;(async () => {
const updatedMessage = await extensionManager
if (message.status === MessageStatus.Error) {
message.metadata = {
...message.metadata,
error: message.content[0]?.text?.value,
error_code: message.error_code,
}
// Unassign active model if any
setActiveModel(undefined)
setStateModel({
state: 'start',
loading: false,
model: undefined,
})
}
extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.createMessage(message)
.catch(() => undefined)
if (updatedMessage) {
deleteMessage(message.id)
addNewMessage(updatedMessage)
setTokenSpeed((prev) =>
prev ? { ...prev, message: updatedMessage.id } : undefined
)
}
})()
// Attempt to generate the title of the Thread when needed
generateThreadTitle(message, thread)
// Attempt to generate the title of the Thread when needed
generateThreadTitle(message, thread)
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[setIsGeneratingResponse, updateMessage, updateThread, updateThreadWaiting]
@ -319,25 +283,21 @@ export default function ModelHandler() {
const onMessageResponseUpdate = useCallback(
(message: ThreadMessage) => {
switch (message.type) {
case MessageRequestType.Summary:
updateThreadTitle(message)
break
default:
updateThreadMessage(message)
break
}
if (message.type === MessageRequestType.Summary)
updateThreadTitle(message)
else updateThreadMessage(message)
},
[updateThreadMessage, updateThreadTitle]
)
const generateThreadTitle = (message: ThreadMessage, thread: Thread) => {
// If this is the first ever prompt in the thread
if ((thread.title ?? thread.metadata?.title)?.trim() !== defaultThreadTitle)
if (
!activeModelRef.current ||
(thread.title ?? thread.metadata?.title)?.trim() !== defaultThreadTitle
)
return
if (!activeModelRef.current) return
// Check the model engine; we don't want to generate a title when it's not a local engine. Remote models use the first prompt instead.
if (
activeModelRef.current?.engine !== InferenceEngine.cortex &&


@ -103,7 +103,7 @@ const ServerLogs = (props: ServerLogsProps) => {
<Button
theme="ghost"
variant="outline"
onClick={() => onRevealInFinder('Logs')}
onClick={() => onRevealInFinder('logs')}
>
<div className="flex items-center space-x-2">
<>


@ -2,8 +2,9 @@
import { AIEngine, BaseExtension, ExtensionTypeEnum } from '@janhq/core'
import Extension from './Extension'
import { convertFileSrc } from '@tauri-apps/api/core'
import Extension from './Extension'
/**
* Manages the registration and retrieval of extensions.
*/
@ -123,33 +124,30 @@ export class ExtensionManager {
*/
async activateExtension(extension: Extension) {
// Import class
const extensionUrl = window.electronAPI
? extension.url
: extension.url.replace(
'extension://',
`${window.core?.api?.baseApiUrl ?? ''}/extensions/`
)
await import(/* webpackIgnore: true */ extensionUrl).then(
(extensionClass) => {
// Register class if it has a default export
if (
typeof extensionClass.default === 'function' &&
extensionClass.default.prototype
) {
this.register(
const extensionUrl = extension.url
await import(
/* webpackIgnore: true */ IS_TAURI
? convertFileSrc(extensionUrl)
: extensionUrl
).then((extensionClass) => {
// Register class if it has a default export
if (
typeof extensionClass.default === 'function' &&
extensionClass.default.prototype
) {
this.register(
extension.name,
new extensionClass.default(
extension.url,
extension.name,
new extensionClass.default(
extension.url,
extension.name,
extension.productName,
extension.active,
extension.description,
extension.version
)
extension.productName,
extension.active,
extension.description,
extension.version
)
}
)
}
)
})
}
/**
@ -158,7 +156,7 @@ export class ExtensionManager {
*/
async registerActive() {
// Get active extensions
const activeExtensions = await this.getActive()
const activeExtensions = (await this.getActive()) ?? []
// Activate all
await Promise.all(
activeExtensions.map((ext: Extension) => this.activateExtension(ext))


@ -165,6 +165,7 @@ export const updateMessageAtom = atom(
id: string,
conversationId: string,
text: ThreadContent[],
metadata: Record<string, unknown> | undefined,
status: MessageStatus
) => {
const messages = get(chatMessages)[conversationId] ?? []
@ -172,6 +173,7 @@ export const updateMessageAtom = atom(
if (message) {
message.content = text
message.status = status
message.metadata = metadata
const updatedMessages = [...messages]
const newData: Record<string, ThreadMessage[]> = {
@ -192,6 +194,7 @@ export const updateMessageAtom = atom(
created_at: Date.now() / 1000,
completed_at: Date.now() / 1000,
object: 'thread.message',
metadata: metadata,
})
}
}


@ -24,7 +24,7 @@ export const themesOptionsAtom = atomWithStorage<
export const selectedThemeIdAtom = atomWithStorage<string>(
THEME,
'',
'joi-light',
undefined,
{ getOnInit: true }
)
@ -36,7 +36,7 @@ export const themeDataAtom = atomWithStorage<Theme | undefined>(
)
export const reduceTransparentAtom = atomWithStorage<boolean>(
REDUCE_TRANSPARENT,
false,
true,
undefined,
{ getOnInit: true }
)


@ -180,7 +180,7 @@ export const useCreateNewThread = () => {
updateThreadCallback(thread)
if (thread.assistants && thread.assistants?.length > 0) {
setActiveAssistant(thread.assistants[0])
updateAssistantCallback(thread.id, thread.assistants[0])
return updateAssistantCallback(thread.id, thread.assistants[0])
}
},
[


@ -9,8 +9,6 @@ import { extensionManager } from '@/extension/ExtensionManager'
import { useCreateNewThread } from './useCreateNewThread'
import { Thread } from '@janhq/core/dist/types/types'
import { currentPromptAtom } from '@/containers/Providers/Jotai'
import { setActiveThreadIdAtom, deleteThreadStateAtom } from '@/helpers/atoms/Thread.atom'
import { deleteChatMessageAtom as deleteChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
// Mock the necessary dependencies
// Mock dependencies
jest.mock('jotai', () => ({
@ -44,6 +42,7 @@ describe('useDeleteThread', () => {
extensionManager.get = jest.fn().mockReturnValue({
deleteThread: mockDeleteThread,
getThreadAssistant: jest.fn().mockResolvedValue({}),
})
const { result } = renderHook(() => useDeleteThread())


@ -38,12 +38,13 @@ export default function useDeleteThread() {
?.listMessages(threadId)
.catch(console.error)
if (messages) {
messages.forEach((message) => {
extensionManager
for (const message of messages) {
await extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.deleteMessage(threadId, message.id)
.catch(console.error)
})
}
const thread = threads.find((e) => e.id === threadId)
if (thread) {
const updatedThread = {


@ -20,9 +20,7 @@ jest.mock('@janhq/core', () => ({
EngineManager: {
instance: jest.fn().mockReturnValue({
get: jest.fn(),
engines: {
values: jest.fn().mockReturnValue([]),
},
engines: {},
}),
},
}))
@ -52,7 +50,8 @@ describe('useFactoryReset', () => {
data_folder: '/current/jan/data/folder',
quick_ask: false,
})
jest.spyOn(global, 'setTimeout')
// @ts-ignore
jest.spyOn(global, 'setTimeout').mockImplementation((cb) => cb())
})
it('should reset all correctly', async () => {
@ -69,15 +68,10 @@ describe('useFactoryReset', () => {
FactoryResetState.StoppingModel
)
expect(mockStopModel).toHaveBeenCalled()
expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 4000)
expect(mockSetFactoryResetState).toHaveBeenCalledWith(
FactoryResetState.DeletingData
)
expect(fs.rm).toHaveBeenCalledWith('/current/jan/data/folder')
expect(mockUpdateAppConfiguration).toHaveBeenCalledWith({
data_folder: '/default/jan/data/folder',
quick_ask: false,
})
expect(fs.rm).toHaveBeenCalledWith({ args: ['/current/jan/data/folder'] })
expect(mockSetFactoryResetState).toHaveBeenCalledWith(
FactoryResetState.ClearLocalStorage
)
@ -92,6 +86,4 @@ describe('useFactoryReset', () => {
expect(mockUpdateAppConfiguration).not.toHaveBeenCalled()
})
// Add more tests as needed for error cases, edge cases, etc.
})


@ -39,18 +39,16 @@ export default function useFactoryReset() {
await stopModel()
await Promise.all(
EngineManager.instance()
.engines.values()
.map(async (engine) => {
await engine.onUnload()
})
Object.values(EngineManager.instance().engines).map(async (engine) => {
await engine.onUnload()
})
)
await new Promise((resolve) => setTimeout(resolve, 4000))
// 2: Delete the old jan data folder
setFactoryResetState(FactoryResetState.DeletingData)
await fs.rm(janDataFolderPath)
await fs.rm({ args: [janDataFolderPath] })
// 3: Set the default jan data folder
if (!keepCurrentFolder) {
@ -60,20 +58,19 @@ export default function useFactoryReset() {
quick_ask: appConfiguration?.quick_ask ?? false,
distinct_id: appConfiguration?.distinct_id,
}
await window.core?.api?.updateAppConfiguration(configuration)
await window.core?.api?.updateAppConfiguration({ configuration })
}
// Perform factory reset
await window.core?.api?.factoryReset()
// await window.core?.api?.factoryReset()
// 4: Clear app local storage
setFactoryResetState(FactoryResetState.ClearLocalStorage)
// reset the localStorage
localStorage.clear()
window.core = undefined
// 5: Relaunch the app
window.location.reload()
window.core.api.relaunch()
},
[defaultJanDataFolder, stopModel, setFactoryResetState]
)


@ -4,7 +4,6 @@ import { fs, joinPath } from '@janhq/core'
import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { useLoadTheme } from './useLoadTheme'
import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom'
import {
selectedThemeIdAtom,
themeDataAtom,
@ -27,8 +26,6 @@ describe('useLoadTheme', () => {
jest.clearAllMocks()
})
const mockJanDataFolderPath = '/mock/path'
const mockThemesPath = '/mock/path/themes'
const mockSelectedThemeId = 'joi-light'
const mockThemeData = {
id: 'joi-light',
@ -40,11 +37,17 @@ describe('useLoadTheme', () => {
}
it('should load theme and set variables', async () => {
const readTheme = jest.fn().mockResolvedValue("{}")
global.window.core = {
api: {
getThemes: () => ['joi-light', 'joi-dark'],
readTheme,
},
}
// Mock Jotai hooks
;(useAtomValue as jest.Mock).mockImplementation((atom) => {
switch (atom) {
case janDataFolderPathAtom:
return mockJanDataFolderPath
default:
return undefined
}
@ -72,15 +75,6 @@ describe('useLoadTheme', () => {
const mockSetTheme = jest.fn()
;(useTheme as jest.Mock).mockReturnValue({ setTheme: mockSetTheme })
// Mock window.electronAPI
Object.defineProperty(window, 'electronAPI', {
value: {
setNativeThemeLight: jest.fn(),
setNativeThemeDark: jest.fn(),
},
writable: true,
})
const { result } = renderHook(() => useLoadTheme())
await act(async () => {
@ -88,18 +82,11 @@ describe('useLoadTheme', () => {
})
// Assertions
expect(fs.readdirSync).toHaveBeenCalledWith(mockThemesPath)
expect(fs.readFileSync).toHaveBeenCalledWith(
`${mockThemesPath}/${mockSelectedThemeId}/theme.json`,
'utf-8'
)
expect(mockSetTheme).toHaveBeenCalledWith('light')
expect(window.electronAPI.setNativeThemeLight).toHaveBeenCalled()
expect(readTheme).toHaveBeenLastCalledWith({ themeName: 'joi-light' })
})
it('should set default theme if no selected theme', async () => {
// Mock Jotai hooks with empty selected theme
;(useAtomValue as jest.Mock).mockReturnValue(mockJanDataFolderPath)
;(useSetAtom as jest.Mock).mockReturnValue(jest.fn())
;(useAtom as jest.Mock).mockReturnValue(['', jest.fn()])
;(useAtom as jest.Mock).mockReturnValue([{}, jest.fn()])


@ -2,13 +2,10 @@ import { useCallback, useEffect } from 'react'
import { useTheme } from 'next-themes'
import { fs, joinPath } from '@janhq/core'
import { useAtom, useAtomValue } from 'jotai'
import { useAtom } from 'jotai'
import cssVars from '@/utils/jsonToCssVariables'
import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom'
import {
selectedThemeIdAtom,
themeDataAtom,
@ -18,7 +15,6 @@ import {
type NativeThemeProps = 'light' | 'dark'
export const useLoadTheme = () => {
const janDataFolderPath = useAtomValue(janDataFolderPathAtom)
const [themeOptions, setThemeOptions] = useAtom(themesOptionsAtom)
const [themeData, setThemeData] = useAtom(themeDataAtom)
const [selectedIdTheme, setSelectedIdTheme] = useAtom(selectedThemeIdAtom)
@ -26,12 +22,14 @@ export const useLoadTheme = () => {
const setNativeTheme = useCallback(
(nativeTheme: NativeThemeProps) => {
if (!window.electronAPI) return
if (nativeTheme === 'dark') {
window?.electronAPI?.setNativeThemeDark()
window?.core?.api?.setNativeThemeDark()
setTheme('dark')
localStorage.setItem('nativeTheme', 'dark')
} else {
window?.electronAPI?.setNativeThemeLight()
window?.core?.api?.setNativeThemeLight()
setTheme('light')
localStorage.setItem('nativeTheme', 'light')
}
@ -40,6 +38,7 @@ export const useLoadTheme = () => {
)
const applyTheme = (theme: Theme) => {
if (!theme.variables) return
const variables = cssVars(theme.variables)
const headTag = document.getElementsByTagName('head')[0]
const styleTag = document.createElement('style')
@ -48,45 +47,32 @@ export const useLoadTheme = () => {
}
const getThemes = useCallback(async () => {
if (!janDataFolderPath.length) return
const folderPath = await joinPath([janDataFolderPath, 'themes'])
const installedThemes = await fs.readdirSync(folderPath)
const installedThemes = await window.core.api.getThemes()
const themesOptions: { name: string; value: string }[] = installedThemes
.filter((x: string) => x !== '.DS_Store')
.map(async (x: string) => {
const y = await joinPath([`${folderPath}/${x}`, `theme.json`])
const c: Theme = JSON.parse(await fs.readFileSync(y, 'utf-8'))
return { name: c?.displayName, value: c.id }
const themesOptions: { name: string; value: string }[] =
installedThemes.map((x: string) => ({
name: x
.replace(/-/g, ' ')
.replace(/\b\w/g, (char) => char.toUpperCase()),
value: x,
}))
setThemeOptions(themesOptions)
if (!selectedIdTheme.length) return setSelectedIdTheme('joi-light')
const theme: Theme = JSON.parse(
await window.core.api.readTheme({
themeName: selectedIdTheme,
})
Promise.all(themesOptions).then((results) => {
setThemeOptions(results)
})
)
if (janDataFolderPath.length > 0) {
if (!selectedIdTheme.length) return setSelectedIdTheme('joi-light')
const filePath = await joinPath([
`${folderPath}/${selectedIdTheme}`,
`theme.json`,
])
const theme: Theme = JSON.parse(await fs.readFileSync(filePath, 'utf-8'))
setThemeData(theme)
setNativeTheme(theme.nativeTheme)
applyTheme(theme)
}
}, [
janDataFolderPath,
selectedIdTheme,
setNativeTheme,
setSelectedIdTheme,
setThemeData,
setThemeOptions,
])
setThemeData(theme)
setNativeTheme(theme.nativeTheme)
applyTheme(theme)
}, [selectedIdTheme])
const configureTheme = useCallback(async () => {
if (!themeData || !themeOptions) {
await getThemes()
getThemes()
} else {
applyTheme(themeData)
}
@ -95,11 +81,9 @@ export const useLoadTheme = () => {
useEffect(() => {
configureTheme()
}, [
configureTheme,
selectedIdTheme,
setNativeTheme,
setSelectedIdTheme,
themeData?.nativeTheme,
])
}, [themeData])
useEffect(() => {
getThemes()
}, [])
}

Some files were not shown because too many files have changed in this diff.