Merge pull request #5213 from menloresearch/release/v0.5.18

Sync Release/v0.5.18 into Development branch
Louis 2025-06-13 09:49:37 +07:00 committed by GitHub
commit e586f2387e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
892 changed files with 33391 additions and 52255 deletions

.github/scripts/electron-checksum.py — vendored normal file, 28 lines

@ -0,0 +1,28 @@
import hashlib
import base64
import sys
def hash_file(file_path):
# Create a SHA-512 hash object
sha512 = hashlib.sha512()
# Read and update the hash object with the content of the file
with open(file_path, 'rb') as f:
while True:
data = f.read(1024 * 1024) # Read in 1 MB chunks
if not data:
break
sha512.update(data)
# Obtain the hash result and encode it in base64
hash_base64 = base64.b64encode(sha512.digest()).decode('utf-8')
return hash_base64
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: python3 script.py <file_path>")
sys.exit(1)
file_path = sys.argv[1]
hash_base64_output = hash_file(file_path)
print(hash_base64_output)
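
For a quick local sanity check, the script's output should match a base64-encoded binary SHA-512 digest from openssl; a minimal sketch, assuming openssl is installed and using a hypothetical artifact path:

# Hypothetical check: both commands should print the same base64 string
python3 .github/scripts/electron-checksum.py ./electron/dist/jan-example.AppImage
openssl dgst -sha512 -binary ./electron/dist/jan-example.AppImage | openssl base64 -A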

Two binary image files changed (each 49 KiB before and after).

.github/scripts/rename-tauri-app.sh — vendored normal file, 63 lines

@ -0,0 +1,63 @@
#!/bin/bash
# Check if the correct number of arguments is provided
if [ "$#" -ne 2 ]; then
echo "Usage: $0 <path_to_json_input_file> <channel>"
exit 1
fi
INPUT_JSON_FILE="$1"
CHANNEL="$2"
if [ "$CHANNEL" == "nightly" ]; then
UPDATER="latest"
else
UPDATER="beta"
fi
# Check if the input file exists
if [ ! -f "$INPUT_JSON_FILE" ]; then
echo "Input file not found: $INPUT_JSON_FILE"
exit 1
fi
# Use jq to transform the content
jq --arg channel "$CHANNEL" --arg updater "$UPDATER" '
.productName = "Jan-\($channel)" |
.identifier = "jan-\($channel).ai.app"
' "$INPUT_JSON_FILE" > ./tauri.conf.json.tmp
cat ./tauri.conf.json.tmp
rm $INPUT_JSON_FILE
mv ./tauri.conf.json.tmp $INPUT_JSON_FILE
# Update Info.plist if it exists
INFO_PLIST_PATH="./src-tauri/Info.plist"
if [ -f "$INFO_PLIST_PATH" ]; then
echo "Updating Info.plist..."
# Replace jan.ai.app with jan-{channel}.ai.app
sed -i '' "s|jan\.ai\.app|jan-${CHANNEL}.ai.app|g" "$INFO_PLIST_PATH"
# Replace <string>jan</string> with <string>jan-{channel}</string>
sed -i '' "s|<string>jan</string>|<string>jan-${CHANNEL}</string>|g" "$INFO_PLIST_PATH"
echo "Info.plist updated"
cat ./src-tauri/Info.plist
fi
# Update the layout file
# LAYOUT_FILE_PATH="web/app/layout.tsx"
# if [ ! -f "$LAYOUT_FILE_PATH" ]; then
# echo "File does not exist: $LAYOUT_FILE_PATH"
# exit 1
# fi
# Perform the replacements
# sed -i -e "s#Jan#Jan-$CHANNEL#g" "$LAYOUT_FILE_PATH"
# Notify completion
# echo "File has been updated: $LAYOUT_FILE_PATH"


@ -1,86 +0,0 @@
name: Electron Builder - Beta Build
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
jobs:
# Create the app update version from the latest release tag and build number, and save it to the job output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-build-macos.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
beta: true
nightly: false
cortex_api_port: "39271"
sync-temp-to-latest:
needs: [build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: Sync temp to latest
run: |
# sync temp-beta to beta by copying files that are different or new
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
noti-discord-and-update-url-readme:
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
runs-on: ubuntu-latest
steps:
- name: Set version to environment variable
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
VERSION="${VERSION#v}"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Notify Discord
uses: Ilshidur/action-discord@master
with:
args: |
Jan-beta App version {{ VERSION }} has been released. Use the following links to download the app faster, or visit the GitHub release page for more information:
- Windows: https://delta.jan.ai/beta/jan-beta-win-x64-{{ VERSION }}.exe
- macOS Universal: https://delta.jan.ai/beta/jan-beta-mac-universal-{{ VERSION }}.dmg
- Linux Deb: https://delta.jan.ai/beta/jan-beta-linux-amd64-{{ VERSION }}.deb
- Linux AppImage: https://delta.jan.ai/beta/jan-beta-linux-x86_64-{{ VERSION }}.AppImage
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}


@ -12,8 +12,9 @@ on:
- none
- aws-s3
default: none
pull_request_review:
types: [submitted]
pull_request:
branches:
- release/**
jobs:
set-public-provider:
@ -47,48 +48,84 @@ jobs:
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-build-macos.yml
needs: [get-update-version, set-public-provider]
build-tauri-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
nightly: true
beta: false
channel: nightly
cortex_api_port: "39261"
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
build-tauri-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
nightly: true
beta: false
channel: nightly
cortex_api_port: "39261"
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
build-tauri-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
nightly: true
beta: false
channel: nightly
cortex_api_port: "39261"
sync-temp-to-latest:
needs: [set-public-provider, build-windows-x64, build-linux-x64, build-macos]
needs: [get-update-version, set-public-provider, build-tauri-windows-x64, build-tauri-linux-x64, build-tauri-macos]
runs-on: ubuntu-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-tauri-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-tauri-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-tauri-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-tauri-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-tauri-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
- name: Sync temp to latest
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
run: |
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
@ -97,7 +134,14 @@ jobs:
AWS_EC2_METADATA_DISABLED: "true"
noti-discord-nightly-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
needs: [
build-tauri-macos,
build-tauri-windows-x64,
build-tauri-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest
]
secrets: inherit
if: github.event_name == 'schedule'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
@ -108,7 +152,14 @@ jobs:
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-pre-release-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
needs: [
build-tauri-macos,
build-tauri-windows-x64,
build-tauri-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest
]
secrets: inherit
if: github.event_name == 'push'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
@ -119,7 +170,14 @@ jobs:
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-manual-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
needs: [
build-tauri-macos,
build-tauri-windows-x64,
build-tauri-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest
]
secrets: inherit
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
@ -130,21 +188,28 @@ jobs:
new_version: ${{ needs.get-update-version.outputs.new_version }}
comment-pr-build-url:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
runs-on: ubuntu-latest
if: github.event_name == 'pull_request_review'
steps:
- name: Set up GitHub CLI
run: |
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
# comment-pr-build-url:
# needs: [
# build-tauri-macos,
# build-tauri-windows-x64,
# build-tauri-linux-x64,
# get-update-version,
# set-public-provider,
# sync-temp-to-latest
# ]
# runs-on: ubuntu-latest
# if: github.event_name == 'pull_request_review'
# steps:
# - name: Set up GitHub CLI
# run: |
# curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
# sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
- name: Comment build URL on PR
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_URL=${{ github.event.pull_request.html_url }}
RUN_ID=${{ github.run_id }}
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
gh pr comment $PR_URL --body "$COMMENT"
# - name: Comment build URL on PR
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# run: |
# PR_URL=${{ github.event.pull_request.html_url }}
# RUN_ID=${{ github.run_id }}
# COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
# gh pr comment $PR_URL --body "$COMMENT"
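
For context, the latest.json assembled by the "create latest.json file" step earlier in this workflow follows the Tauri updater manifest layout; a minimal sketch of the generated file, with hypothetical version, date, signature, and URL values:

{
  "version": "0.5.18",
  "pub_date": "2025-06-13T02:49:37.000Z",
  "platforms": {
    "linux-x86_64": {
      "signature": "<tauri signing signature>",
      "url": "https://delta.jan.ai/nightly/Jan-nightly_0.5.18_amd64.AppImage"
    },
    "windows-x86_64": {
      "signature": "<tauri signing signature>",
      "url": "https://delta.jan.ai/nightly/Jan-nightly_0.5.18_x64-setup.exe"
    },
    "darwin-aarch64": {
      "signature": "<tauri signing signature>",
      "url": "https://delta.jan.ai/nightly/Jan-nightly_0.5.18.app.tar.gz"
    },
    "darwin-x86_64": {
      "signature": "<tauri signing signature>",
      "url": "https://delta.jan.ai/nightly/Jan-nightly_0.5.18.app.tar.gz"
    }
  }
}

The equivalent steps in the beta and stable workflows further down produce the same manifest shape, with the nightly URLs swapped for beta or GitHub release download URLs.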


@ -33,8 +33,8 @@ jobs:
draft: true
prerelease: false
build-macos:
uses: ./.github/workflows/template-build-macos.yml
build-electron-macos:
uses: ./.github/workflows/template-electron-build-macos.yml
secrets: inherit
needs: [get-update-version]
with:
@ -44,8 +44,8 @@ jobs:
nightly: false
new_version: ${{ needs.get-update-version.outputs.new_version }}
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
build-electron-windows-x64:
uses: ./.github/workflows/template-electron-build-windows-x64.yml
secrets: inherit
needs: [get-update-version]
with:
@ -55,8 +55,8 @@ jobs:
nightly: false
new_version: ${{ needs.get-update-version.outputs.new_version }}
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
build-electron-linux-x64:
uses: ./.github/workflows/template-electron-build-linux-x64.yml
secrets: inherit
needs: [get-update-version]
with:
@ -65,9 +65,49 @@ jobs:
beta: false
nightly: false
new_version: ${{ needs.get-update-version.outputs.new_version }}
# build-tauri-macos:
# uses: ./.github/workflows/template-tauri-build-macos.yml
# secrets: inherit
# needs: [get-update-version, create-draft-release]
# with:
# ref: ${{ github.ref }}
# public_provider: github
# channel: stable
# new_version: ${{ needs.get-update-version.outputs.new_version }}
# upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
# build-tauri-windows-x64:
# uses: ./.github/workflows/template-tauri-build-windows-x64.yml
# secrets: inherit
# needs: [get-update-version, create-draft-release]
# with:
# ref: ${{ github.ref }}
# public_provider: github
# channel: stable
# new_version: ${{ needs.get-update-version.outputs.new_version }}
# upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
# build-tauri-linux-x64:
# uses: ./.github/workflows/template-tauri-build-linux-x64.yml
# secrets: inherit
# needs: [get-update-version, create-draft-release]
# with:
# ref: ${{ github.ref }}
# public_provider: github
# channel: stable
# new_version: ${{ needs.get-update-version.outputs.new_version }}
# upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
update_release_draft:
needs: [build-macos, build-windows-x64, build-linux-x64]
needs: [
build-electron-windows-x64,
build-electron-linux-x64,
build-electron-macos,
build-tauri-windows-x64,
build-tauri-linux-x64,
build-tauri-macos
]
permissions:
# write permission is required to create a github release
contents: write


@ -34,6 +34,8 @@ on:
- 'Makefile'
- 'extensions/**'
- 'core/**'
- 'src-tauri/**'
- 'web-app/**'
- '!README.md'
jobs:
@ -53,7 +55,6 @@ jobs:
run: |
make config-yarn
yarn
yarn build:joi
yarn build:core
- name: Run test coverage
@ -305,52 +306,53 @@ jobs:
path: electron/playwright-report/
retention-days: 2
coverage-check:
runs-on: ubuntu-latest
needs: base_branch_cov
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
fetch-depth: 0
# coverage-check:
# runs-on: ubuntu-latest
# needs: base_branch_cov
# continue-on-error: true
# if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
# steps:
# - name: Getting the repo
# uses: actions/checkout@v3
# with:
# fetch-depth: 0
- name: Installing node
uses: actions/setup-node@v3
with:
node-version: 20
# - name: Installing node
# uses: actions/setup-node@v3
# with:
# node-version: 20
- name: Install yarn
run: npm install -g yarn
# - name: Install yarn
# run: npm install -g yarn
- name: 'Cleanup cache'
continue-on-error: true
run: |
rm -rf ~/jan
make clean
# - name: 'Cleanup cache'
# continue-on-error: true
# run: |
# rm -rf ~/jan
# make clean
- name: Download code coverage report from base branch
uses: actions/download-artifact@v4
with:
name: ref-lcov.info
# - name: Download code coverage report from base branch
# uses: actions/download-artifact@v4
# with:
# name: ref-lcov.info
- name: Linter and test coverage
run: |
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
echo -e "Display ID: $DISPLAY"
make lint
yarn build:test
yarn test:coverage
# - name: Linter and test coverage
# run: |
# export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
# echo -e "Display ID: $DISPLAY"
# make lint
# yarn build:test
# yarn test:coverage
- name: Generate Code Coverage report
id: code-coverage
uses: barecheck/code-coverage-action@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
lcov-file: './coverage/lcov.info'
base-lcov-file: './lcov.info'
send-summary-comment: true
show-annotations: 'warning'
# - name: Generate Code Coverage report
# id: code-coverage
# uses: barecheck/code-coverage-action@v1
# with:
# github-token: ${{ secrets.GITHUB_TOKEN }}
# lcov-file: './coverage/lcov.info'
# base-lcov-file: './lcov.info'
# send-summary-comment: true
# show-annotations: 'warning'
test-on-ubuntu-pr-target:
runs-on: ubuntu-latest


@ -0,0 +1,156 @@
name: Tauri Builder - Beta Build
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
jobs:
# Create the app update version from the latest release tag and build number, and save it to the job output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
create-draft-release:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }}
version: ${{ steps.get_version.outputs.version }}
permissions:
contents: write
steps:
- name: Extract tag name without v prefix
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
env:
GITHUB_REF: ${{ github.ref }}
- name: Create Draft Release
id: create_release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ github.ref_name }}
token: ${{ secrets.GITHUB_TOKEN }}
name: "${{ env.VERSION }}"
draft: true
prerelease: false
generate_release_notes: true
build-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: beta
cortex_api_port: "39271"
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: beta
cortex_api_port: "39271"
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: beta
cortex_api_port: "39271"
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
sync-temp-to-latest:
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://delta.jan.ai/beta/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://delta.jan.ai/beta/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://delta.jan.ai/beta/Jan-beta_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
- name: Sync temp to latest
run: |
# sync temp-beta to beta by copying files that are different or new
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/latest.json
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: "true"
- name: Upload release asset if public provider is github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
asset_path: ./latest.json
asset_name: latest.json
asset_content_type: text/json
noti-discord-and-update-url-readme:
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
runs-on: ubuntu-latest
steps:
- name: Set version to environment variable
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
VERSION="${VERSION#v}"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Notify Discord
uses: Ilshidur/action-discord@master
with:
args: |
Jan-beta App version {{ VERSION }} has been released. Use the following links to download the app faster, or visit the GitHub release page for more information:
- Windows: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_x64-setup.exe
- macOS Universal: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_universal.dmg
- Linux Deb: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.deb
- Linux AppImage: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.AppImage
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}
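
The tag filter at the top of this workflow uses GitHub's glob syntax, where + repeats the preceding character class, so a release-candidate beta tag such as the hypothetical one below triggers the build:

# Hypothetical tag matching v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta
git tag v0.5.18-rc1-beta
git push origin v0.5.18-rc1-beta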


@ -1,225 +1,225 @@
name: Tauri Builder - Nightly / Manual
on:
schedule:
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
workflow_dispatch:
inputs:
public_provider:
type: choice
description: 'Public Provider'
options:
- none
- aws-s3
default: none
pull_request:
branches:
- release/**
jobs:
set-public-provider:
runs-on: ubuntu-latest
outputs:
public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
ref: ${{ steps.set-public-provider.outputs.ref }}
steps:
- name: Set public provider
id: set-public-provider
run: |
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
echo "::set-output name=ref::${{ github.ref }}"
else
if [ "${{ github.event_name }}" == "schedule" ]; then
echo "::set-output name=public_provider::aws-s3"
echo "::set-output name=ref::refs/heads/dev"
elif [ "${{ github.event_name }}" == "push" ]; then
echo "::set-output name=public_provider::aws-s3"
echo "::set-output name=ref::${{ github.ref }}"
elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
echo "::set-output name=public_provider::none"
echo "::set-output name=ref::${{ github.ref }}"
else
echo "::set-output name=public_provider::none"
echo "::set-output name=ref::${{ github.ref }}"
fi
fi
# Create the app update version from the latest release tag and build number, and save it to the job output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
needs: [get-update-version, set-public-provider]
secrets: inherit
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
cortex_api_port: '39261'
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
cortex_api_port: '39261'
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, set-public-provider]
with:
ref: ${{ needs.set-public-provider.outputs.ref }}
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
new_version: ${{ needs.get-update-version.outputs.new_version }}
channel: nightly
cortex_api_port: '39261'
sync-temp-to-latest:
needs:
[
get-update-version,
set-public-provider,
build-windows-x64,
build-linux-x64,
build-macos,
]
runs-on: ubuntu-latest
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
- name: Sync temp to latest
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
run: |
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
noti-discord-nightly-and-update-url-readme:
needs:
[
build-macos,
build-windows-x64,
build-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest,
]
secrets: inherit
if: github.event_name == 'schedule'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Nightly
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-pre-release-and-update-url-readme:
needs:
[
build-macos,
build-windows-x64,
build-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest,
]
secrets: inherit
if: github.event_name == 'push'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Pre-release
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-manual-and-update-url-readme:
needs:
[
build-macos,
build-windows-x64,
build-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest,
]
secrets: inherit
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Manual
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
comment-pr-build-url:
needs:
[
build-macos,
build-windows-x64,
build-linux-x64,
get-update-version,
set-public-provider,
sync-temp-to-latest,
]
runs-on: ubuntu-latest
if: github.event_name == 'pull_request_review'
steps:
- name: Set up GitHub CLI
run: |
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
- name: Comment build URL on PR
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_URL=${{ github.event.pull_request.html_url }}
RUN_ID=${{ github.run_id }}
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
gh pr comment $PR_URL --body "$COMMENT"

.github/workflows/jan-tauri-build.yaml — vendored normal file, 145 lines

@ -0,0 +1,145 @@
name: Tauri Builder - Tag
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+"]
jobs:
# Create the app update version from the latest release tag and build number, and save it to the job output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
create-draft-release:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }}
version: ${{ steps.get_version.outputs.version }}
permissions:
contents: write
steps:
- name: Extract tag name without v prefix
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
env:
GITHUB_REF: ${{ github.ref }}
- name: Create Draft Release
id: create_release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ github.ref_name }}
token: ${{ secrets.GITHUB_TOKEN }}
name: "${{ env.VERSION }}"
draft: true
prerelease: false
build-macos:
uses: ./.github/workflows/template-tauri-build-macos.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
channel: stable
new_version: ${{ needs.get-update-version.outputs.new_version }}
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-windows-x64:
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
channel: stable
new_version: ${{ needs.get-update-version.outputs.new_version }}
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
build-linux-x64:
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
secrets: inherit
needs: [get-update-version, create-draft-release]
with:
ref: ${{ github.ref }}
public_provider: github
channel: stable
new_version: ${{ needs.get-update-version.outputs.new_version }}
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
sync-temp-to-latest:
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
- name: create latest.json file
run: |
VERSION=${{ needs.get-update-version.outputs.new_version }}
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
LINUX_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
WINDOWS_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
DARWIN_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-macos.outputs.TAR_NAME }}"
jq --arg version "$VERSION" \
--arg pub_date "$PUB_DATE" \
--arg linux_signature "$LINUX_SIGNATURE" \
--arg linux_url "$LINUX_URL" \
--arg windows_signature "$WINDOWS_SIGNATURE" \
--arg windows_url "$WINDOWS_URL" \
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
--arg darwin_arm_url "$DARWIN_URL" \
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
--arg darwin_amd_url "$DARWIN_URL" \
'.version = $version
| .pub_date = $pub_date
| .platforms["linux-x86_64"].signature = $linux_signature
| .platforms["linux-x86_64"].url = $linux_url
| .platforms["windows-x86_64"].signature = $windows_signature
| .platforms["windows-x86_64"].url = $windows_url
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
| .platforms["darwin-aarch64"].url = $darwin_arm_url
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
src-tauri/latest.json.template > latest.json
cat latest.json
- name: Upload release asset if public provider is github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
asset_path: ./latest.json
asset_name: latest.json
asset_content_type: text/json
update_release_draft:
needs: [build-macos, build-windows-x64, build-linux-x64]
permissions:
# write permission is required to create a github release
contents: write
# write permission is required for autolabeler
# otherwise, read permission is required at least
pull-requests: write
runs-on: ubuntu-latest
steps:
# (Optional) GitHub Enterprise requires GHE_HOST variable set
#- name: Set GHE_HOST
# run: |
# echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
# Drafts your next Release notes as Pull Requests are merged into "master"
- uses: release-drafter/release-drafter@v5
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
# with:
# config-name: my-config.yml
# disable-autolabeler: true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@ -1,127 +0,0 @@
name: Nightly Update cortex cpp
on:
schedule:
- cron: '30 19 * * 1-5' # At 01:30 on every day-of-week from Monday through Friday UTC +7
workflow_dispatch:
jobs:
update-submodule:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
actions: write
outputs:
pr_number: ${{ steps.check-update.outputs.pr_number }}
pr_created: ${{ steps.check-update.outputs.pr_created }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
ref: dev
fetch-depth: 0
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Configure Git
run: |
git config --global user.name 'github-actions[bot]'
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
- name: Update submodule to latest release
id: check-update
env:
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
run: |
curl -s https://api.github.com/repos/menloresearch/cortex/releases > /tmp/github_api_releases.json
latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1)
get_asset_count() {
local version_name=$1
cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length'
}
cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt"
current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1)
current_version_asset_count=$(get_asset_count "$current_version_name")
latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name")
if [ "$current_version_name" = "$latest_prerelease_name" ]; then
echo "cortex cpp remote repo doesn't have update today, skip update cortex.cpp for today nightly build"
echo "::set-output name=pr_created::false"
exit 0
fi
if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then
echo "Latest prerelease version has different number of assets, somethink went wrong, skip update cortex.cpp for today nightly build"
echo "::set-output name=pr_created::false"
exit 1
fi
echo $latest_prerelease_name > $cortex_cpp_version_file_path
echo "Updated version from $current_version_name to $latest_prerelease_name."
echo "::set-output name=pr_created::true"
git add -f $cortex_cpp_version_file_path
git commit -m "Update cortex cpp nightly to version $latest_prerelease_name"
branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')"
git checkout -b $branch_name
git push origin $branch_name
pr_title="Update cortex cpp nightly to version $latest_prerelease_name"
pr_body="This PR updates the Update cortex cpp nightly to version $latest_prerelease_name"
gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA
pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number')
echo "::set-output name=pr_number::$pr_number"
check-and-merge-pr:
needs: update-submodule
if: needs.update-submodule.outputs.pr_created == 'true'
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
fetch-depth: 0
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Wait for CI to pass
env:
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
run: |
pr_number=${{ needs.update-submodule.outputs.pr_number }}
while true; do
ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt')
if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then
echo "CI is still running, waiting..."
sleep 60
else
echo "CI has completed, checking states..."
ci_states=$(gh pr checks $pr_number --json state --jq '.[].state')
if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then
echo "CI failed, exiting..."
exit 1
else
echo "CI passed, merging PR..."
break
fi
fi
done
- name: Merge the PR
env:
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
run: |
pr_number=${{ needs.update-submodule.outputs.pr_number }}
gh pr merge $pr_number --merge --admin


@ -1,10 +1,9 @@
name: Publish core Package to npmjs
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-core"]
paths: ["core/**", ".github/workflows/publish-npm-core.yml"]
pull_request:
paths: ["core/**", ".github/workflows/publish-npm-core.yml"]
tags: ['v[0-9]+.[0-9]+.[0-9]+-core']
paths: ['core/**', '.github/workflows/publish-npm-core.yml']
workflow_dispatch:
jobs:
build-and-publish-plugins:
environment: production
@ -12,7 +11,7 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
fetch-depth: "0"
fetch-depth: '0'
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Install jq
@ -24,7 +23,7 @@ jobs:
env:
GITHUB_REF: ${{ github.ref }}
- name: "Get Semantic Version from tag"
- name: 'Get Semantic Version from tag'
if: github.event_name == 'push'
run: |
# Get the tag from the event
@ -42,8 +41,8 @@ jobs:
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v3
with:
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
node-version: '20.x'
registry-url: 'https://registry.npmjs.org'
- run: cd core && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build


@ -1,53 +0,0 @@
name: Publish joi Package to npmjs
on:
push:
tags: ["v[0-9]+.[0-9]+.[0-9]+-joi"]
paths: ["joi/**", ".github/workflows/publish-npm-joi.yml"]
pull_request:
paths: ["joi/**", ".github/workflows/publish-npm-joi.yml"]
jobs:
build-and-publish-plugins:
environment: production
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: "0"
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Extract tag name without v prefix
id: get_version
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
env:
GITHUB_REF: ${{ github.ref }}
- name: "Get Semantic Version from tag"
if: github.event_name == 'push'
run: |
# Get the tag from the event
tag=${GITHUB_REF#refs/tags/v}
# remove the -joi suffix
new_version=$(echo $tag | sed -n 's/-joi//p')
echo $new_version
# Replace the old version with the new version in package.json
jq --arg version "$new_version" '.version = $version' joi/package.json > /tmp/package.json && mv /tmp/package.json joi/package.json
# Print the new version
echo "Updated package.json version to: $new_version"
cat joi/package.json
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v3
with:
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- run: cd joi && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build
- run: cd joi && yarn publish --access public
if: github.event_name == 'push'
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}


@ -41,6 +41,7 @@ on:
jobs:
build-linux-x64:
if: inputs.public_provider == 'github' || inputs.public_provider == 'none'
runs-on: ubuntu-latest
environment: production
permissions:
@ -130,7 +131,7 @@ jobs:
env:
VERSION_TAG: ${{ inputs.new_version }}
- name: Build and publish app to aws s3 r2 or github artifactory
- name: Build and publish app to aws s3 r2 or github artifactory
if: inputs.public_provider != 'github'
run: |
# check public_provider is true or not
@ -176,12 +177,12 @@ jobs:
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
name: jan-electron-linux-amd64-${{ inputs.new_version }}-deb
path: ./electron/dist/*.deb
- name: Upload Artifact .AppImage file
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
name: jan-electron-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./electron/dist/*.AppImage


@ -51,6 +51,7 @@ on:
jobs:
build-macos:
if: inputs.public_provider == 'github' || inputs.public_provider == 'none'
runs-on: macos-latest
environment: production
permissions:
@ -160,7 +161,7 @@ jobs:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build and publish app to aws s3 r2 or github artifactory
- name: Build and publish app to aws s3 r2 or github artifactory
if: inputs.public_provider != 'github'
run: |
# check public_provider is true or not
@ -229,5 +230,5 @@ jobs:
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-mac-universal-${{ inputs.new_version }}
name: jan-electron-mac-universal-${{ inputs.new_version }}
path: ./electron/dist/*.dmg


@ -51,6 +51,7 @@ on:
jobs:
build-windows-x64:
if: inputs.public_provider == 'github' || inputs.public_provider == 'none'
runs-on: windows-latest
permissions:
contents: write
@ -225,5 +226,5 @@ jobs:
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-win-x64-${{ inputs.new_version }}
name: jan-electron-win-x64-${{ inputs.new_version }}
path: ./electron/dist/*.exe


@ -44,9 +44,12 @@ jobs:
exit 1
}
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
echo "Tag detected, set output follow tag"
echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}"
sanitized_tag="${{ steps.tag.outputs.tag }}"
# Remove the 'v' prefix if it exists
sanitized_tag="${sanitized_tag#v}"
echo "::set-output name=new_version::$sanitized_tag"
else
# Get the latest release tag from GitHub API
LATEST_TAG=$(get_latest_tag)
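
The parameter expansion above strips a single leading v when present, so tags with or without the prefix normalize to the same version string; a quick illustrative check (values hypothetical):

# "${tag#v}" removes one leading "v" if there is one
tag="v0.5.18"; echo "${tag#v}"   # prints 0.5.18
tag="0.5.18";  echo "${tag#v}"   # prints 0.5.18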


@ -47,10 +47,10 @@ jobs:
with:
args: |
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
- Windows: https://delta.jan.ai/nightly/jan-nightly-win-x64-{{ VERSION }}.exe
- macOS Universal: https://delta.jan.ai/nightly/jan-nightly-mac-universal-{{ VERSION }}.dmg
- Linux Deb: https://delta.jan.ai/nightly/jan-nightly-linux-amd64-{{ VERSION }}.deb
- Linux AppImage: https://delta.jan.ai/nightly/jan-nightly-linux-x86_64-{{ VERSION }}.AppImage
- Windows: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_x64-setup.exe
- macOS Universal: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_universal.dmg
- Linux Deb: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.deb
- Linux AppImage: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.AppImage
- Github action run: https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }}
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}


@ -1,318 +1,326 @@
name: tauri-build-linux-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
DEB_SIG:
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
APPIMAGE_SIG:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
jobs:
build-linux-x64:
runs-on: ubuntu-22.04
outputs:
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android/sdk/ndk
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo apt-get clean
echo "Disk space after cleanup:"
df -h
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Install Tauri dependencies
run: |
sudo apt update
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
- name: Update app version based on public_provider
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.resources = ["resources/themes/**/*", "resources/pre-install/**/*"] | .bundle.externalBin = ["binaries/cortex-server", "resources/bin/uv"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan-${{ inputs.channel }}/binaries": "binaries/deps",
"usr/lib/Jan-${{ inputs.channel }}/binaries/engines": "binaries/engines",
"usr/lib/Jan-${{ inputs.channel }}/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
else
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan/binaries": "binaries/deps",
"usr/lib/Jan/binaries/engines": "binaries/engines",
"usr/lib/Jan/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
fi
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Build app
run: |
make build-tauri
# Copy engines and bun to appimage
wget https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O ./appimagetool
chmod +x ./appimagetool
if [ "${{ inputs.channel }}" != "stable" ]; then
ls ./src-tauri/target/release/bundle/appimage/
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/bin/bun
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/engines
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep .AppImage | head -1)
echo $APP_IMAGE
rm -f $APP_IMAGE
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir $APP_IMAGE
else
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/bin/bun
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/engines
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
echo $APP_IMAGE
rm -f $APP_IMAGE
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan.AppDir $APP_IMAGE
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
path: ./src-tauri/target/release/bundle/deb/*.deb
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
## create zip file and latest-linux.yml for linux electron auto updater
- name: Create zip file and latest-linux.yml for linux electron auto updater
id: packageinfo
run: |
cd ./src-tauri/target/release/bundle
if [ "${{ inputs.channel }}" != "stable" ]; then
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
else
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
fi
DEB_FILE_SIZE=$(stat -c%s deb/$DEB_FILE_NAME)
APPIMAGE_FILE_SIZE=$(stat -c%s appimage/$APPIMAGE_FILE_NAME)
echo "deb file size: $DEB_FILE_SIZE"
echo "appimage file size: $APPIMAGE_FILE_SIZE"
DEB_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py deb/$DEB_FILE_NAME)
APPIMAGE_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py appimage/$APPIMAGE_FILE_NAME)
echo "deb sh512 checksum: $DEB_SH512_CHECKSUM"
echo "appimage sh512 checksum: $APPIMAGE_SH512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest-linux.yml file
echo "version: ${{ inputs.new_version }}" > latest-linux.yml
echo "files:" >> latest-linux.yml
echo " - url: $DEB_FILE_NAME" >> latest-linux.yml
echo " sha512: $DEB_SH512_CHECKSUM" >> latest-linux.yml
echo " size: $DEB_FILE_SIZE" >> latest-linux.yml
echo " - url: $APPIMAGE_FILE_NAME" >> latest-linux.yml
echo " sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
echo " size: $APPIMAGE_FILE_SIZE" >> latest-linux.yml
echo "path: $APPIMAGE_FILE_NAME" >> latest-linux.yml
echo "sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
echo "releaseDate: $CURRENT_TIME" >> latest-linux.yml
cat latest-linux.yml
cp latest-linux.yml beta-linux.yml
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle
# Upload for electron updater for nightly
aws s3 cp ./latest-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-linux.yml
aws s3 cp ./beta-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-linux.yml
# Upload for tauri updater
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/latest-linux.yml
asset_name: latest-linux.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/beta-linux.yml
asset_name: beta-linux.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_content_type: application/octet-stream
name: tauri-build-linux-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
DEB_SIG:
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
APPIMAGE_SIG:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME:
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
jobs:
build-linux-x64:
runs-on: ubuntu-22.04
outputs:
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android/sdk/ndk
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo apt-get clean
echo "Disk space after cleanup:"
df -h
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Install Tauri dependencies
run: |
sudo apt update
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
- name: Update app version based on public_provider
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.resources = ["resources/pre-install/**/*"] | .bundle.externalBin = ["binaries/cortex-server", "resources/bin/uv"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan-${{ inputs.channel }}/binaries": "binaries/deps",
"usr/lib/Jan-${{ inputs.channel }}/binaries/engines": "binaries/engines",
"usr/lib/Jan-${{ inputs.channel }}/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
else
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
"usr/lib/Jan/binaries": "binaries/deps",
"usr/lib/Jan/binaries/engines": "binaries/engines",
"usr/lib/Jan/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
fi
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
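# Note: https://delta.jan.ai/<channel>/latest.json is the Tauri updater manifest; it is
# not produced here, and a downstream step presumably assembles it from the signatures
# exported by this workflow. For reference, the Tauri v2 updater expects roughly
# (all values hypothetical):
#   {
#     "version": "0.5.18",
#     "pub_date": "2025-06-13T00:00:00Z",
#     "platforms": {
#       "linux-x86_64": {
#         "signature": "<contents of the .AppImage.sig file>",
#         "url": "https://delta.jan.ai/nightly/Jan-nightly_0.5.18_amd64.AppImage"
#       }
#     }
#   }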
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Build app
run: |
make build-tauri
# Copy engines and bun to appimage
wget https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O ./appimagetool
chmod +x ./appimagetool
if [ "${{ inputs.channel }}" != "stable" ]; then
ls ./src-tauri/target/release/bundle/appimage/
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/bin/bun
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/engines
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep .AppImage | head -1)
echo $APP_IMAGE
rm -f $APP_IMAGE
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir $APP_IMAGE
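# Repacking with appimagetool replaces the AppImage that the Tauri build produced, so
# the updater signature generated during the build no longer matches; re-sign the
# rebuilt AppImage below so the .sig uploaded later corresponds to this artifact.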
yarn tauri signer sign \
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
"$APP_IMAGE"
else
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/bin/bun
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/engines
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
echo $APP_IMAGE
rm -f $APP_IMAGE
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan.AppDir $APP_IMAGE
yarn tauri signer sign \
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
"$APP_IMAGE"
fi
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-deb
path: ./src-tauri/target/release/bundle/deb/*.deb
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
## create zip file and latest-linux.yml for linux electron auto updater
- name: Create zip file and latest-linux.yml for linux electron auto updater
id: packageinfo
run: |
cd ./src-tauri/target/release/bundle
if [ "${{ inputs.channel }}" != "stable" ]; then
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
else
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
fi
DEB_FILE_SIZE=$(stat -c%s deb/$DEB_FILE_NAME)
APPIMAGE_FILE_SIZE=$(stat -c%s appimage/$APPIMAGE_FILE_NAME)
echo "deb file size: $DEB_FILE_SIZE"
echo "appimage file size: $APPIMAGE_FILE_SIZE"
DEB_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py deb/$DEB_FILE_NAME)
APPIMAGE_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py appimage/$APPIMAGE_FILE_NAME)
echo "deb sh512 checksum: $DEB_SH512_CHECKSUM"
echo "appimage sh512 checksum: $APPIMAGE_SH512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest-linux.yml file
echo "version: ${{ inputs.new_version }}" > latest-linux.yml
echo "files:" >> latest-linux.yml
echo " - url: $DEB_FILE_NAME" >> latest-linux.yml
echo " sha512: $DEB_SH512_CHECKSUM" >> latest-linux.yml
echo " size: $DEB_FILE_SIZE" >> latest-linux.yml
echo " - url: $APPIMAGE_FILE_NAME" >> latest-linux.yml
echo " sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
echo " size: $APPIMAGE_FILE_SIZE" >> latest-linux.yml
echo "path: $APPIMAGE_FILE_NAME" >> latest-linux.yml
echo "sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
echo "releaseDate: $CURRENT_TIME" >> latest-linux.yml
cat latest-linux.yml
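# For reference, the echo lines above produce an electron-builder style manifest
# shaped roughly like this (version, file names, sizes and date are hypothetical):
#   version: 0.5.18
#   files:
#     - url: Jan-nightly_0.5.18_amd64.deb
#       sha512: <base64-encoded SHA-512 from electron-checksum.py>
#       size: 123456789
#     - url: Jan-nightly_0.5.18_amd64.AppImage
#       sha512: <base64-encoded SHA-512 from electron-checksum.py>
#       size: 134567890
#   path: Jan-nightly_0.5.18_amd64.AppImage
#   sha512: <base64-encoded SHA-512 from electron-checksum.py>
#   releaseDate: 2025-06-13T00:00:00.000Z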
cp latest-linux.yml beta-linux.yml
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
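# DEB_SIG, APPIMAGE_SIG and APPIMAGE_FILE_NAME are re-exported as workflow_call outputs
# at the top of this file, presumably so the calling workflow can assemble the Tauri
# updater manifest (latest.json) from them.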
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle
# Upload for electron updater for nightly
aws s3 cp ./latest-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-linux.yml
aws s3 cp ./beta-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-linux.yml
# Upload for tauri updater
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/latest-linux.yml
asset_name: latest-linux.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/beta-linux.yml
asset_name: beta-linux.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
asset_content_type: application/octet-stream

View File

@ -1,312 +1,312 @@
name: tauri-build-macos
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
CODE_SIGN_P12_BASE64:
required: false
CODE_SIGN_P12_PASSWORD:
required: false
APPLE_ID:
required: false
APPLE_APP_SPECIFIC_PASSWORD:
required: false
DEVELOPER_ID:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
MAC_UNIVERSAL_SIG:
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
TAR_NAME:
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
jobs:
build-macos:
runs-on: macos-latest
outputs:
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Create bun and uv universal
run: |
mkdir -p ./src-tauri/resources/bin/
cd ./src-tauri/resources/bin/
curl -L -o bun-darwin-x64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-x64.zip
curl -L -o bun-darwin-aarch64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-aarch64.zip
unzip bun-darwin-x64.zip
unzip bun-darwin-aarch64.zip
lipo -create -output bun-universal-apple-darwin bun-darwin-x64/bun bun-darwin-aarch64/bun
cp -f bun-darwin-aarch64/bun bun-aarch64-apple-darwin
cp -f bun-darwin-x64/bun bun-x86_64-apple-darwin
cp -f bun-universal-apple-darwin bun
curl -L -o uv-x86_64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-x86_64-apple-darwin.tar.gz
curl -L -o uv-arm64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-aarch64-apple-darwin.tar.gz
tar -xzf uv-x86_64.tar.gz
tar -xzf uv-arm64.tar.gz
mv uv-x86_64-apple-darwin uv-x86_64
mv uv-aarch64-apple-darwin uv-aarch64
lipo -create -output uv-universal-apple-darwin uv-x86_64/uv uv-aarch64/uv
cp -f uv-x86_64/uv uv-x86_64-apple-darwin
cp -f uv-aarch64/uv uv-aarch64-apple-darwin
cp -f uv-universal-apple-darwin uv
ls -la
- name: Update app version based on latest release tag with build number
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Get key for notarization
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
shell: bash
env:
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
- uses: apple-actions/import-codesign-certs@v2
continue-on-error: true
with:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build app
run: |
rustup target add x86_64-apple-darwin
make build-tauri
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
APP_PATH: '.'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
APPLE_API_KEY_PATH: /tmp/notary-key.p8
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
path: |
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
## create zip file and latest-mac.yml for mac electron auto updater
- name: create zip file and latest-mac.yml for mac electron auto updater
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
if [ "${{ inputs.channel }}" != "stable" ]; then
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
else
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
TAR_NAME=Jan.app.tar.gz
fi
FILE_SIZE=$(stat -f%z $FILE_NAME)
echo "size: $FILE_SIZE"
SH512_CHECKSUM=$(python3 ../../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
echo "sha512: $SH512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest-mac.yml file
echo "version: ${{ inputs.new_version }}" > latest-mac.yml
echo "files:" >> latest-mac.yml
echo " - url: $FILE_NAME" >> latest-mac.yml
echo " sha512: $SH512_CHECKSUM" >> latest-mac.yml
echo " size: $FILE_SIZE" >> latest-mac.yml
echo "path: $FILE_NAME" >> latest-mac.yml
echo "sha512: $SH512_CHECKSUM" >> latest-mac.yml
echo "releaseDate: $CURRENT_TIME" >> latest-mac.yml
cat latest-mac.yml
cp latest-mac.yml beta-mac.yml
echo "::set-output name=MAC_UNIVERSAL_SIG::$MAC_UNIVERSAL_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=DMG_NAME::$DMG_NAME"
echo "::set-output name=TAR_NAME::$TAR_NAME"
id: metadata
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle
# Upload for electron updater for nightly
aws s3 cp ./macos/latest-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-mac.yml
aws s3 cp ./macos/beta-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-mac.yml
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig
# Upload for tauri updater
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/latest-mac.yml
asset_name: latest-mac.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/beta-mac.yml
asset_name: beta-mac.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/gzip
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
asset_content_type: application/gzip
name: tauri-build-macos
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
CODE_SIGN_P12_BASE64:
required: false
CODE_SIGN_P12_PASSWORD:
required: false
APPLE_ID:
required: false
APPLE_APP_SPECIFIC_PASSWORD:
required: false
DEVELOPER_ID:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
MAC_UNIVERSAL_SIG:
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
TAR_NAME:
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
jobs:
build-macos:
runs-on: macos-latest
outputs:
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
environment: production
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Create bun and uv universal
run: |
mkdir -p ./src-tauri/resources/bin/
cd ./src-tauri/resources/bin/
curl -L -o bun-darwin-x64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-x64.zip
curl -L -o bun-darwin-aarch64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-aarch64.zip
unzip bun-darwin-x64.zip
unzip bun-darwin-aarch64.zip
lipo -create -output bun-universal-apple-darwin bun-darwin-x64/bun bun-darwin-aarch64/bun
cp -f bun-darwin-aarch64/bun bun-aarch64-apple-darwin
cp -f bun-darwin-x64/bun bun-x86_64-apple-darwin
cp -f bun-universal-apple-darwin bun
curl -L -o uv-x86_64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-x86_64-apple-darwin.tar.gz
curl -L -o uv-arm64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-aarch64-apple-darwin.tar.gz
tar -xzf uv-x86_64.tar.gz
tar -xzf uv-arm64.tar.gz
mv uv-x86_64-apple-darwin uv-x86_64
mv uv-aarch64-apple-darwin uv-aarch64
lipo -create -output uv-universal-apple-darwin uv-x86_64/uv uv-aarch64/uv
cp -f uv-x86_64/uv uv-x86_64-apple-darwin
cp -f uv-aarch64/uv uv-aarch64-apple-darwin
cp -f uv-universal-apple-darwin uv
ls -la
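# lipo above merges the x86_64 and arm64 slices into universal (fat) binaries, so the
# bundled bun and uv run natively on both Intel and Apple Silicon Macs.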
- name: Update app version based on latest release tag with build number
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
cat ./src-tauri/Cargo.toml
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
fi
- name: Get key for notarization
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
shell: bash
env:
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
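# Assumption: the decoded .p8 is an App Store Connect API key; the Build app step below
# points APPLE_API_KEY_PATH at /tmp/notary-key.p8 so the bundler can notarize the signed app.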
- uses: apple-actions/import-codesign-certs@v2
continue-on-error: true
with:
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
- name: Build app
run: |
rustup target add x86_64-apple-darwin
make build-tauri
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
APP_PATH: '.'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
APPLE_API_KEY_PATH: /tmp/notary-key.p8
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
# Publish app
## Artifacts, for dev and test
- name: Upload Artifact
if: inputs.public_provider != 'github'
uses: actions/upload-artifact@v4
with:
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
path: |
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
## create zip file and latest-mac.yml for mac electron auto updater
- name: create zip file and latest-mac.yml for mac electron auto updater
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
if [ "${{ inputs.channel }}" != "stable" ]; then
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
else
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
TAR_NAME=Jan.app.tar.gz
fi
FILE_SIZE=$(stat -f%z $FILE_NAME)
echo "size: $FILE_SIZE"
SH512_CHECKSUM=$(python3 ../../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
echo "sha512: $SH512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest-mac.yml file
echo "version: ${{ inputs.new_version }}" > latest-mac.yml
echo "files:" >> latest-mac.yml
echo " - url: $FILE_NAME" >> latest-mac.yml
echo " sha512: $SH512_CHECKSUM" >> latest-mac.yml
echo " size: $FILE_SIZE" >> latest-mac.yml
echo "path: $FILE_NAME" >> latest-mac.yml
echo "sha512: $SH512_CHECKSUM" >> latest-mac.yml
echo "releaseDate: $CURRENT_TIME" >> latest-mac.yml
cat latest-mac.yml
cp latest-mac.yml beta-mac.yml
echo "::set-output name=MAC_UNIVERSAL_SIG::$MAC_UNIVERSAL_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=DMG_NAME::$DMG_NAME"
echo "::set-output name=TAR_NAME::$TAR_NAME"
id: metadata
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/universal-apple-darwin/release/bundle
# Upload for electron updater for nightly
aws s3 cp ./macos/latest-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-mac.yml
aws s3 cp ./macos/beta-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-mac.yml
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
# aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig
# Upload for tauri updater
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/latest-mac.yml
asset_name: latest-mac.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/beta-mac.yml
asset_name: beta-mac.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/gzip
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
asset_content_type: application/gzip

View File

@ -1,290 +1,293 @@
name: tauri-build-windows-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
AZURE_KEY_VAULT_URI:
required: false
AZURE_CLIENT_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_SECRET:
required: false
AZURE_CERT_NAME:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
WIN_SIG:
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
jobs:
build-windows-x64:
runs-on: windows-latest
outputs:
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace Icons for Beta Build
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version based on tag
id: version_update
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
mv /tmp/package.json web/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------Cargo.toml---------"
cat ./src-tauri/Cargo.toml
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
echo "---------tauri.conf.json---------"
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
echo "---------nsis.template---------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Install AzureSignTool
run: |
dotnet tool install --global --version 6.0.0 AzureSignTool
- name: Build app
shell: bash
run: |
make build-tauri
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
## create zip file and latest.yml for windows electron auto updater
- name: create zip file and latest.yml for windows electron auto updater
shell: bash
run: |
cd ./src-tauri/target/release/bundle/nsis
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
fi
FILE_SIZE=$(stat -c %s $FILE_NAME)
echo "size: $FILE_SIZE"
SH512_CHECKSUM=$(python3 ../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
echo "sha512: $SH512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest.yml file
echo "version: ${{ inputs.new_version }}" > latest.yml
echo "files:" >> latest.yml
echo " - url: $FILE_NAME" >> latest.yml
echo " sha512: $SH512_CHECKSUM" >> latest.yml
echo " size: $FILE_SIZE" >> latest.yml
echo "path: $FILE_NAME" >> latest.yml
echo "sha512: $SH512_CHECKSUM" >> latest.yml
echo "releaseDate: $CURRENT_TIME" >> latest.yml
cat latest.yml
cp latest.yml beta.yml
echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
id: metadata
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
shell: bash
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle/nsis
# Upload for electron updater for nightly
aws s3 cp ./latest.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest.yml
aws s3 cp ./beta.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta.yml
# Upload for tauri updater
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
## Upload to github release for stable release
- name: Upload release asset if public provider is github
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/latest.yml
asset_name: latest.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/beta.yml
asset_name: beta.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream
name: tauri-build-windows-x64
on:
workflow_call:
inputs:
ref:
required: true
type: string
default: 'refs/heads/main'
public_provider:
required: true
type: string
default: none
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
new_version:
required: true
type: string
default: ''
cortex_api_port:
required: false
type: string
default: ''
upload_url:
required: false
type: string
default: ''
channel:
required: true
type: string
default: 'nightly'
description: 'The channel to use for this job'
secrets:
DELTA_AWS_S3_BUCKET_NAME:
required: false
DELTA_AWS_ACCESS_KEY_ID:
required: false
DELTA_AWS_SECRET_ACCESS_KEY:
required: false
AZURE_KEY_VAULT_URI:
required: false
AZURE_CLIENT_ID:
required: false
AZURE_TENANT_ID:
required: false
AZURE_CLIENT_SECRET:
required: false
AZURE_CERT_NAME:
required: false
TAURI_SIGNING_PRIVATE_KEY:
required: false
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
required: false
TAURI_SIGNING_PUBLIC_KEY:
required: false
outputs:
WIN_SIG:
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
jobs:
build-windows-x64:
runs-on: windows-latest
outputs:
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
permissions:
contents: write
steps:
- name: Getting the repo
uses: actions/checkout@v3
with:
ref: ${{ inputs.ref }}
- name: Replace icons for beta and nightly builds
if: inputs.channel != 'stable'
shell: bash
run: |
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
- name: Installing node
uses: actions/setup-node@v1
with:
node-version: 20
- name: Install jq
uses: dcarbone/install-jq-action@v2.0.1
- name: Install ctoml
run: |
cargo install ctoml
- name: Update app version based on tag
id: version_update
shell: bash
run: |
echo "Version: ${{ inputs.new_version }}"
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
echo "---------Cargo.toml---------"
cat ./src-tauri/Cargo.toml
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
# Change app name for beta and nightly builds
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
chmod +x .github/scripts/rename-tauri-app.sh
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
echo "---------tauri.conf.json---------"
cat ./src-tauri/tauri.conf.json
# Update Cargo.toml
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
echo "------------------"
cat ./src-tauri/Cargo.toml
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
echo "---------nsis.template---------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Install AzureSignTool
run: |
dotnet tool install --global --version 6.0.0 AzureSignTool
- name: Build app
shell: bash
run: |
curl -L -o ./src-tauri/binaries/vcomp140.dll https://catalog.jan.ai/vcomp140.dll
curl -L -o ./src-tauri/binaries/msvcp140_codecvt_ids.dll https://catalog.jan.ai/msvcp140_codecvt_ids.dll
ls ./src-tauri/binaries
make build-tauri
env:
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: auto
AWS_EC2_METADATA_DISABLED: 'true'
AWS_MAX_ATTEMPTS: '5'
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
## create latest.yml and beta.yml for windows electron auto updater
- name: create latest.yml and beta.yml for windows electron auto updater
shell: bash
run: |
cd ./src-tauri/target/release/bundle/nsis
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
fi
FILE_SIZE=$(stat -c %s $FILE_NAME)
echo "size: $FILE_SIZE"
SH512_CHECKSUM=$(python3 ../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
echo "sha512: $SH512_CHECKSUM"
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
echo "releaseDate: $CURRENT_TIME"
# Create latest.yml file
echo "version: ${{ inputs.new_version }}" > latest.yml
echo "files:" >> latest.yml
echo " - url: $FILE_NAME" >> latest.yml
echo " sha512: $SH512_CHECKSUM" >> latest.yml
echo " size: $FILE_SIZE" >> latest.yml
echo "path: $FILE_NAME" >> latest.yml
echo "sha512: $SH512_CHECKSUM" >> latest.yml
echo "releaseDate: $CURRENT_TIME" >> latest.yml
cat latest.yml
cp latest.yml beta.yml
echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
id: metadata
## Upload to s3 for nightly and beta
- name: upload to aws s3 if public provider is aws
shell: bash
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
run: |
cd ./src-tauri/target/release/bundle/nsis
# Upload latest.yml and beta.yml for the Electron auto-updater (nightly and beta channels)
aws s3 cp ./latest.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest.yml
aws s3 cp ./beta.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta.yml
# Upload for tauri updater
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
AWS_EC2_METADATA_DISABLED: 'true'
## Upload to github release for stable, beta, and github-provider builds
- name: Upload release asset if channel is stable
if: inputs.channel == 'stable'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/latest.yml
asset_name: latest.yml
asset_content_type: text/yaml
- name: Upload release asset if channel is beta
if: inputs.channel == 'beta'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/beta.yml
asset_name: beta.yml
asset_content_type: text/yaml
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream
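For reference, the latest.yml / beta.yml written by the step above follows the electron-updater manifest format: a base64-encoded SHA-512 of the installer, its size in bytes, and an ISO-8601 release date. The sketch below is illustrative only; the installer name and version are placeholders patterned after the FILE_NAME logic above, and it assumes Node's built-in crypto and fs modules rather than the Python helper the workflow calls.

import { createHash } from 'crypto'
import { readFileSync, statSync, writeFileSync } from 'fs'

// Build an electron-updater style manifest (latest.yml / beta.yml) for a single installer.
// The installer name and version here are placeholders, not values from a real workflow run.
function writeUpdaterManifest(installerName: string, version: string, outPath: string): void {
  const sha512 = createHash('sha512')
    .update(readFileSync(installerName))
    .digest('base64') // electron-updater expects base64, not hex
  const size = statSync(installerName).size
  const releaseDate = new Date().toISOString()
  const yml = [
    `version: ${version}`,
    'files:',
    `  - url: ${installerName}`,
    `    sha512: ${sha512}`,
    `    size: ${size}`,
    `path: ${installerName}`,
    `sha512: ${sha512}`,
    `releaseDate: ${releaseDate}`,
  ].join('\n')
  writeFileSync(outPath, yml)
}

writeUpdaterManifest('Jan-beta_0.5.6_x64-setup.exe', '0.5.6', 'latest.yml')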

21
.gitignore vendored
View File

@ -22,19 +22,6 @@ coverage
*.log
core/lib/**
# Nitro binary files
extensions/*-extension/bin/*/nitro
extensions/*-extension/bin/*/*.metal
extensions/*-extension/bin/*/*.exe
extensions/*-extension/bin/*/*.dll
extensions/*-extension/bin/*/*.exp
extensions/*-extension/bin/*/*.lib
extensions/*-extension/bin/saved-*
extensions/*-extension/bin/*.tar.gz
extensions/*-extension/bin/vulkaninfoSDK.exe
extensions/*-extension/bin/vulkaninfo
# Turborepo
.turbo
electron/test-data
@ -50,3 +37,11 @@ electron/shared/**
# docs
docs/yarn.lock
electron/.version.bak
src-tauri/binaries/engines/cortex.llamacpp
src-tauri/resources/themes
src-tauri/resources/lib
src-tauri/Cargo.lock
src-tauri/icons
!src-tauri/icons/icon.png
src-tauri/gen/apple
src-tauri/resources/bin

103
Makefile
View File

@ -24,95 +24,44 @@ ifeq ($(OS),Windows_NT)
echo "skip"
endif
yarn install
yarn build:joi
yarn build:core
yarn build:server
yarn build:extensions
check-file-counts: install-and-build
ifeq ($(OS),Windows_NT)
powershell -Command "if ((Get-ChildItem -Path pre-install -Filter *.tgz | Measure-Object | Select-Object -ExpandProperty Count) -ne (Get-ChildItem -Path extensions -Directory | Where-Object Name -like *-extension* | Measure-Object | Select-Object -ExpandProperty Count)) { Write-Host 'Number of .tgz files in pre-install does not match the number of subdirectories in extensions with package.json'; exit 1 } else { Write-Host 'Extension build successful' }"
else
@tgz_count=$$(find pre-install -type f -name "*.tgz" | wc -l); dir_count=$$(find extensions -mindepth 1 -maxdepth 1 -type d -exec test -e '{}/package.json' \; -print | wc -l); if [ $$tgz_count -ne $$dir_count ]; then echo "Number of .tgz files in pre-install ($$tgz_count) does not match the number of subdirectories in extension ($$dir_count)"; exit 1; else echo "Extension build successful"; fi
endif
dev: check-file-counts
dev: install-and-build
yarn install:cortex
yarn download:bin
yarn copy:lib
yarn dev
# Deprecated soon
dev-tauri: install-and-build
yarn install:cortex
yarn download:bin
yarn copy:lib
yarn dev:tauri
# Linting
lint: check-file-counts
lint: install-and-build
yarn lint
update-playwright-config:
ifeq ($(OS),Windows_NT)
echo -e "const RPconfig = {\n\
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
endpoint: '$(REPORT_PORTAL_URL)',\n\
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
attributes: [\n\
{\n\
key: 'key',\n\
value: 'value',\n\
},\n\
{\n\
value: 'value',\n\
},\n\
],\n\
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
else ifeq ($(shell uname -s),Linux)
echo "const RPconfig = {\n\
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
endpoint: '$(REPORT_PORTAL_URL)',\n\
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
attributes: [\n\
{\n\
key: 'key',\n\
value: 'value',\n\
},\n\
{\n\
value: 'value',\n\
},\n\
],\n\
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
else
echo "const RPconfig = {\n\
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
endpoint: '$(REPORT_PORTAL_URL)',\n\
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
attributes: [\n\
{\n\
key: 'key',\n\
value: 'value',\n\
},\n\
{\n\
value: 'value',\n\
},\n\
],\n\
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
sed -i '' "s|^ reporter: .*| reporter: [['@reportportal\/agent-js-playwright', RPconfig]],|" electron/playwright.config.ts
endif
# Testing
test: lint
yarn build:test
yarn test:coverage
# yarn build:test
# yarn test:coverage
# Need e2e setup for tauri backend
yarn test
# Builds and publishes the app
build-and-publish: check-file-counts
yarn build:publish
build-and-publish: install-and-build
yarn build
# Build
build: check-file-counts
build: install-and-build
yarn build
# Deprecated soon
build-tauri: install-and-build
yarn copy:lib
yarn build
clean:
@ -122,6 +71,8 @@ ifeq ($(OS),Windows_NT)
-powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz"
-powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz"
-powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz"
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/resources"
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/target"
-powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }"
else ifeq ($(shell uname -s),Linux)
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
@ -136,6 +87,8 @@ else ifeq ($(shell uname -s),Linux)
rm -rf ./pre-install/*.tgz
rm -rf ./extensions/*/*.tgz
rm -rf ./electron/pre-install/*.tgz
rm -rf ./src-tauri/resources
rm -rf ./src-tauri/target
rm -rf "~/jan/extensions"
rm -rf "~/.cache/jan*"
else
@ -150,6 +103,8 @@ else
rm -rf ./pre-install/*.tgz
rm -rf ./extensions/*/*.tgz
rm -rf ./electron/pre-install/*.tgz
rm -rf ./src-tauri/resources
rm -rf ./src-tauri/target
rm -rf ~/jan/extensions
rm -rf ~/Library/Caches/jan*
endif

View File

@ -8,31 +8,13 @@
],
"homepage": "https://jan.ai",
"license": "AGPL-3.0",
"browser": "dist/index.js",
"main": "dist/index.js",
"module": "dist/node/index.cjs.js",
"typings": "dist/types/index.d.ts",
"files": [
"dist",
"types"
],
"author": "Jan <service@jan.ai>",
"exports": {
".": "./dist/index.js",
"./node": "./dist/node/index.cjs.js"
},
"typesVersions": {
"*": {
".": [
"./dist/index.js.map",
"./dist/types/index.d.ts"
],
"node": [
"./dist/node/index.cjs.js.map",
"./dist/types/node/index.d.ts"
]
}
},
"scripts": {
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
"test": "jest",

View File

@ -1,3 +1,6 @@
/**
* @jest-environment jsdom
*/
import { openExternalUrl } from './core'
import { joinPath } from './core'
import { openFileExplorer } from './core'
@ -25,7 +28,7 @@ describe('test core apis', () => {
},
}
const result = await joinPath(paths)
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith(paths)
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith({ args: paths })
expect(result).toBe('/path/one/path/two')
})
@ -37,7 +40,7 @@ describe('test core apis', () => {
},
}
const result = await openFileExplorer(path)
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith(path)
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith({ path })
expect(result).toBe('opened')
})
@ -51,20 +54,6 @@ describe('test core apis', () => {
expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled()
expect(result).toBe('/path/to/jan/data')
})
it('should execute function on main process', async () => {
const extension = 'testExtension'
const method = 'testMethod'
const args = ['arg1', 'arg2']
globalThis.core = {
api: {
invokeExtensionFunc: jest.fn().mockResolvedValue('result'),
},
}
const result = await executeOnMain(extension, method, ...args)
expect(globalThis.core.api.invokeExtensionFunc).toHaveBeenCalledWith(extension, method, ...args)
expect(result).toBe('result')
})
})
describe('dirName - just a pass thru api', () => {

View File

@ -13,8 +13,11 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom
extension,
method,
...args
) => globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
) => {
if ('electronAPI' in window && window.electronAPI)
return globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
return () => {}
}
/**
* Gets Jan's data folder path.
@ -29,15 +32,15 @@ const getJanDataFolderPath = (): Promise<string> => globalThis.core.api?.getJanD
* @returns {Promise<any>} A promise that resolves when the file explorer is opened.
*/
const openFileExplorer: (path: string) => Promise<any> = (path) =>
globalThis.core.api?.openFileExplorer(path)
globalThis.core.api?.openFileExplorer({ path })
/**
* Joins multiple paths together.
* @param paths - The paths to join.
* @returns {Promise<string>} A promise that resolves with the joined path.
*/
const joinPath: (paths: string[]) => Promise<string> = (paths) =>
globalThis.core.api?.joinPath(paths)
const joinPath: (args: string[]) => Promise<string> = (args) =>
globalThis.core.api?.joinPath({ args })
/**
* Get dirname of a file path.
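The diff above switches the browser-side core API from positional arguments to a single object payload (joinPath({ args }), openFileExplorer({ path })), which lines up with how the Tauri backend receives named arguments. A minimal sketch of the new calling convention, assuming the globalThis.core.api bridge is already injected; the folder segments are hypothetical:

// Hypothetical caller; only the payload shape matters here.
const api = (globalThis as any).core?.api

async function openThreadFolder(threadId: string): Promise<void> {
  // joinPath now takes { args: string[] } instead of a spread of path segments
  const folder: string = await api?.joinPath({ args: ['threads', threadId] })
  // openFileExplorer now takes { path } instead of a bare string
  await api?.openFileExplorer({ path: folder })
}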

View File

@ -1,7 +1,5 @@
import { BaseExtension } from './extension'
import { SettingComponentProps } from '../types'
import { getJanDataFolderPath, joinPath } from './core'
import { fs } from './fs'
jest.mock('./core')
jest.mock('./fs')
@ -90,18 +88,32 @@ describe('BaseExtension', () => {
{ key: 'setting2', controllerProps: { value: 'value2' } } as any,
]
;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension')
;(fs.existsSync as jest.Mock).mockResolvedValue(false)
;(fs.mkdir as jest.Mock).mockResolvedValue(undefined)
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
const localStorageMock = (() => {
let store: Record<string, string> = {}
return {
getItem: (key: string) => store[key] || null,
setItem: (key: string, value: string) => {
store[key] = value
},
removeItem: (key: string) => {
delete store[key]
},
clear: () => {
store = {}
},
}
})()
Object.defineProperty(global, 'localStorage', {
value: localStorageMock,
})
const mock = jest.spyOn(localStorage, 'setItem')
await baseExtension.registerSettings(settings)
expect(fs.mkdir).toHaveBeenCalledWith('/data/settings/TestExtension')
expect(fs.writeFileSync).toHaveBeenCalledWith(
'/data/settings/TestExtension',
JSON.stringify(settings, null, 2)
expect(mock).toHaveBeenCalledWith(
'TestExtension',
JSON.stringify(settings)
)
})
@ -125,17 +137,15 @@ describe('BaseExtension', () => {
]
jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension/settings.json')
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
const mockSetItem = jest.spyOn(localStorage, 'setItem')
await baseExtension.updateSettings([
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any,
])
expect(fs.writeFileSync).toHaveBeenCalledWith(
'/data/settings/TestExtension/settings.json',
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }], null, 2)
expect(mockSetItem).toHaveBeenCalledWith(
'TestExtension',
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }])
)
})
})

View File

@ -1,7 +1,4 @@
import { Model, ModelEvent, SettingComponentProps } from '../types'
import { getJanDataFolderPath, joinPath } from './core'
import { events } from './events'
import { fs } from './fs'
import { Model, SettingComponentProps } from '../types'
import { ModelManager } from './models'
export enum ExtensionTypeEnum {
@ -117,22 +114,14 @@ export abstract class BaseExtension implements ExtensionType {
return
}
const extensionSettingFolderPath = await joinPath([
await getJanDataFolderPath(),
'settings',
this.name,
])
settings.forEach((setting) => {
setting.extensionName = this.name
})
try {
if (!(await fs.existsSync(extensionSettingFolderPath)))
await fs.mkdir(extensionSettingFolderPath)
const settingFilePath = await joinPath([extensionSettingFolderPath, this.settingFileName])
const oldSettingsJson = localStorage.getItem(this.name)
// Persists new settings
if (await fs.existsSync(settingFilePath)) {
const oldSettings = JSON.parse(await fs.readFileSync(settingFilePath, 'utf-8'))
if (oldSettingsJson) {
const oldSettings = JSON.parse(oldSettingsJson)
settings.forEach((setting) => {
// Keep setting value
if (setting.controllerProps && Array.isArray(oldSettings))
@ -141,7 +130,7 @@ export abstract class BaseExtension implements ExtensionType {
)?.controllerProps?.value
})
}
await fs.writeFileSync(settingFilePath, JSON.stringify(settings, null, 2))
localStorage.setItem(this.name, JSON.stringify(settings))
} catch (err) {
console.error(err)
}
@ -180,17 +169,10 @@ export abstract class BaseExtension implements ExtensionType {
async getSettings(): Promise<SettingComponentProps[]> {
if (!this.name) return []
const settingPath = await joinPath([
await getJanDataFolderPath(),
this.settingFolderName,
this.name,
this.settingFileName,
])
try {
if (!(await fs.existsSync(settingPath))) return []
const content = await fs.readFileSync(settingPath, 'utf-8')
const settings: SettingComponentProps[] = JSON.parse(content)
const settingsString = localStorage.getItem(this.name)
if (!settingsString) return []
const settings: SettingComponentProps[] = JSON.parse(settingsString)
return settings
} catch (err) {
console.warn(err)
@ -220,19 +202,7 @@ export abstract class BaseExtension implements ExtensionType {
if (!updatedSettings.length) updatedSettings = componentProps as SettingComponentProps[]
const settingFolder = await joinPath([
await getJanDataFolderPath(),
this.settingFolderName,
this.name,
])
if (!(await fs.existsSync(settingFolder))) {
await fs.mkdir(settingFolder)
}
const settingPath = await joinPath([settingFolder, this.settingFileName])
await fs.writeFileSync(settingPath, JSON.stringify(updatedSettings, null, 2))
localStorage.setItem(this.name, JSON.stringify(updatedSettings))
updatedSettings.forEach((setting) => {
this.onSettingUpdate<typeof setting.controllerProps.value>(
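With this change, extension settings no longer live in a settings.json under the Jan data folder; they are kept in localStorage under the extension's name, and registerSettings merges previously saved values into the incoming defaults before persisting. A rough sketch of that read-merge-write cycle, using a minimal stand-in for SettingComponentProps:

// Minimal stand-in for SettingComponentProps from @janhq/core.
type Setting = { key: string; controllerProps?: { value?: unknown } }

function mergeAndPersistSettings(extensionName: string, defaults: Setting[]): void {
  const stored = localStorage.getItem(extensionName)
  if (stored) {
    const previous: Setting[] = JSON.parse(stored)
    // Keep whatever value the user already chose for settings that still exist
    for (const setting of defaults) {
      const match = previous.find((s) => s.key === setting.key)
      if (match && setting.controllerProps) {
        setting.controllerProps.value = match.controllerProps?.value
      }
    }
  }
  localStorage.setItem(extensionName, JSON.stringify(defaults))
}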

View File

@ -31,21 +31,21 @@ export abstract class AIEngine extends BaseExtension {
/**
* Loads the model.
*/
async loadModel(model: Model): Promise<any> {
if (model.engine.toString() !== this.provider) return Promise.resolve()
async loadModel(model: Partial<Model>, abortController?: AbortController): Promise<any> {
if (model?.engine?.toString() !== this.provider) return Promise.resolve()
events.emit(ModelEvent.OnModelReady, model)
return Promise.resolve()
}
/**
* Stops the model.
*/
async unloadModel(model?: Model): Promise<any> {
async unloadModel(model?: Partial<Model>): Promise<any> {
if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
events.emit(ModelEvent.OnModelStopped, model ?? {})
return Promise.resolve()
}
/*
/**
* Inference request
*/
inference(data: MessageRequest) {}
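loadModel and unloadModel now accept Partial<Model>, and loadModel takes an optional AbortController so a slow load can be cancelled. A hedged usage sketch; the import path, model id, and the abort-on-navigation scenario are illustrative assumptions:

import { EngineManager } from '@janhq/core' // assumed export path

async function loadWithCancel(): Promise<void> {
  // In the app, extensions register engines on a shared manager at startup
  const engine = new EngineManager().get('llama-cpp')
  const controller = new AbortController()

  // Partial<Model> means engine and id are enough to start a load
  const loading = engine?.loadModel({ id: 'my-local-model', engine: 'llama-cpp' }, controller)

  // e.g. the user switches threads before the model finishes loading
  controller.abort()
  await loading
}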

View File

@ -1,4 +1,3 @@
import { InferenceEngine } from '../../../types'
import { AIEngine } from './AIEngine'
/**
@ -6,6 +5,7 @@ import { AIEngine } from './AIEngine'
*/
export class EngineManager {
public engines = new Map<string, AIEngine>()
public controller: AbortController | null = null
/**
* Registers an engine.
@ -21,22 +21,6 @@ export class EngineManager {
* @returns The engine, if found.
*/
get<T extends AIEngine>(provider: string): T | undefined {
// Backward compatible provider
// nitro is migrated to cortex
if (
[
InferenceEngine.nitro,
InferenceEngine.cortex,
InferenceEngine.cortex_llamacpp,
InferenceEngine.cortex_onnx,
InferenceEngine.cortex_tensorrtllm,
InferenceEngine.cortex_onnx,
]
.map((e) => e.toString())
.includes(provider)
)
provider = InferenceEngine.cortex
return this.engines.get(provider) as T | undefined
}

View File

@ -29,7 +29,7 @@ export abstract class LocalOAIEngine extends OAIEngine {
/**
* Load the model.
*/
override async loadModel(model: Model & { file_path?: string }): Promise<void> {
override async loadModel(model: Model & { file_path?: string }, abortController?: AbortController): Promise<void> {
if (model.engine.toString() !== this.provider) return
const modelFolder = 'file_path' in model && model.file_path ? await dirName(model.file_path) : await this.getModelFilePath(model.id)
const systemInfo = await systemInformation()

View File

@ -12,11 +12,7 @@ import {
ChatCompletionRole,
ContentType,
} from '../../../types'
import { requestInference } from './helpers/sse'
import { ulid } from 'ulidx'
jest.mock('./helpers/sse')
jest.mock('ulidx')
jest.mock('../../events')
class TestOAIEngine extends OAIEngine {
@ -48,79 +44,6 @@ describe('OAIEngine', () => {
)
})
it('should handle inference request', async () => {
const data: MessageRequest = {
model: { engine: 'test-provider', id: 'test-model' } as any,
threadId: 'test-thread',
type: MessageRequestType.Thread,
assistantId: 'test-assistant',
messages: [{ role: ChatCompletionRole.User, content: 'Hello' }],
}
;(ulid as jest.Mock).mockReturnValue('test-id')
;(requestInference as jest.Mock).mockReturnValue({
subscribe: ({ next, complete }: any) => {
next('test response')
complete()
},
})
await engine.inference(data)
expect(requestInference).toHaveBeenCalledWith(
'http://test-inference-url',
expect.objectContaining({ model: 'test-model' }),
expect.any(Object),
expect.any(AbortController),
{ Authorization: 'Bearer test-token' },
undefined
)
expect(events.emit).toHaveBeenCalledWith(
MessageEvent.OnMessageResponse,
expect.objectContaining({ id: 'test-id' })
)
expect(events.emit).toHaveBeenCalledWith(
MessageEvent.OnMessageUpdate,
expect.objectContaining({
content: [
{
type: ContentType.Text,
text: { value: 'test response', annotations: [] },
},
],
status: MessageStatus.Ready,
})
)
})
it('should handle inference error', async () => {
const data: MessageRequest = {
model: { engine: 'test-provider', id: 'test-model' } as any,
threadId: 'test-thread',
type: MessageRequestType.Thread,
assistantId: 'test-assistant',
messages: [{ role: ChatCompletionRole.User, content: 'Hello' }],
}
;(ulid as jest.Mock).mockReturnValue('test-id')
;(requestInference as jest.Mock).mockReturnValue({
subscribe: ({ error }: any) => {
error({ message: 'test error', code: 500 })
},
})
await engine.inference(data)
expect(events.emit).toHaveBeenLastCalledWith(
MessageEvent.OnMessageUpdate,
expect.objectContaining({
status: 'error',
error_code: 500,
})
)
})
it('should stop inference', () => {
engine.stopInference()
expect(engine.isCancelled).toBe(true)

View File

@ -1,18 +1,9 @@
import { requestInference } from './helpers/sse'
import { ulid } from 'ulidx'
import { AIEngine } from './AIEngine'
import {
ChatCompletionRole,
ContentType,
InferenceEvent,
MessageEvent,
MessageRequest,
MessageRequestType,
MessageStatus,
Model,
ModelInfo,
ThreadContent,
ThreadMessage,
} from '../../../types'
import { events } from '../../events'
@ -53,111 +44,6 @@ export abstract class OAIEngine extends AIEngine {
*/
override onUnload(): void {}
/*
* Inference request
*/
override async inference(data: MessageRequest) {
if (!data.model?.id) {
events.emit(MessageEvent.OnMessageResponse, {
status: MessageStatus.Error,
content: [
{
type: ContentType.Text,
text: {
value: 'No model ID provided',
annotations: [],
},
},
],
})
return
}
const timestamp = Date.now() / 1000
const message: ThreadMessage = {
id: ulid(),
thread_id: data.threadId,
type: data.type,
assistant_id: data.assistantId,
role: ChatCompletionRole.Assistant,
content: [],
status: MessageStatus.Pending,
created_at: timestamp,
completed_at: timestamp,
object: 'thread.message',
}
if (data.type !== MessageRequestType.Summary) {
events.emit(MessageEvent.OnMessageResponse, message)
}
this.isCancelled = false
this.controller = new AbortController()
const model: ModelInfo = {
...(this.loadedModel ? this.loadedModel : {}),
...data.model,
}
const header = await this.headers()
let requestBody = {
messages: data.messages ?? [],
model: model.id,
stream: true,
...model.parameters,
}
if (this.transformPayload) {
requestBody = this.transformPayload(requestBody)
}
requestInference(
this.inferenceUrl,
requestBody,
model,
this.controller,
header,
this.transformResponse
).subscribe({
next: (content: any) => {
const messageContent: ThreadContent = {
type: ContentType.Text,
text: {
value: content.trim(),
annotations: [],
},
}
message.content = [messageContent]
events.emit(MessageEvent.OnMessageUpdate, message)
},
complete: async () => {
message.status = message.content.length
? MessageStatus.Ready
: MessageStatus.Error
events.emit(MessageEvent.OnMessageUpdate, message)
},
error: async (err: any) => {
if (this.isCancelled || message.content.length) {
message.status = MessageStatus.Stopped
events.emit(MessageEvent.OnMessageUpdate, message)
return
}
message.status = MessageStatus.Error
message.content[0] = {
type: ContentType.Text,
text: {
value:
typeof message === 'string'
? err.message
: (JSON.stringify(err.message) ?? err.detail),
annotations: [],
},
}
message.error_code = err.code
events.emit(MessageEvent.OnMessageUpdate, message)
},
})
}
/**
* Stops the inference.
*/

View File

@ -1,146 +0,0 @@
import { lastValueFrom, Observable } from 'rxjs'
import { requestInference } from './sse'
import { ReadableStream } from 'stream/web'
describe('requestInference', () => {
it('should send a request to the inference server and return an Observable', () => {
// Mock the fetch function
const mockFetch: any = jest.fn(() =>
Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
choices: [{ message: { content: 'Generated response' } }],
}),
headers: new Headers(),
redirected: false,
status: 200,
statusText: 'OK',
// Add other required properties here
})
)
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
// Define the test inputs
const inferenceUrl = 'https://inference-server.com'
const requestBody = { message: 'Hello' }
const model = { id: 'model-id', parameters: { stream: false } }
// Call the function
const result = requestInference(inferenceUrl, requestBody, model)
// Assert the expected behavior
expect(result).toBeInstanceOf(Observable)
expect(lastValueFrom(result)).resolves.toEqual('Generated response')
})
it('returns 401 error', () => {
// Mock the fetch function
const mockFetch: any = jest.fn(() =>
Promise.resolve({
ok: false,
json: () =>
Promise.resolve({
error: { message: 'Invalid API Key.', code: 'invalid_api_key' },
}),
headers: new Headers(),
redirected: false,
status: 401,
statusText: 'invalid_api_key',
// Add other required properties here
})
)
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
// Define the test inputs
const inferenceUrl = 'https://inference-server.com'
const requestBody = { message: 'Hello' }
const model = { id: 'model-id', parameters: { stream: false } }
// Call the function
const result = requestInference(inferenceUrl, requestBody, model)
// Assert the expected behavior
expect(result).toBeInstanceOf(Observable)
expect(lastValueFrom(result)).rejects.toEqual({
message: 'Invalid API Key.',
code: 'invalid_api_key',
})
})
})
it('should handle a successful response with a transformResponse function', () => {
// Mock the fetch function
const mockFetch: any = jest.fn(() =>
Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
choices: [{ message: { content: 'Generated response' } }],
}),
headers: new Headers(),
redirected: false,
status: 200,
statusText: 'OK',
})
)
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
// Define the test inputs
const inferenceUrl = 'https://inference-server.com'
const requestBody = { message: 'Hello' }
const model = { id: 'model-id', parameters: { stream: false } }
const transformResponse = (data: any) =>
data.choices[0].message.content.toUpperCase()
// Call the function
const result = requestInference(
inferenceUrl,
requestBody,
model,
undefined,
undefined,
transformResponse
)
// Assert the expected behavior
expect(result).toBeInstanceOf(Observable)
expect(lastValueFrom(result)).resolves.toEqual('GENERATED RESPONSE')
})
it('should handle a successful response with streaming enabled', () => {
// Mock the fetch function
const mockFetch: any = jest.fn(() =>
Promise.resolve({
ok: true,
body: new ReadableStream({
start(controller) {
controller.enqueue(
new TextEncoder().encode(
'data: {"choices": [{"delta": {"content": "Streamed"}}]}'
)
)
controller.enqueue(new TextEncoder().encode('data: [DONE]'))
controller.close()
},
}),
headers: new Headers(),
redirected: false,
status: 200,
statusText: 'OK',
})
)
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
// Define the test inputs
const inferenceUrl = 'https://inference-server.com'
const requestBody = { message: 'Hello' }
const model = { id: 'model-id', parameters: { stream: true } }
// Call the function
const result = requestInference(inferenceUrl, requestBody, model)
// Assert the expected behavior
expect(result).toBeInstanceOf(Observable)
expect(lastValueFrom(result)).resolves.toEqual('Streamed')
})

View File

@ -1,132 +0,0 @@
import { Observable } from 'rxjs'
import { ErrorCode, ModelRuntimeParams } from '../../../../types'
/**
* Sends a request to the inference server to generate a response based on the recent messages.
* @param recentMessages - An array of recent messages to use as context for the inference.
* @returns An Observable that emits the generated response as a string.
*/
export function requestInference(
inferenceUrl: string,
requestBody: any,
model: {
id: string
parameters?: ModelRuntimeParams
},
controller?: AbortController,
headers?: HeadersInit,
transformResponse?: Function
): Observable<string> {
return new Observable((subscriber) => {
fetch(inferenceUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
'Accept': model.parameters?.stream
? 'text/event-stream'
: 'application/json',
...headers,
},
body: JSON.stringify(requestBody),
signal: controller?.signal,
})
.then(async (response) => {
if (!response.ok) {
if (response.status === 401) {
throw {
code: ErrorCode.InvalidApiKey,
message: 'Invalid API Key.',
}
}
let data = await response.json()
try {
handleError(data)
} catch (err) {
subscriber.error(err)
return
}
}
// There could be overriden stream parameter in the model
// that is set in request body (transformed payload)
if (
requestBody?.stream === false ||
model.parameters?.stream === false
) {
const data = await response.json()
try {
handleError(data)
} catch (err) {
subscriber.error(err)
return
}
if (transformResponse) {
subscriber.next(transformResponse(data))
} else {
subscriber.next(
data.choices
? data.choices[0]?.message?.content
: (data.content[0]?.text ?? '')
)
}
} else {
const stream = response.body
const decoder = new TextDecoder('utf-8')
const reader = stream?.getReader()
let content = ''
while (true && reader) {
const { done, value } = await reader.read()
if (done) {
break
}
const text = decoder.decode(value)
const lines = text.trim().split('\n')
let cachedLines = ''
for (const line of lines) {
try {
if (transformResponse) {
content += transformResponse(line)
subscriber.next(content ?? '')
} else {
const toParse = cachedLines + line
if (!line.includes('data: [DONE]')) {
const data = JSON.parse(toParse.replace('data: ', ''))
try {
handleError(data)
} catch (err) {
subscriber.error(err)
return
}
content += data.choices[0]?.delta?.content ?? ''
if (content.startsWith('assistant: ')) {
content = content.replace('assistant: ', '')
}
if (content !== '') subscriber.next(content)
}
}
} catch {
cachedLines = line
}
}
}
}
subscriber.complete()
})
.catch((err) => subscriber.error(err))
})
}
/**
* Handle error and normalize it to a common format.
* @param data
*/
const handleError = (data: any) => {
if (
data.error ||
data.message ||
data.detail ||
(Array.isArray(data) && data.length && data[0].error)
) {
throw data.error ?? data[0]?.error ?? data
}
}

View File

@ -1,5 +1,4 @@
import {
InferenceEngine,
Engines,
EngineVariant,
EngineReleased,
@ -28,7 +27,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @param name - Inference engine name.
* @returns A Promise that resolves to an array of installed engine.
*/
abstract getInstalledEngines(name: InferenceEngine): Promise<EngineVariant[]>
abstract getInstalledEngines(name: string): Promise<EngineVariant[]>
/**
* @param name - Inference engine name.
@ -37,7 +36,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @returns A Promise that resolves to an array of latest released engine by version.
*/
abstract getReleasedEnginesByVersion(
name: InferenceEngine,
name: string,
version: string,
platform?: string
): Promise<EngineReleased[]>
@ -48,7 +47,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @returns A Promise that resolves to an array of latest released engine.
*/
abstract getLatestReleasedEngine(
name: InferenceEngine,
name: string,
platform?: string
): Promise<EngineReleased[]>
@ -74,7 +73,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @returns A Promise that resolves when the engine is uninstalled.
*/
abstract uninstallEngine(
name: InferenceEngine,
name: string,
engineConfig: EngineConfig
): Promise<{ messages: string }>
@ -83,7 +82,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @returns A Promise that resolves to an object of default engine.
*/
abstract getDefaultEngineVariant(
name: InferenceEngine
name: string
): Promise<DefaultEngineVariant>
/**
@ -92,7 +91,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @returns A Promise that resolves to set default engine.
*/
abstract setDefaultEngineVariant(
name: InferenceEngine,
name: string,
engineConfig: EngineConfig
): Promise<{ messages: string }>
@ -100,7 +99,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
* @returns A Promise that resolves to update engine.
*/
abstract updateEngine(
name: InferenceEngine,
name: string,
engineConfig?: EngineConfig
): Promise<{ messages: string }>
@ -112,5 +111,5 @@ export abstract class EngineManagementExtension extends BaseExtension {
/**
* @returns A Promise that resolves to the list of remote models.
*/
abstract getRemoteModels(name: InferenceEngine | string): Promise<any>
abstract getRemoteModels(name: string): Promise<any>
}
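Because engine identifiers are now plain strings rather than InferenceEngine enum members, callers pass the engine name directly. A small sketch against the abstract interface above; the concrete extension instance and the 'llama-cpp' name are assumptions for illustration:

import { EngineManagementExtension } from '@janhq/core' // assumed export path

async function printDefaultVariant(ext: EngineManagementExtension): Promise<void> {
  // Engine names are free-form strings now, e.g. 'llama-cpp' or a remote provider id
  const variant = await ext.getDefaultEngineVariant('llama-cpp')
  console.log(`${variant.engine} -> ${variant.variant} ${variant.version}`)
}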

View File

@ -19,7 +19,7 @@ export abstract class HardwareManagementExtension extends BaseExtension {
/**
* @returns A Promise that resolves to an object of set active gpus.
*/
abstract setAvtiveGpu(data: { gpus: number[] }): Promise<{
abstract setActiveGpu(data: { gpus: number[] }): Promise<{
message: string
activated_gpus: number[]
}>

View File

@ -36,31 +36,31 @@ describe('fs module', () => {
it('should call readFileSync with correct arguments', () => {
const args = ['path/to/file']
fs.readFileSync(...args)
expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith({ args })
})
it('should call existsSync with correct arguments', () => {
const args = ['path/to/file']
fs.existsSync(...args)
expect(globalThis.core.api.existsSync).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.existsSync).toHaveBeenCalledWith({ args })
})
it('should call readdirSync with correct arguments', () => {
const args = ['path/to/directory']
fs.readdirSync(...args)
expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith({ args })
})
it('should call mkdir with correct arguments', () => {
const args = ['path/to/directory']
fs.mkdir(...args)
expect(globalThis.core.api.mkdir).toHaveBeenCalledWith(...args)
expect(globalThis.core.api.mkdir).toHaveBeenCalledWith({ args })
})
it('should call rm with correct arguments', () => {
const args = ['path/to/directory']
fs.rm(...args)
expect(globalThis.core.api.rm).toHaveBeenCalledWith(...args, { recursive: true, force: true })
expect(globalThis.core.api.rm).toHaveBeenCalledWith({ args })
})
it('should call unlinkSync with correct arguments', () => {

View File

@ -4,7 +4,7 @@ import { FileStat } from '../types'
* Writes data to a file at the specified path.
* @returns {Promise<any>} A Promise that resolves when the file is written successfully.
*/
const writeFileSync = (...args: any[]) => globalThis.core.api?.writeFileSync(...args)
const writeFileSync = (...args: any[]) => globalThis.core.api?.writeFileSync({ args })
/**
* Writes blob data to a file at the specified path.
@ -19,29 +19,29 @@ const writeBlob: (path: string, data: string) => Promise<any> = (path, data) =>
* Reads the contents of a file at the specified path.
* @returns {Promise<any>} A Promise that resolves with the contents of the file.
*/
const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync(...args)
const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync({ args })
/**
* Check whether the file exists
* @param {string} path
* @returns {boolean} A boolean indicating whether the path is a file.
*/
const existsSync = (...args: any[]) => globalThis.core.api?.existsSync(...args)
const existsSync = (...args: any[]) => globalThis.core.api?.existsSync({ args })
/**
* List the directory files
* @returns {Promise<any>} A Promise that resolves with the contents of the directory.
*/
const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync(...args)
const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync({ args })
/**
* Creates a directory at the specified path.
* @returns {Promise<any>} A Promise that resolves when the directory is created successfully.
*/
const mkdir = (...args: any[]) => globalThis.core.api?.mkdir(...args)
const mkdir = (...args: any[]) => globalThis.core.api?.mkdir({ args })
/**
* Removes a directory at the specified path.
* @returns {Promise<any>} A Promise that resolves when the directory is removed successfully.
*/
const rm = (...args: any[]) => globalThis.core.api?.rm(...args, { recursive: true, force: true })
const rm = (...args: any[]) => globalThis.core.api?.rm({ args })
/**
* Deletes a file from the local file system.
@ -80,10 +80,8 @@ const getGgufFiles: (paths: string[]) => Promise<any> = (paths) =>
* @param outsideJanDataFolder - Whether the file is outside the Jan data folder.
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
*/
const fileStat: (path: string, outsideJanDataFolder?: boolean) => Promise<FileStat | undefined> = (
path,
outsideJanDataFolder
) => globalThis.core.api?.fileStat(path, outsideJanDataFolder)
const fileStat: (path: string) => Promise<FileStat | undefined> = (path) =>
globalThis.core.api?.fileStat({ args: path })
// TODO: Export `dummy` fs functions automatically
// Currently adding these manually

View File

@ -3,7 +3,6 @@ import * as Events from './events'
import * as FileSystem from './fs'
import * as Extension from './extension'
import * as Extensions from './extensions'
import * as Tools from './tools'
import * as Models from './models'
describe('Module Tests', () => {
@ -27,10 +26,6 @@ describe('Module Tests', () => {
expect(Extensions).toBeDefined()
})
it('should export all base tools', () => {
expect(Tools).toBeDefined()
})
it('should export all base tools', () => {
expect(Models).toBeDefined()
})

View File

@ -28,12 +28,6 @@ export * from './extension'
*/
export * from './extensions'
/**
* Export all base tools.
* @module
*/
export * from './tools'
/**
* Export all base models.
* @module

View File

@ -38,10 +38,13 @@ export class ModelManager {
return this.models.get(id) as T | undefined
}
/**
* The instance of the tool manager.
* Shared instance of ExtensionManager.
*/
static instance(): ModelManager {
return (window.core?.modelManager as ModelManager) ?? new ModelManager()
static instance() {
if (!window.core.modelManager)
window.core.modelManager = new ModelManager()
return window.core.modelManager as ModelManager
}
}
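ModelManager.instance() now lazily creates the singleton and caches it on window.core.modelManager, so every caller shares one registry instead of sometimes getting a throwaway instance. A short usage sketch; the import path and model id are hypothetical:

import { ModelManager } from '@janhq/core' // assumed export path

// Both lookups hit the same instance stored on window.core.modelManager
const manager = ModelManager.instance()
const model = manager.get('my-model-id') // undefined until something registers it
console.log(model?.id)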

View File

@ -1,5 +0,0 @@
it('should not throw any errors when imported', () => {
expect(() => require('./index')).not.toThrow();
})

View File

@ -1,2 +0,0 @@
export * from './manager'
export * from './tool'

View File

@ -1,47 +0,0 @@
import { AssistantTool, MessageRequest } from '../../types'
import { InferenceTool } from './tool'
/**
* Manages the registration and retrieval of inference tools.
*/
export class ToolManager {
public tools = new Map<string, InferenceTool>()
/**
* Registers a tool.
* @param tool - The tool to register.
*/
register<T extends InferenceTool>(tool: T) {
this.tools.set(tool.name, tool)
}
/**
* Retrieves a tool by it's name.
* @param name - The name of the tool to retrieve.
* @returns The tool, if found.
*/
get<T extends InferenceTool>(name: string): T | undefined {
return this.tools.get(name) as T | undefined
}
/*
** Process the message request with the tools.
*/
process(request: MessageRequest, tools: AssistantTool[]): Promise<MessageRequest> {
return tools.reduce((prevPromise, currentTool) => {
return prevPromise.then((prevResult) => {
return currentTool.enabled
? this.get(currentTool.type)?.process(prevResult, currentTool) ??
Promise.resolve(prevResult)
: Promise.resolve(prevResult)
})
}, Promise.resolve(request))
}
/**
* The instance of the tool manager.
*/
static instance(): ToolManager {
return (window.core?.toolManager as ToolManager) ?? new ToolManager()
}
}

View File

@ -1,63 +0,0 @@
import { ToolManager } from '../../browser/tools/manager'
import { InferenceTool } from '../../browser/tools/tool'
import { AssistantTool, MessageRequest } from '../../types'
class MockInferenceTool implements InferenceTool {
name = 'mockTool'
process(request: MessageRequest, tool: AssistantTool): Promise<MessageRequest> {
return Promise.resolve(request)
}
}
it('should register a tool', () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
expect(manager.get(tool.name)).toBe(tool)
})
it('should retrieve a tool by its name', () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
const retrievedTool = manager.get(tool.name)
expect(retrievedTool).toBe(tool)
})
it('should return undefined for a non-existent tool', () => {
const manager = new ToolManager()
const retrievedTool = manager.get('nonExistentTool')
expect(retrievedTool).toBeUndefined()
})
it('should process the message request with enabled tools', async () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
const request: MessageRequest = { message: 'test' } as any
const tools: AssistantTool[] = [{ type: 'mockTool', enabled: true }] as any
const result = await manager.process(request, tools)
expect(result).toBe(request)
})
it('should skip processing for disabled tools', async () => {
const manager = new ToolManager()
const tool = new MockInferenceTool()
manager.register(tool)
const request: MessageRequest = { message: 'test' } as any
const tools: AssistantTool[] = [{ type: 'mockTool', enabled: false }] as any
const result = await manager.process(request, tools)
expect(result).toBe(request)
})
it('should throw an error when process is called without implementation', () => {
class TestTool extends InferenceTool {
name = 'testTool'
}
const tool = new TestTool()
expect(() => tool.process({} as MessageRequest)).toThrowError()
})

View File

@ -1,12 +0,0 @@
import { AssistantTool, MessageRequest } from '../../types'
/**
* Represents a base inference tool.
*/
export abstract class InferenceTool {
abstract name: string
/*
** Process a message request and return the processed message request.
*/
abstract process(request: MessageRequest, tool?: AssistantTool): Promise<MessageRequest>
}

View File

@ -8,6 +8,7 @@ import {
normalizeFilePath,
getJanDataFolderPath,
} from '../../helper'
import { readdirSync, readFileSync } from 'fs'
export class App implements Processor {
observer?: Function
@ -25,8 +26,8 @@ export class App implements Processor {
/**
* Joins multiple paths together, respect to the current OS.
*/
joinPath(args: any[]) {
return join(...args)
joinPath(args: any) {
return join(...('args' in args ? args.args : args))
}
/**
@ -69,6 +70,9 @@ export class App implements Processor {
writeLog(args)
}
/**
* Get app configurations.
*/
getAppConfigurations() {
return appConfiguration()
}

View File

@ -21,18 +21,21 @@ export class FileSystem implements Processor {
return import(FileSystem.moduleName).then((mdl) =>
mdl[route](
...args.map((arg: any, index: number) => {
if(index !== 0) {
const arg0 = args[0]
if ('args' in arg0) arg = arg0.args
if (Array.isArray(arg)) arg = arg[0]
if (index !== 0) {
return arg
}
if (index === 0 && typeof arg !== 'string') {
throw new Error(`Invalid argument ${JSON.stringify(args)}`)
}
const path =
(arg.startsWith(`file:/`) || arg.startsWith(`file:\\`))
? join(getJanDataFolderPath(), normalizeFilePath(arg))
: arg
arg.startsWith(`file:/`) || arg.startsWith(`file:\\`)
? join(getJanDataFolderPath(), normalizeFilePath(arg))
: arg
if(path.startsWith(`http://`) || path.startsWith(`https://`)) {
if (path.startsWith(`http://`) || path.startsWith(`https://`)) {
return path
}
const absolutePath = resolve(path)
@ -88,5 +91,4 @@ export class FileSystem implements Processor {
})
})
}
}
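On the receiving side, the FileSystem processor now accepts both the old positional array and the new { args } payload, unwraps it, and keeps the file:/ prefix handling that anchors relative paths inside the Jan data folder. A condensed sketch of that path handling; the data folder constant and the inlined prefix stripping stand in for getJanDataFolderPath and normalizeFilePath from @janhq/core:

import { join, resolve } from 'path'

const JAN_DATA_FOLDER = '/home/user/jan' // hypothetical data folder

function resolveBridgedPath(payload: string[] | { args: string[] }): string {
  const args = Array.isArray(payload) ? payload : payload.args
  const first = args[0]
  if (typeof first !== 'string') throw new Error(`Invalid argument ${JSON.stringify(args)}`)
  const path =
    first.startsWith('file:/') || first.startsWith('file:\\')
      ? join(JAN_DATA_FOLDER, first.replace(/^file:[\\/]+/, '')) // simplified normalizeFilePath
      : first
  // Remote URLs pass through untouched
  if (path.startsWith('http://') || path.startsWith('https://')) return path
  return resolve(path)
}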

View File

@ -94,8 +94,6 @@ export default class Extension {
`Package ${this.origin} does not contain a valid manifest: ${error}`
)
}
return true
}
/**

View File

@ -18,9 +18,7 @@ export const getAppConfigurations = (): AppConfiguration => {
if (!fs.existsSync(configurationFile)) {
// create default app config if we don't have one
console.debug(
`App config not found, creating default config at ${configurationFile}`
)
console.debug(`App config not found, creating default config at ${configurationFile}`)
fs.writeFileSync(configurationFile, JSON.stringify(appDefaultConfiguration))
return appDefaultConfiguration
}
@ -31,28 +29,23 @@ export const getAppConfigurations = (): AppConfiguration => {
)
return appConfigurations
} catch (err) {
console.error(
`Failed to read app config, return default config instead! Err: ${err}`
)
console.error(`Failed to read app config, return default config instead! Err: ${err}`)
return defaultAppConfig()
}
}
const getConfigurationFilePath = () =>
join(
global.core?.appPath() ||
process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
global.core?.appPath() || process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
configurationFileName
)
export const updateAppConfiguration = (
export const updateAppConfiguration = ({
configuration,
}: {
configuration: AppConfiguration
): Promise<void> => {
}): Promise<void> => {
const configurationFile = getConfigurationFilePath()
console.debug(
'updateAppConfiguration, configurationFile: ',
configurationFile
)
fs.writeFileSync(configurationFile, JSON.stringify(configuration))
return Promise.resolve()
@ -87,14 +80,11 @@ export const getJanExtensionsPath = (): string => {
*/
export const defaultAppConfig = (): AppConfiguration => {
const { app } = require('electron')
const defaultJanDataFolder = join(
app?.getPath('userData') ?? os?.homedir() ?? '',
'data'
)
const defaultJanDataFolder = join(app?.getPath('userData') ?? os?.homedir() ?? '', 'data')
return {
data_folder:
process.env.CI === 'e2e'
? (process.env.APP_CONFIG_PATH ?? resolve('./test-data'))
? process.env.APP_CONFIG_PATH ?? resolve('./test-data')
: defaultJanDataFolder,
quick_ask: false,
}

View File

@ -148,10 +148,7 @@ export const CoreRoutes = [
]
export const APIRoutes = [...CoreRoutes, ...Object.values(NativeRoute)]
export const APIEvents = [
...Object.values(AppEvent),
...Object.values(DownloadEvent),
]
export const APIEvents = [...Object.values(AppEvent), ...Object.values(DownloadEvent)]
export type PayloadType = {
messages: ChatCompletionMessage[]
model: string

View File

@ -1,7 +1,5 @@
import { InferenceEngine } from '../../types'
export type Engines = {
[key in InferenceEngine]: (EngineVariant & EngineConfig)[]
[key: string]: (EngineVariant & EngineConfig)[]
}
export type EngineMetadata = {
@ -22,13 +20,13 @@ export type EngineMetadata = {
}
export type EngineVariant = {
engine: InferenceEngine
engine: string
name: string
version: string
}
export type DefaultEngineVariant = {
engine: InferenceEngine
engine: string
variant: string
version: string
}

View File

@ -7,6 +7,7 @@ export enum ChatCompletionRole {
System = 'system',
Assistant = 'assistant',
User = 'user',
Tool = 'tool',
}
/**
@ -18,6 +19,9 @@ export type ChatCompletionMessage = {
content?: ChatCompletionMessageContent
/** The role of the author of this message. **/
role: ChatCompletionRole
type?: string
output?: string
tool_call_id?: string
}
export type ChatCompletionMessageContent =

View File

@ -36,6 +36,8 @@ export type ThreadMessage = {
type?: string
/** The error code which explain what error type. Used in conjunction with MessageStatus.Error */
error_code?: ErrorCode
tool_call_id?: string
}
/**
@ -43,6 +45,9 @@ export type ThreadMessage = {
* @data_transfer_object
*/
export type MessageRequest = {
/**
* The id of the message request.
*/
id?: string
/**
@ -71,6 +76,11 @@ export type MessageRequest = {
// TODO: deprecate threadId field
thread?: Thread
/**
* ChatCompletion tools
*/
tools?: MessageTool[]
/** Engine name to process */
engine?: string
@ -78,6 +88,24 @@ export type MessageRequest = {
type?: string
}
/**
* ChatCompletion Tool parameters
*/
export type MessageTool = {
type: string
function: MessageFunction
}
/**
* ChatCompletion Tool's function parameters
*/
export type MessageFunction = {
name: string
description?: string
parameters?: Record<string, unknown>
strict?: boolean
}
/**
* The status of the message.
* @data_transfer_object
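The new tools field on MessageRequest, together with MessageTool and MessageFunction, lets a request carry OpenAI-style function-tool definitions, and ChatCompletionRole gains a Tool role with tool_call_id / output for returning results. A hedged example request; the import path, tool name, parameters, and ids are invented for illustration:

import {
  ChatCompletionRole,
  MessageRequest,
  MessageRequestType,
} from '@janhq/core' // assumed export path

const request: MessageRequest = {
  threadId: 'thread-1',
  type: MessageRequestType.Thread,
  assistantId: 'assistant-1',
  model: { id: 'my-model-id', engine: 'openai' },
  messages: [{ role: ChatCompletionRole.User, content: 'What is the weather in Hanoi?' }],
  tools: [
    {
      type: 'function',
      function: {
        name: 'get_weather',
        description: 'Look up current weather for a city',
        parameters: { type: 'object', properties: { city: { type: 'string' } } },
        strict: false,
      },
    },
  ],
}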

View File

@ -6,29 +6,7 @@ export type ModelInfo = {
id: string
settings?: ModelSettingParams
parameters?: ModelRuntimeParams
engine?: InferenceEngine
}
/**
* Represents the inference engine.
* @stored
*/
export enum InferenceEngine {
anthropic = 'anthropic',
mistral = 'mistral',
martian = 'martian',
openrouter = 'openrouter',
nitro = 'nitro',
openai = 'openai',
groq = 'groq',
triton_trtllm = 'triton_trtllm',
nitro_tensorrt_llm = 'nitro-tensorrt-llm',
cohere = 'cohere',
nvidia = 'nvidia',
cortex = 'cortex',
cortex_llamacpp = 'llama-cpp',
cortex_onnx = 'onnxruntime',
cortex_tensorrtllm = 'tensorrt-llm',
engine?: string
}
// Represents an artifact of a model, including its filename and URL
@ -105,7 +83,7 @@ export type Model = {
/**
* The model engine.
*/
engine: InferenceEngine
engine: string
}
// Represents metadata associated with a model

View File

@ -27,8 +27,8 @@ export type Thread = {
* @stored
*/
export type ThreadAssistantInfo = {
assistant_id: string
assistant_name: string
id: string
name: string
model: ModelInfo
instructions?: string
tools?: AssistantTool[]

View File

@ -1,46 +0,0 @@
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
plugins: ['@typescript-eslint'],
env: {
node: true,
},
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'plugin:react/recommended',
],
rules: {
'react/prop-types': 'off', // In favor of strong typing - no need to dedupe
'react/no-is-mounted': 'off',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/no-var-requires': 'off',
'@typescript-eslint/ban-ts-comment': 'off',
'@typescript-eslint/no-unused-vars': 'off',
'@typescript-eslint/no-explicit-any': 'off',
},
settings: {
react: {
createClass: 'createReactClass', // Regex for Component Factory to use,
// default to "createReactClass"
pragma: 'React', // Pragma to use, default to "React"
version: 'detect', // React version. "detect" automatically picks the version you have installed.
// You can also use `16.0`, `16.3`, etc, if you want to override the detected value.
// default to latest and warns if missing
// It will default to "detect" in the future
},
linkComponents: [
// Components used as alternatives to <a> for linking, eg. <Link to={ url } />
'Hyperlink',
{ name: 'Link', linkAttribute: 'to' },
],
},
ignorePatterns: [
'build',
'renderer',
'node_modules',
'@global',
'playwright-report',
'test-data',
],
}

View File

@ -1,10 +0,0 @@
export {}
declare global {
namespace NodeJS {
interface Global {
core: any
}
}
var core: any | undefined
}

View File

@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@ -1,20 +0,0 @@
import { Handler, RequestHandler } from '@janhq/core/node'
import { ipcMain } from 'electron'
import { windowManager } from '../managers/window'
export function injectHandler() {
const ipcWrapper: Handler = (
route: string,
listener: (...args: any[]) => any
) =>
ipcMain.handle(route, async (_event, ...args: any[]) => {
return listener(...args)
})
const handler = new RequestHandler(
ipcWrapper,
(channel: string, args: any) =>
windowManager.mainWindow?.webContents.send(channel, args)
)
handler.handle()
}

View File

@ -1,327 +0,0 @@
import { app, ipcMain, dialog, shell, nativeTheme } from 'electron'
import { autoUpdater } from 'electron-updater'
import { join } from 'path'
import { windowManager } from '../managers/window'
import {
ModuleManager,
getJanDataFolderPath,
getJanExtensionsPath,
init,
AppEvent,
NativeRoute,
SelectFileProp,
} from '@janhq/core/node'
import { SelectFileOption } from '@janhq/core'
import { menu } from '../utils/menu'
import { migrate } from '../utils/migration'
import { createUserSpace } from '../utils/path'
import { setupExtensions } from '../utils/extension'
const isMac = process.platform === 'darwin'
export function handleAppIPCs() {
/**
* Handles the "openAppDirectory" IPC message by opening the app's user data directory.
* The `shell.openPath` method is used to open the directory in the user's default file explorer.
* @param _event - The IPC event object.
*/
ipcMain.handle(NativeRoute.openAppDirectory, async (_event) => {
shell.openPath(getJanDataFolderPath())
})
ipcMain.handle(NativeRoute.appUpdateDownload, async (_event) => {
autoUpdater.downloadUpdate()
})
/**
* Handles the "setNativeThemeLight" IPC message by setting the native theme source to "light".
* This will change the appearance of the app to the light theme.
*/
ipcMain.handle(NativeRoute.setNativeThemeLight, () => {
nativeTheme.themeSource = 'light'
})
/**
* Handles the "setCloseApp" IPC message by closing the main application window.
* This effectively closes the application if no other windows are open.
*/
ipcMain.handle(NativeRoute.setCloseApp, () => {
windowManager.mainWindow?.close()
})
/**
* Handles the "setMinimizeApp" IPC message by minimizing the main application window.
* The window will be minimized to the system's taskbar or dock.
*/
ipcMain.handle(NativeRoute.setMinimizeApp, () => {
windowManager.mainWindow?.minimize()
})
/**
* Handles the "setMaximizeApp" IPC message. It toggles the maximization state of the main window.
* If the window is currently maximized, it will be un-maximized (restored to its previous size).
* If the window is not maximized, it will be maximized to fill the screen.
* @param _event - The IPC event object.
*/
ipcMain.handle(NativeRoute.setMaximizeApp, async (_event) => {
if (windowManager.mainWindow?.isMaximized()) {
windowManager.mainWindow.unmaximize()
} else {
windowManager.mainWindow?.maximize()
}
})
/**
* Handles the "setNativeThemeDark" IPC message by setting the native theme source to "dark".
* This will change the appearance of the app to the dark theme.
*/
ipcMain.handle(NativeRoute.setNativeThemeDark, () => {
nativeTheme.themeSource = 'dark'
})
/**
* Opens a URL in the user's default browser.
* @param _event - The IPC event object.
* @param url - The URL to open.
*/
ipcMain.handle(NativeRoute.openExternalUrl, async (_event, url) => {
shell.openExternal(url)
})
/**
* Opens a URL in the user's default browser.
* @param _event - The IPC event object.
* @param url - The URL to open.
*/
ipcMain.handle(NativeRoute.openFileExplore, async (_event, url) => {
shell.openPath(url)
})
/**
* Relaunches the app in production; reloads the window in development.
* @param _event - The IPC event object.
* @param url - The URL to reload.
*/
ipcMain.handle(NativeRoute.relaunch, async (_event) => {
ModuleManager.instance.clearImportedModules()
if (app.isPackaged) {
app.relaunch()
app.exit()
} else {
for (const modulePath in ModuleManager.instance.requiredModules) {
delete require.cache[
require.resolve(join(getJanExtensionsPath(), modulePath))
]
}
init({
// Function for the main process to confirm whether the user wants to install an extension
confirmInstall: async (_extensions: string[]) => {
return true
},
// Path to install extension to
extensionsPath: getJanExtensionsPath(),
})
windowManager.mainWindow?.reload()
}
})
/**
* Handles the "selectDirectory" IPC message to open a dialog for selecting a directory.
* If no main window is found, logs an error and exits.
* @returns {string} The path of the selected directory, or nothing if canceled.
*/
ipcMain.handle(NativeRoute.selectDirectory, async () => {
const mainWindow = windowManager.mainWindow
if (!mainWindow) {
console.error('No main window found')
return
}
const { canceled, filePaths } = await dialog.showOpenDialog(mainWindow, {
title: 'Select a folder',
buttonLabel: 'Select Folder',
properties: ['openDirectory', 'createDirectory'],
})
if (canceled) {
return
} else {
return filePaths[0]
}
})
/**
* Handles the "selectFiles" IPC message to open a dialog for selecting files.
* Allows options for setting the dialog title, button label, and selection properties.
* Logs an error if no main window is found.
* @param _event - The IPC event object.
* @param option - Options for customizing file selection dialog.
* @returns {string[]} An array of selected file paths, or nothing if canceled.
*/
ipcMain.handle(
NativeRoute.selectFiles,
async (_event, option?: SelectFileOption) => {
const mainWindow = windowManager.mainWindow
if (!mainWindow) {
console.error('No main window found')
return
}
const title = option?.title ?? 'Select files'
const buttonLabel = option?.buttonLabel ?? 'Select'
const props: SelectFileProp[] = ['openFile']
if (option?.allowMultiple) {
props.push('multiSelections')
}
if (option?.selectDirectory) {
props.push('openDirectory')
}
console.debug(`Select files with props: ${props}`)
const { canceled, filePaths } = await dialog.showOpenDialog(mainWindow, {
title,
buttonLabel,
properties: props,
filters: option?.filters,
})
if (canceled) return
return filePaths
}
)
/**
* Handles the "hideQuickAskWindow" IPC message to hide the quick ask window.
* @returns A promise that resolves when the window is hidden.
*/
ipcMain.handle(
NativeRoute.hideQuickAskWindow,
async (): Promise<void> => windowManager.hideQuickAskWindow()
)
/**
* Handles the "sendQuickAskInput" IPC message to send user input to the main window.
* @param _event - The IPC event object.
* @param input - User input string to be sent.
*/
ipcMain.handle(
NativeRoute.sendQuickAskInput,
async (_event, input: string): Promise<void> => {
windowManager.mainWindow?.webContents.send(
AppEvent.onUserSubmitQuickAsk,
input
)
}
)
/**
* Handles the "showOpenMenu" IPC message to show the context menu at given coordinates.
* Only applicable on non-Mac platforms.
* @param e - The event object.
* @param args - Contains coordinates where the menu should appear.
*/
ipcMain.handle(NativeRoute.showOpenMenu, function (e, args) {
if (!isMac && windowManager.mainWindow) {
menu.popup({
window: windowManager.mainWindow,
x: args.x,
y: args.y,
})
}
})
/**
* Handles the "hideMainWindow" IPC message to hide the main application window.
* @returns A promise that resolves when the window is hidden.
*/
ipcMain.handle(
NativeRoute.hideMainWindow,
async (): Promise<void> => windowManager.hideMainWindow()
)
/**
* Handles the "showMainWindow" IPC message to show the main application window.
* @returns A promise that resolves when the window is shown.
*/
ipcMain.handle(
NativeRoute.showMainWindow,
async (): Promise<void> => windowManager.showMainWindow()
)
/**
* Handles the "quickAskSizeUpdated" IPC message to update the size of the quick ask window.
* Resizes window by the given height offset.
* @param _event - The IPC event object.
* @param heightOffset - The amount of height to increase.
* @returns A promise that resolves when the window is resized.
*/
ipcMain.handle(
NativeRoute.quickAskSizeUpdated,
async (_event, heightOffset: number): Promise<void> =>
windowManager.expandQuickAskWindow(heightOffset)
)
/**
* Handles the "ackDeepLink" IPC message to acknowledge a deep link.
* Triggers handling of deep link in the application.
* @param _event - The IPC event object.
* @returns A promise that resolves when the deep link is acknowledged.
*/
ipcMain.handle(NativeRoute.ackDeepLink, async (_event): Promise<void> => {
windowManager.ackDeepLink()
})
/**
* Handles the "factoryReset" IPC message to reset the application to its initial state.
* Clears loaded modules, recreates user space, runs migrations, and sets up extensions.
* @param _event - The IPC event object.
* @returns A promise that resolves after the reset operations are complete.
*/
ipcMain.handle(NativeRoute.factoryReset, async (_event): Promise<void> => {
ModuleManager.instance.clearImportedModules()
return createUserSpace().then(migrate).then(setupExtensions)
})
/**
* Handles the "startServer" IPC message to start the Jan API server.
* Initializes and starts server with provided configuration options.
* @param _event - The IPC event object.
* @param args - Configuration object containing host, port, CORS settings etc.
* @returns Promise that resolves when server starts successfully
*/
ipcMain.handle(
NativeRoute.startServer,
async (_event, args): Promise<void> => {
const { startServer } = require('@janhq/server')
return startServer({
host: args?.host,
port: args?.port,
isCorsEnabled: args?.isCorsEnabled,
isVerboseEnabled: args?.isVerboseEnabled,
prefix: args?.prefix,
})
}
)
/**
* Handles the "stopServer" IPC message to stop the Jan API server.
* Gracefully shuts down the server instance.
* @param _event - The IPC event object
* @returns Promise that resolves when server stops successfully
*/
ipcMain.handle(NativeRoute.stopServer, async (_event): Promise<void> => {
/**
* Stop Jan API Server.
*/
const { stopServer } = require('@janhq/server')
return stopServer()
})
/**
* Handles the "appToken" IPC message to generate a random app ID.
*/
ipcMain.handle(NativeRoute.appToken, async (_event): Promise<string> => {
return process.env.appToken ?? 'cortex.cpp'
})
}
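On the renderer side these routes are reached through the electronAPI bridge that the preload script (shown later in this diff) builds from APIRoutes, so each native route is a single ipcRenderer.invoke round trip. A minimal sketch of a hypothetical call site, assuming NativeRoute.selectFiles resolves to the 'selectFiles' channel:
// Renderer-side sketch; the preload maps every route name to an invoke wrapper.
const api = (window as any).electronAPI as Record<string, (...args: unknown[]) => Promise<any>>
// Mirrors the SelectFileOption handled above; resolves to the selected paths,
// or undefined when the dialog is canceled.
async function pickModelFiles(): Promise<string[] | undefined> {
  return api.selectFiles({
    title: 'Select model files',
    buttonLabel: 'Import',
    allowMultiple: true,
  })
}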

View File

@ -1,70 +0,0 @@
import { app, dialog } from 'electron'
import { windowManager } from './../managers/window'
import {
ProgressInfo,
UpdateDownloadedEvent,
UpdateInfo,
autoUpdater,
} from 'electron-updater'
import { AppEvent } from '@janhq/core/node'
import { trayManager } from '../managers/tray'
export let waitingToInstallVersion: string | undefined = undefined
export function handleAppUpdates() {
/* Should not check for update during development */
if (!app.isPackaged) {
return
}
/* New Update Available */
autoUpdater.on('update-available', async (_info: UpdateInfo) => {
windowManager.mainWindow?.webContents.send(
AppEvent.onAppUpdateAvailable,
{}
)
})
/* App Update Completion Message */
autoUpdater.on('update-downloaded', async (_info: UpdateDownloadedEvent) => {
windowManager.mainWindow?.webContents.send(
AppEvent.onAppUpdateDownloadSuccess,
{}
)
const action = await dialog.showMessageBox({
message: `Update downloaded. Please restart the application to apply the updates.`,
buttons: ['Restart', 'Later'],
})
if (action.response === 0) {
trayManager.destroyCurrentTray()
windowManager.closeQuickAskWindow()
waitingToInstallVersion = _info?.version
autoUpdater.quitAndInstall()
}
})
/* App Update Error */
autoUpdater.on('error', (info: Error) => {
windowManager.mainWindow?.webContents.send(
AppEvent.onAppUpdateDownloadError,
{ failedToInstallVersion: waitingToInstallVersion, info }
)
})
/* App Update Progress */
autoUpdater.on('download-progress', (progress: ProgressInfo) => {
console.debug('app update progress: ', progress.percent)
windowManager.mainWindow?.webContents.send(
AppEvent.onAppUpdateDownloadUpdate,
{
...progress,
}
)
})
autoUpdater.autoDownload = false
autoUpdater.autoInstallOnAppQuit = true
if (process.env.CI !== 'e2e') {
autoUpdater.checkForUpdates()
}
}
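The renderer consumes these updater events through the same preload bridge: every AppEvent name is exposed as a subscription function, and the download itself is started over the appUpdateDownload route registered earlier. A sketch under the assumption that the AppEvent and NativeRoute values match their member names:
// Renderer-side sketch: react to updater events and trigger the download.
const api = (window as any).electronAPI
api.onAppUpdateAvailable(() => {
  // Ask the main process (NativeRoute.appUpdateDownload) to start the download.
  api.appUpdateDownload()
})
api.onAppUpdateDownloadUpdate((_event: unknown, progress: { percent: number }) => {
  console.debug(`update download: ${progress.percent.toFixed(1)}%`)
})
api.onAppUpdateDownloadError((_event: unknown, payload: { failedToInstallVersion?: string }) => {
  console.error('update download failed', payload.failedToInstallVersion)
})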

Binary files not shown: multiple icon image assets removed.
View File

@ -1,18 +0,0 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
modulePathIgnorePatterns: ['<rootDir>/tests'],
moduleNameMapper: {
'@/(.*)': '<rootDir>/src/$1',
},
runner: './testRunner.js',
transform: {
'^.+\\.tsx?$': [
'ts-jest',
{
diagnostics: false,
},
],
},
}

View File

@ -1,161 +0,0 @@
import { app, BrowserWindow } from 'electron'
import { join, resolve } from 'path'
/**
* Managers
**/
import { windowManager } from './managers/window'
import { getAppConfigurations, log } from '@janhq/core/node'
/**
* IPC Handlers
**/
import { injectHandler } from './handlers/common'
import { handleAppUpdates } from './handlers/update'
import { handleAppIPCs } from './handlers/native'
/**
* Utils
**/
import { setupMenu } from './utils/menu'
import { createUserSpace } from './utils/path'
import { migrate } from './utils/migration'
import { cleanUpAndQuit } from './utils/clean'
import { setupExtensions } from './utils/extension'
import { setupCore } from './utils/setup'
import { setupReactDevTool } from './utils/dev'
import { trayManager } from './managers/tray'
import { logSystemInfo } from './utils/system'
import { registerGlobalShortcuts } from './utils/shortcut'
import { registerLogger } from './utils/logger'
import { randomBytes } from 'crypto'
const preloadPath = join(__dirname, 'preload.js')
const preloadQuickAskPath = join(__dirname, 'preload.quickask.js')
const rendererPath = join(__dirname, '..', 'renderer')
const quickAskPath = join(rendererPath, 'search.html')
const mainPath = join(rendererPath, 'index.html')
const mainUrl = 'http://localhost:3000'
const quickAskUrl = `${mainUrl}/search`
const gotTheLock = app.requestSingleInstanceLock()
if (process.defaultApp) {
if (process.argv.length >= 2) {
app.setAsDefaultProtocolClient('jan', process.execPath, [
resolve(process.argv[1]),
])
}
} else {
app.setAsDefaultProtocolClient('jan')
}
const createMainWindow = () => {
const startUrl = app.isPackaged ? `file://${mainPath}` : mainUrl
windowManager.createMainWindow(preloadPath, startUrl)
}
// Generate a random token for the app
// This token is used for authentication when making requests to the cortex.cpp server
process.env.appToken = randomBytes(16).toString('hex')
app
.whenReady()
.then(() => {
if (!gotTheLock) {
app.quit()
throw new Error('Another instance of the app is already running')
} else {
app.on(
'second-instance',
(_event, commandLine, _workingDirectory): void => {
if (process.platform === 'win32' || process.platform === 'linux') {
// this is for handling deeplink on windows and linux
// since those OS will emit second-instance instead of open-url
const url = commandLine.pop()
if (url) {
windowManager.sendMainAppDeepLink(url)
}
}
windowManager.showMainWindow()
}
)
}
})
.then(setupCore)
.then(createUserSpace)
.then(registerLogger)
.then(migrate)
.then(setupExtensions)
.then(setupMenu)
.then(handleIPCs)
.then(() => process.env.CI !== 'e2e' && createQuickAskWindow())
.then(createMainWindow)
.then(handleAppUpdates)
.then(registerGlobalShortcuts)
.then(() => {
if (!app.isPackaged) {
setupReactDevTool()
windowManager.mainWindow?.webContents.openDevTools()
}
})
.then(() => process.env.CI !== 'e2e' && trayManager.createSystemTray())
.then(logSystemInfo)
.then(() => {
app.on('activate', () => {
if (!BrowserWindow.getAllWindows().length) {
createMainWindow()
} else {
windowManager.showMainWindow()
}
})
})
app.on('open-url', (_event, url) => {
windowManager.sendMainAppDeepLink(url)
})
app.on('before-quit', function (_event) {
trayManager.destroyCurrentTray()
})
app.once('quit', () => {
cleanUpAndQuit()
})
app.once('window-all-closed', () => {
// Feature Toggle for Quick Ask
if (
getAppConfigurations().quick_ask &&
!windowManager.isQuickAskWindowDestroyed()
)
return
cleanUpAndQuit()
})
function createQuickAskWindow() {
// Feature Toggle for Quick Ask
if (!getAppConfigurations().quick_ask) return
const startUrl = app.isPackaged ? `file://${quickAskPath}` : quickAskUrl
windowManager.createQuickAskWindow(preloadQuickAskPath, startUrl)
}
/**
* Handles various IPC messages from the renderer process.
*/
function handleIPCs() {
// Inject core handlers for IPCs
injectHandler()
// Handle native IPCs
handleAppIPCs()
}
/*
** Suppress Node error messages
*/
process.on('uncaughtException', function (err) {
log(`Error: ${err}`)
})
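Deep links arrive differently per platform: macOS delivers them through open-url, while Windows and Linux surface them either in process.argv on a cold start or via second-instance, and both paths end in windowManager.sendMainAppDeepLink. A minimal sketch of the renderer end of that handshake (the jan:// URL content and channel names are assumptions):
// Renderer-side sketch: receive a deep link and acknowledge it so the
// retry loop in WindowManager (shown further down) stops re-sending it.
const api = (window as any).electronAPI
api.onDeepLink(async (_event: unknown, url: string) => {
  console.log('received deep link:', url) // e.g. a jan:// URL
  await api.ackDeepLink()
})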

View File

@ -1,21 +0,0 @@
const DEFAULT_MIN_WIDTH = 400
const DEFAULT_MIN_HEIGHT = 600
export const mainWindowConfig: Electron.BrowserWindowConstructorOptions = {
skipTaskbar: false,
minWidth: DEFAULT_MIN_WIDTH,
minHeight: DEFAULT_MIN_HEIGHT,
show: true,
// we want to go frameless on windows and linux
transparent: process.platform === 'darwin',
frame: process.platform === 'darwin',
titleBarStyle: 'hiddenInset',
vibrancy: 'fullscreen-ui',
visualEffectState: 'active',
backgroundMaterial: 'acrylic',
autoHideMenuBar: true,
trafficLightPosition: {
x: 16,
y: 10,
},
}

View File

@ -1,22 +0,0 @@
const DEFAULT_WIDTH = 556
const DEFAULT_HEIGHT = 60
export const quickAskWindowConfig: Electron.BrowserWindowConstructorOptions = {
width: DEFAULT_WIDTH,
height: DEFAULT_HEIGHT,
skipTaskbar: true,
acceptFirstMouse: true,
hasShadow: true,
alwaysOnTop: true,
show: false,
fullscreenable: false,
resizable: false,
center: true,
movable: true,
maximizable: false,
focusable: true,
transparent: false,
frame: false,
type: 'panel',
}

View File

@ -1,51 +0,0 @@
import { join } from 'path'
import { Tray, app, Menu } from 'electron'
import { windowManager } from '../managers/window'
import { getAppConfigurations } from '@janhq/core/node'
class TrayManager {
currentTray: Tray | undefined
createSystemTray = () => {
// Feature Toggle for Quick Ask
if (!getAppConfigurations().quick_ask) return
if (this.currentTray) {
return
}
const iconPath = join(app.getAppPath(), 'icons', 'icon-tray.png')
const tray = new Tray(iconPath)
tray.setToolTip(app.getName())
tray.on('click', () => {
windowManager.showQuickAskWindow()
})
// Add context menu for windows only
if (process.platform === 'win32') {
const contextMenu = Menu.buildFromTemplate([
{
label: 'Open Jan',
type: 'normal',
click: () => windowManager.showMainWindow(),
},
{
label: 'Open Quick Ask',
type: 'normal',
click: () => windowManager.showQuickAskWindow(),
},
{ label: 'Quit', type: 'normal', click: () => app.quit() },
])
tray.setContextMenu(contextMenu)
}
this.currentTray = tray
}
destroyCurrentTray() {
this.currentTray?.destroy()
this.currentTray = undefined
}
}
export const trayManager = new TrayManager()

View File

@ -1,215 +0,0 @@
import { BrowserWindow, app, shell } from 'electron'
import { quickAskWindowConfig } from './quickAskWindowConfig'
import { mainWindowConfig } from './mainWindowConfig'
import { getAppConfigurations, AppEvent } from '@janhq/core/node'
import { getBounds, saveBounds } from '../utils/setup'
/**
* Manages the current window instance.
*/
// TODO: refactor this
let isAppQuitting = false
class WindowManager {
public mainWindow?: BrowserWindow
private _quickAskWindow: BrowserWindow | undefined = undefined
private _quickAskWindowVisible = false
private _mainWindowVisible = false
private deeplink: string | undefined
/**
* Creates a new window instance.
* @returns The created window instance.
*/
async createMainWindow(preloadPath: string, startUrl: string) {
const bounds = await getBounds()
this.mainWindow = new BrowserWindow({
...mainWindowConfig,
width: bounds.width,
height: bounds.height,
show: false,
x: bounds.x,
y: bounds.y,
webPreferences: {
nodeIntegration: true,
preload: preloadPath,
webSecurity: false,
},
})
if (process.platform === 'win32' || process.platform === 'linux') {
/// This is a workaround for Windows/Linux deep links:
/// the second-instance event is not fired when the app is not already open,
/// so the app would not receive the deep link.
const commandLine = process.argv.slice(1)
if (commandLine.length > 0) {
const url = commandLine[0]
this.sendMainAppDeepLink(url)
}
}
this.mainWindow.on('resized', () => {
saveBounds(this.mainWindow?.getBounds())
})
this.mainWindow.on('moved', () => {
saveBounds(this.mainWindow?.getBounds())
})
/* Load frontend app to the window */
this.mainWindow.loadURL(startUrl)
/* Open external links in the default browser */
this.mainWindow.webContents.setWindowOpenHandler(({ url }) => {
shell.openExternal(url)
return { action: 'deny' }
})
app.on('before-quit', function () {
isAppQuitting = true
})
windowManager.mainWindow?.on('close', function (evt) {
// Feature Toggle for Quick Ask
if (!getAppConfigurations().quick_ask) return
if (!isAppQuitting) {
evt.preventDefault()
windowManager.hideMainWindow()
}
})
windowManager.mainWindow?.on('ready-to-show', function () {
windowManager.mainWindow?.show()
})
}
createQuickAskWindow(preloadPath: string, startUrl: string): void {
this._quickAskWindow = new BrowserWindow({
...quickAskWindowConfig,
webPreferences: {
nodeIntegration: true,
preload: preloadPath,
webSecurity: false,
},
})
this._quickAskWindow.loadURL(startUrl)
this._quickAskWindow.on('blur', () => {
this.hideQuickAskWindow()
})
}
isMainWindowVisible(): boolean {
return this._mainWindowVisible
}
hideMainWindow(): void {
this.mainWindow?.hide()
this._mainWindowVisible = false
}
showMainWindow(): void {
this.mainWindow?.show()
this._mainWindowVisible = true
}
hideQuickAskWindow(): void {
this._quickAskWindow?.hide()
this._quickAskWindowVisible = false
}
showQuickAskWindow(): void {
this._quickAskWindow?.show()
this._quickAskWindowVisible = true
}
closeQuickAskWindow(): void {
if (this._quickAskWindow?.isDestroyed()) return
this._quickAskWindow?.close()
this._quickAskWindow?.destroy()
this._quickAskWindow = undefined
this._quickAskWindowVisible = false
}
isQuickAskWindowVisible(): boolean {
return this._quickAskWindowVisible
}
isQuickAskWindowDestroyed(): boolean {
return this._quickAskWindow?.isDestroyed() ?? true
}
/**
* Expand the quick ask window
*/
expandQuickAskWindow(heightOffset: number): void {
const width = quickAskWindowConfig.width!
const height = quickAskWindowConfig.height! + heightOffset
this._quickAskWindow?.setMinimumSize(width, height)
this._quickAskWindow?.setSize(width, height, true)
}
/**
* Send the selected text to the quick ask window.
*/
sendQuickAskSelectedText(selectedText: string): void {
this._quickAskWindow?.webContents.send(
AppEvent.onSelectedText,
selectedText
)
}
/**
* Try to send the deep link to the main app.
*/
sendMainAppDeepLink(url: string): void {
this.deeplink = url
const interval = setInterval(() => {
if (!this.deeplink) clearInterval(interval)
const mainWindow = this.mainWindow
if (mainWindow) {
mainWindow.webContents.send(AppEvent.onDeepLink, this.deeplink)
if (mainWindow.isMinimized()) mainWindow.restore()
mainWindow.focus()
}
}, 500)
}
/**
* Send main view state to the main app.
*/
sendMainViewState(route: string) {
if (this.mainWindow && !this.mainWindow.isDestroyed()) {
this.mainWindow.webContents.send(AppEvent.onMainViewStateChange, route)
}
}
/**
* Clean up all windows.
*/
cleanUp(): void {
if (!this.mainWindow?.isDestroyed()) {
this.mainWindow?.close()
this.mainWindow?.destroy()
this.mainWindow = undefined
this._mainWindowVisible = false
}
if (!this._quickAskWindow?.isDestroyed()) {
this._quickAskWindow?.close()
this._quickAskWindow?.destroy()
this._quickAskWindow = undefined
this._quickAskWindowVisible = false
}
}
/**
* Acknowledges that the window has received the deep link, so it can be cleared.
*/
ackDeepLink() {
this.deeplink = undefined
}
}
export const windowManager = new WindowManager()
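The quick ask window grows to fit its content through the quickAskSizeUpdated route: the renderer reports a height offset and expandQuickAskWindow adds it to the base quickAskWindowConfig height. A renderer-side sketch under those assumptions:
// Quick ask renderer sketch: resize the panel to fit the rendered content.
const api = (window as any).electronAPI
function fitQuickAskToContent(container: HTMLElement) {
  const baseHeight = 60 // matches DEFAULT_HEIGHT in quickAskWindowConfig
  const heightOffset = Math.max(0, container.scrollHeight - baseHeight)
  return api.quickAskSizeUpdated(heightOffset)
}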

View File

@ -1,29 +0,0 @@
const yaml = require('js-yaml')
const fs = require('fs')
// get two file paths from arguments:
const [, , ...args] = process.argv
const file1 = args[0]
const file2 = args[1]
const file3 = args[2]
// check that all three arguments are present; throw an error if any is missing
if (!file1 || !file2 || !file3) {
throw new Error(
'Please provide 3 file paths as arguments: path to file1, path to file2, and the destination path'
)
}
const doc1 = yaml.load(fs.readFileSync(file1, 'utf8'))
console.log('doc1: ', doc1)
const doc2 = yaml.load(fs.readFileSync(file2, 'utf8'))
console.log('doc2: ', doc2)
// Shallow merge: top-level fields from doc2 (including its files array) take precedence
const merged = { ...doc1, ...doc2 }
// Re-append doc1's file entries so the merged manifest lists artifacts from both inputs
merged.files.push(...doc1.files)
console.log('merged', merged)
const mergedYml = yaml.dump(merged)
fs.writeFileSync(file3, mergedYml, 'utf8')
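The script expects two electron-updater manifests plus a destination path. A sketch of invoking it from Node; the script and manifest file names here are hypothetical:
// Hypothetical invocation: merge the x64 and arm64 mac updater manifests into one file.
import { execFileSync } from 'child_process'
execFileSync(
  'node',
  ['merge-latest-ymls.js', 'latest-mac.yml', 'latest-mac-arm64.yml', 'latest-mac-merged.yml'],
  { stdio: 'inherit' }
)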

View File

@ -1,146 +0,0 @@
{
"name": "jan",
"version": "0.1.1740752217",
"main": "./build/main.js",
"author": "Jan <service@jan.ai>",
"license": "MIT",
"productName": "Jan",
"homepage": "https://github.com/menloresearch/jan/tree/main/electron",
"description": "Use offline LLMs with your own data. Run open source models like Llama2 or Falcon on your internal computers/servers.",
"build": {
"appId": "jan.ai.app",
"productName": "Jan",
"files": [
"renderer/**/*",
"build/**/*.{js,map}",
"pre-install",
"themes",
"scripts/**/*",
"icons/**/*",
"themes",
"shared"
],
"asarUnpack": [
"pre-install",
"themes",
"docs",
"scripts",
"icons",
"themes",
"shared"
],
"publish": [
{
"provider": "github",
"owner": "janhq",
"repo": "jan"
}
],
"extends": null,
"mac": {
"type": "distribution",
"entitlements": "./entitlements.mac.plist",
"entitlementsInherit": "./entitlements.mac.plist",
"notarize": {
"teamId": "F8AH6NHVY5"
},
"icon": "icons/icon.png"
},
"linux": {
"target": [
"deb"
],
"category": "Utility",
"icon": "icons/"
},
"win": {
"icon": "icons/icon.png",
"target": [
"nsis"
]
},
"nsis": {
"oneClick": true,
"installerIcon": "icons/icon.ico",
"uninstallerIcon": "icons/icon.ico",
"include": "scripts/uninstaller.nsh",
"deleteAppDataOnUninstall": true
},
"protocols": [
{
"name": "Jan",
"schemes": [
"jan"
]
}
],
"artifactName": "jan-${os}-${arch}-${version}.${ext}"
},
"scripts": {
"lint": "eslint . --ext \".js,.jsx,.ts,.tsx\"",
"test:e2e": "DEBUG=pw:browser xvfb-maybe -- playwright test --workers=1",
"copy:assets": "rimraf --glob \"./pre-install/*.tgz\" && cpx \"../pre-install/*.tgz\" \"./pre-install\"",
"version-patch": "run-script-os",
"version-patch:darwin:linux": "jq '.version' package.json | tr -d '\"' > .version.bak && jq --arg ver \"0.1.$(date +%s)\" '.version = $ver' package.json > package.tmp && mv package.tmp package.json",
"version-patch:win32": "node -e \"const fs=require('fs');const pkg=require('./package.json');const bak=pkg.version;fs.writeFileSync('.version.bak',bak);pkg.version='0.1.'+Math.floor(Date.now()/1000);fs.writeFileSync('package.json',JSON.stringify(pkg,null,2));\"",
"version-restore": "run-script-os",
"version-restore:darwin:linux": "jq --arg ver $(cat .version.bak) '.version = $ver' package.json > package.tmp && mv package.tmp package.json && rm .version.bak",
"version-restore:win32": "node -e \"const fs=require('fs');const pkg=require('./package.json');const bak=fs.readFileSync('.version.bak','utf8');pkg.version=bak;fs.writeFileSync('package.json',JSON.stringify(pkg,null,2));\"",
"dev:darwin:linux": "yarn copy:assets && tsc -p . && yarn version-patch && electron . && yarn version-restore",
"dev:windows": "yarn copy:assets && tsc -p . && electron .",
"dev": "run-script-os",
"compile": "tsc -p .",
"start": "electron .",
"build": "yarn copy:assets && run-script-os",
"build:test": "yarn copy:assets && run-script-os",
"build:test:darwin": "tsc -p . && electron-builder -p never -m --dir",
"build:test:win32": "tsc -p . && electron-builder -p never -w --dir",
"build:test:linux": "tsc -p . && electron-builder -p never -l --dir",
"build:darwin": "tsc -p . && electron-builder -p never -m --universal",
"build:win32": "tsc -p . && electron-builder -p never -w",
"build:linux": "tsc -p . && electron-builder -p never -l deb -l AppImage",
"build:publish": "yarn copy:assets && run-script-os",
"build:publish:darwin": "tsc -p . && electron-builder -p always -m --universal",
"build:publish:win32": "tsc -p . && electron-builder -p always -w",
"build:publish:linux": "tsc -p . && electron-builder -p always -l deb -l AppImage"
},
"dependencies": {
"@alumna/reflect": "^1.1.3",
"@janhq/core": "link:../core",
"@janhq/server": "link:../server",
"@kirillvakalov/nut-tree__nut-js": "4.2.1-2",
"@npmcli/arborist": "^7.1.0",
"electron-store": "^8.1.0",
"electron-updater": "^6.1.7",
"fs-extra": "^11.2.0",
"pacote": "^21.0.0",
"request": "^2.88.2",
"request-progress": "^3.0.0",
"ulidx": "^2.3.0"
},
"devDependencies": {
"@electron/notarize": "^2.5.0",
"@playwright/test": "^1.38.1",
"@reportportal/agent-js-playwright": "^5.1.7",
"@types/npmcli__arborist": "^5.6.4",
"@types/pacote": "^11.1.7",
"@types/request": "^2.48.12",
"@typescript-eslint/eslint-plugin": "^6.7.3",
"@typescript-eslint/parser": "^6.7.3",
"electron": "30.0.6",
"electron-builder": "^24.13.3",
"electron-builder-squirrel-windows": "^24.13.3",
"electron-devtools-installer": "^3.2.0",
"electron-playwright-helpers": "^1.6.0",
"eslint": "8.57.0",
"eslint-plugin-react": "^7.34.0",
"rimraf": "^5.0.5",
"run-script-os": "^1.1.6",
"typescript": "^5.3.3",
"xvfb-maybe": "^0.2.1"
},
"installConfig": {
"hoistingLimits": "workspaces"
},
"packageManager": "yarn@4.5.3"
}

View File

@ -1,14 +0,0 @@
import { PlaywrightTestConfig } from '@playwright/test'
const config: PlaywrightTestConfig = {
testDir: './tests/e2e',
retries: 0,
globalTimeout: 350000,
use: {
screenshot: 'only-on-failure',
video: 'retain-on-failure',
trace: 'retain-on-failure',
},
// reporter: [['html', { outputFolder: './playwright-report' }]],
}
export default config

View File

@ -1,32 +0,0 @@
/**
* Exposes a set of APIs to the renderer process via the contextBridge object.
* @module preload
*/
import { APIEvents, APIRoutes } from '@janhq/core/node'
import { contextBridge, ipcRenderer } from 'electron'
const interfaces: { [key: string]: (...args: any[]) => any } = {}
// Loop over each route in APIRoutes
APIRoutes.forEach((method) => {
// For each method, create a function on the interfaces object
// This function invokes the method on the ipcRenderer with any provided arguments
interfaces[method] = (...args: any[]) => ipcRenderer.invoke(method, ...args)
})
// Loop over each method in APIEvents
APIEvents.forEach((method) => {
// For each method, create a function on the interfaces object
// This function sets up an event listener on the ipcRenderer for the method
// The handler for the event is provided as an argument to the function
interfaces[method] = (handler: any) => ipcRenderer.on(method, handler)
})
// Expose the 'interfaces' object in the main world under the name 'electronAPI'
// This allows the renderer process to access these methods directly
contextBridge.exposeInMainWorld('electronAPI', {
...interfaces,
isQuickAsk: () => true,
})

View File

@ -1,60 +0,0 @@
/**
* Exposes a set of APIs to the renderer process via the contextBridge object.
* @module preload
*/
import { APIEvents, APIRoutes, AppConfiguration } from '@janhq/core/node'
import { contextBridge, ipcRenderer } from 'electron'
import { readdirSync } from 'fs'
const interfaces: { [key: string]: (...args: any[]) => any } = {}
// Loop over each route in APIRoutes
APIRoutes.forEach((method) => {
// For each method, create a function on the interfaces object
// This function invokes the method on the ipcRenderer with any provided arguments
interfaces[method] = (...args: any[]) => ipcRenderer.invoke(method, ...args)
})
// Loop over each method in APIEvents
APIEvents.forEach((method) => {
// For each method, create a function on the interfaces object
// This function sets up an event listener on the ipcRenderer for the method
// The handler for the event is provided as an argument to the function
interfaces[method] = (handler: any) => ipcRenderer.on(method, handler)
})
interfaces['changeDataFolder'] = async (path) => {
const appConfiguration: AppConfiguration = await ipcRenderer.invoke(
'getAppConfigurations'
)
const currentJanDataFolder = appConfiguration.data_folder
appConfiguration.data_folder = path
const reflect = require('@alumna/reflect')
const { err } = await reflect({
src: currentJanDataFolder,
dest: path,
recursive: true,
delete: false,
overwrite: true,
errorOnExist: false,
})
if (err) {
console.error(err)
throw err
}
await ipcRenderer.invoke('updateAppConfiguration', appConfiguration)
}
interfaces['isDirectoryEmpty'] = async (path) => {
const dirChildren = readdirSync(path) // readdirSync is synchronous; no await needed
return dirChildren.filter((x) => x !== '.DS_Store').length === 0
}
// Expose the 'interfaces' object in the main world under the name 'electronAPI'
// This allows the renderer process to access these methods directly
contextBridge.exposeInMainWorld('electronAPI', {
...interfaces,
isQuickAsk: () => false,
})
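Because the renderer only ever sees the electronAPI surface assembled here, it helps to give that surface a type. A minimal sketch of a global augmentation; only isQuickAsk, changeDataFolder, and isDirectoryEmpty are pinned down by this file, everything else is attached dynamically from APIRoutes and APIEvents:
// Hypothetical global typing for the preload-exposed bridge.
export {}
declare global {
  interface Window {
    electronAPI: {
      isQuickAsk: () => boolean
      changeDataFolder: (path: string) => Promise<void>
      isDirectoryEmpty: (path: string) => Promise<boolean>
      // Every APIRoutes / APIEvents entry is also attached dynamically.
      [route: string]: (...args: any[]) => any
    }
  }
}
// Usage from the renderer:
if (!window.electronAPI.isQuickAsk()) {
  window.electronAPI.isDirectoryEmpty('/tmp').then(console.log)
}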

View File

@ -1,46 +0,0 @@
!include nsDialogs.nsh
XPStyle on
!macro customUnInstall
${ifNot} ${isUpdated}
; Define the process name of your Electron app
StrCpy $0 "Jan.exe"
; Check if the application is running
nsExec::ExecToStack 'tasklist /FI "IMAGENAME eq $0" /NH'
Pop $1
StrCmp $1 "" notRunning
; If the app is running, notify the user and attempt to close it
MessageBox MB_OK "Jan is still running and will be force closed to continue the uninstall." IDOK forceClose
forceClose:
; Attempt to kill the running application
nsExec::ExecToStack 'taskkill /F /IM $0'
Pop $1
; Proceed with uninstallation
Goto continueUninstall
notRunning:
; If the app is not running, proceed with uninstallation
Goto continueUninstall
continueUninstall:
; Proceed with uninstallation
DeleteRegKey HKLM "Software\Jan"
RMDir /r "$INSTDIR"
Delete "$INSTDIR\*.*"
; Clean up shortcuts and app data
Delete "$DESKTOP\Jan.lnk"
Delete "$STARTMENU\Programs\Jan.lnk"
RMDir /r "$APPDATA\Jan"
RMDir /r "$LOCALAPPDATA\jan-updater"
; Close the uninstaller
Quit
${endIf}
!macroend

View File

@ -1,69 +0,0 @@
const { exec } = require('child_process')
function execCommandWithRetry(command, retries = 3) {
return new Promise((resolve, reject) => {
const execute = (attempt) => {
exec(command, (error, stdout, stderr) => {
if (error) {
console.error(`Error: ${error}`)
if (attempt < retries) {
console.log(`Retrying... Attempt ${attempt + 1}`)
execute(attempt + 1)
} else {
return reject(error)
}
} else {
console.log(`stdout: ${stdout}`)
console.error(`stderr: ${stderr}`)
resolve()
}
})
}
execute(0)
})
}
function sign({
path,
name,
certUrl,
clientId,
tenantId,
clientSecret,
certName,
timestampServer,
version,
}) {
return new Promise((resolve, reject) => {
const command = `azuresigntool.exe sign -kvu "${certUrl}" -kvi "${clientId}" -kvt "${tenantId}" -kvs "${clientSecret}" -kvc "${certName}" -tr "${timestampServer}" -v "${path}"`
execCommandWithRetry(command)
.then(resolve)
.catch(reject)
})
}
exports.default = async function (options) {
const certUrl = process.env.AZURE_KEY_VAULT_URI
const clientId = process.env.AZURE_CLIENT_ID
const tenantId = process.env.AZURE_TENANT_ID
const clientSecret = process.env.AZURE_CLIENT_SECRET
const certName = process.env.AZURE_CERT_NAME
const timestampServer = 'http://timestamp.globalsign.com/tsa/r6advanced1'
try {
await sign({
path: options.path,
name: 'jan-win-x64',
certUrl,
clientId,
tenantId,
clientSecret,
certName,
timestampServer,
version: options.version,
})
} catch (error) {
console.error('Failed to sign after 3 attempts:', error)
process.exit(1)
}
}

View File

@ -1,10 +0,0 @@
const jestRunner = require('jest-runner');
class EmptyTestFileRunner extends jestRunner.default {
async runTests(tests, watcher, onStart, onResult, onFailure, options) {
const nonEmptyTests = tests.filter(test => test.context.hasteFS.getSize(test.path) > 0);
return super.runTests(nonEmptyTests, watcher, onStart, onResult, onFailure, options);
}
}
module.exports = EmptyTestFileRunner;

View File

@ -1,4 +0,0 @@
export const Constants = {
VIDEO_DIR: './playwright-video',
TIMEOUT: '300000',
}

View File

@ -1,126 +0,0 @@
import {
_electron as electron,
BrowserContext,
ElectronApplication,
expect,
Page,
test as base,
} from '@playwright/test'
import {
ElectronAppInfo,
findLatestBuild,
parseElectronApp,
stubDialog,
} from 'electron-playwright-helpers'
import { Constants } from './constants'
import { HubPage } from '../pages/hubPage'
import { CommonActions } from '../pages/commonActions'
import { rmSync } from 'fs'
import * as path from 'path'
export let electronApp: ElectronApplication
export let page: Page
export let appInfo: ElectronAppInfo
export const TIMEOUT = parseInt(process.env.TEST_TIMEOUT || Constants.TIMEOUT)
export async function setupElectron() {
console.log(`TEST TIMEOUT: ${TIMEOUT}`)
process.env.CI = 'e2e'
const latestBuild = findLatestBuild('dist')
expect(latestBuild).toBeTruthy()
// parse the packaged Electron app and find paths and other info
appInfo = parseElectronApp(latestBuild)
expect(appInfo).toBeTruthy()
electronApp = await electron.launch({
args: [appInfo.main, '--no-sandbox'], // main file from package.json
executablePath: appInfo.executable, // path to the Electron executable
// recordVideo: { dir: Constants.VIDEO_DIR }, // Specify the directory for video recordings
})
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
page = await electronApp.firstWindow({
timeout: TIMEOUT,
})
}
export async function teardownElectron() {
await page.close()
await electronApp.close()
}
/**
* this fixture is needed to record and attach videos / screenshot on failed tests when
* tests are run in serial mode (i.e. browser is not closed between tests)
*/
export const test = base.extend<
{
commonActions: CommonActions
hubPage: HubPage
attachVideoPage: Page
attachScreenshotsToReport: void
},
{ createVideoContext: BrowserContext }
>({
commonActions: async ({ request }, use, testInfo) => {
await use(new CommonActions(page, testInfo))
},
hubPage: async ({ commonActions }, use) => {
await use(new HubPage(page, commonActions))
},
createVideoContext: [
async ({ playwright }, use) => {
const context = electronApp.context()
await use(context)
},
{ scope: 'worker' },
],
attachVideoPage: [
async ({ createVideoContext }, use, testInfo) => {
await use(page)
if (testInfo.status !== testInfo.expectedStatus) {
const path = await createVideoContext.pages()[0].video()?.path()
await createVideoContext.close()
await testInfo.attach('video', {
path: path,
})
}
},
{ scope: 'test', auto: true },
],
attachScreenshotsToReport: [
async ({ commonActions }, use, testInfo) => {
await use()
// After the test, we can check whether the test passed or failed.
if (testInfo.status !== testInfo.expectedStatus) {
await commonActions.takeScreenshot('')
}
},
{ auto: true },
],
})
test.beforeAll(async () => {
rmSync(path.join(__dirname, '../../test-data'), {
recursive: true,
force: true,
})
test.setTimeout(TIMEOUT)
await setupElectron()
await page.waitForSelector('img[alt="Jan - Logo"]', {
state: 'visible',
timeout: TIMEOUT,
})
})
test.afterAll(async () => {
// teardownElectron()
})

View File

@ -1,25 +0,0 @@
import { test, appInfo, page, TIMEOUT } from '../config/fixtures'
import { expect } from '@playwright/test'
test.beforeAll(async () => {
expect(appInfo).toMatchObject({
asar: true,
executable: expect.anything(),
main: expect.anything(),
name: 'jan',
packageJson: expect.objectContaining({ name: 'jan' }),
platform: process.platform,
resourcesDir: expect.anything(),
})
})
test('explores hub', async ({ hubPage }) => {
await hubPage.navigateByMenu()
await hubPage.verifyContainerVisible()
await hubPage.scrollToBottom()
const useModelBtn = page.getByTestId(/^setup-btn/).first()
await expect(useModelBtn).toBeVisible({
timeout: TIMEOUT,
})
})

View File

@ -1,15 +0,0 @@
import { expect } from '@playwright/test'
import { page, test, TIMEOUT } from '../config/fixtures'
test('renders left navigation panel', async () => {
const threadBtn = page.getByTestId('Thread').first()
await expect(threadBtn).toBeVisible({ timeout: TIMEOUT })
// Chat section should be there
await page.getByTestId('Local API Server').first().click({
timeout: TIMEOUT,
})
const localServer = page.getByTestId('local-server-testid').first()
await expect(localServer).toBeVisible({
timeout: TIMEOUT,
})
})

View File

@ -1,11 +0,0 @@
import { expect } from '@playwright/test'
import { test, page, TIMEOUT } from '../config/fixtures'
test('shows settings', async () => {
await page.getByTestId('Settings').first().click({
timeout: TIMEOUT,
})
const settingDescription = page.getByTestId('testid-setting-description')
await expect(settingDescription).toBeVisible({ timeout: TIMEOUT })
})

View File

@ -1,18 +0,0 @@
import { expect } from '@playwright/test'
import { page, test, TIMEOUT } from '../config/fixtures'
test('show onboarding screen without any threads created or models downloaded', async () => {
await page.getByTestId('Thread').first().click({
timeout: TIMEOUT,
})
const denyButton = page.locator('[data-testid="btn-deny-product-analytics"]')
if ((await denyButton.count()) > 0) {
await denyButton.click({ force: true })
}
const onboardScreen = page.getByTestId('onboard-screen')
await expect(onboardScreen).toBeVisible({
timeout: TIMEOUT,
})
})

View File

@ -1,59 +0,0 @@
import { Page, expect } from '@playwright/test'
import { CommonActions } from './commonActions'
import { TIMEOUT } from '../config/fixtures'
export class BasePage {
menuId: string
constructor(
protected readonly page: Page,
readonly action: CommonActions,
protected containerId: string
) {}
public getValue(key: string) {
return this.action.getValue(key)
}
public setValue(key: string, value: string) {
this.action.setValue(key, value)
}
async takeScreenshot(name: string = '') {
await this.action.takeScreenshot(name)
}
async navigateByMenu() {
await this.clickFirstElement(this.menuId)
}
async clickFirstElement(testId: string) {
await this.page.getByTestId(testId).first().click()
}
async verifyContainerVisible() {
const container = this.page.getByTestId(this.containerId)
await expect(container).toBeVisible() // isVisible() returns a Promise, which would always be truthy
}
async scrollToBottom() {
await this.page.evaluate(() => {
window.scrollTo(0, document.body.scrollHeight)
})
}
async waitUpdateLoader() {
await this.isElementVisible('img[alt="Jan - Logo"]')
}
// Wait for a specific element by selector and return whether it became visible
async isElementVisible(selector: any) {
let isVisible = true
await this.page
.waitForSelector(selector, { state: 'visible', timeout: TIMEOUT })
.catch(() => {
isVisible = false
})
return isVisible
}
}
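Concrete pages such as HubPage (used by the fixtures but not shown in this diff) extend BasePage and pin down the menu and container test IDs. A hedged sketch of such a page object; the test IDs and import paths here are assumptions, not taken from the real implementation:
import { Page } from '@playwright/test'
import { BasePage } from './basePage'
import { CommonActions } from './commonActions'
// Hypothetical page object built on BasePage.
export class ExampleHubPage extends BasePage {
  constructor(page: Page, action: CommonActions) {
    super(page, action, 'hub-container-test-id')
    this.menuId = 'Hub'
  }
  async openFirstModelCard() {
    await this.clickFirstElement('hub-model-card')
  }
}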

View File

@ -1,34 +0,0 @@
import { Page, TestInfo } from '@playwright/test'
import { page } from '../config/fixtures'
export class CommonActions {
private testData = new Map<string, string>()
constructor(
public page: Page,
public testInfo: TestInfo
) {}
async takeScreenshot(name: string) {
const screenshot = await page.screenshot({
fullPage: true,
})
const attachmentName = `${this.testInfo.title}_${name || new Date().toISOString().slice(5, 19).replace(/[-:]/g, '').replace('T', '_')}`
await this.testInfo.attach(attachmentName.replace(/\s+/g, ''), {
body: screenshot,
contentType: 'image/png',
})
}
async hooks() {
console.log('hook from the scenario page')
}
setValue(key: string, value: string) {
this.testData.set(key, value)
}
getValue(key: string) {
return this.testData.get(key)
}
}

Some files were not shown because too many files have changed in this diff.