Merge pull request #5213 from menloresearch/release/v0.5.18
Sync Release/v0.5.18 into Development branch
28
.github/scripts/electron-checksum.py
vendored
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
import hashlib
import base64
import sys


def hash_file(file_path):
    """Return the base64-encoded SHA-512 digest of the file at *file_path*."""
    digest = hashlib.sha512()

    # Stream the file in 1 MB chunks so arbitrarily large files fit in memory.
    with open(file_path, 'rb') as f:
        while chunk := f.read(1024 * 1024):
            digest.update(chunk)

    # Base64 is the encoding electron-updater expects for checksums.
    return base64.b64encode(digest.digest()).decode('utf-8')


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python3 script.py <file_path>")
        sys.exit(1)

    print(hash_file(sys.argv[1]))
|
||||||
|
Before Width: | Height: | Size: 49 KiB After Width: | Height: | Size: 49 KiB |
|
Before Width: | Height: | Size: 49 KiB After Width: | Height: | Size: 49 KiB |
63
.github/scripts/rename-tauri-app.sh
vendored
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
#!/bin/bash

# rename-tauri-app.sh — rebrand a Tauri config for a release channel.
#
# Usage: rename-tauri-app.sh <path_to_json_input_file> <channel>
#
# Rewrites productName/identifier in the given tauri.conf.json to the
# channel-suffixed names (e.g. "Jan-nightly", "jan-nightly.ai.app") and,
# when ./src-tauri/Info.plist exists (macOS), rewrites the bundle
# identifier and app name there too.

# Check if the correct number of arguments is provided
if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <path_to_json_input_file> <channel>"
    exit 1
fi

INPUT_JSON_FILE="$1"
CHANNEL="$2"

# Updater feed name: nightly builds track "latest"; everything else "beta".
# NOTE(review): $updater is passed to jq below but the filter does not use
# it yet — kept for forward compatibility.
if [ "$CHANNEL" == "nightly" ]; then
    UPDATER="latest"
else
    UPDATER="beta"
fi

# Check if the input file exists
if [ ! -f "$INPUT_JSON_FILE" ]; then
    echo "Input file not found: $INPUT_JSON_FILE"
    exit 1
fi

# Use jq to transform the content. Write to a temp file and bail out on
# failure so a broken jq run can never destroy the original config
# (the previous version did `rm` before checking jq's exit status).
if ! jq --arg channel "$CHANNEL" --arg updater "$UPDATER" '
  .productName = "Jan-\($channel)" |
  .identifier = "jan-\($channel).ai.app"
' "$INPUT_JSON_FILE" > ./tauri.conf.json.tmp; then
    echo "jq transformation failed for: $INPUT_JSON_FILE"
    rm -f ./tauri.conf.json.tmp
    exit 1
fi

cat ./tauri.conf.json.tmp

# mv replaces the original in one step; quoting guards paths with spaces.
mv ./tauri.conf.json.tmp "$INPUT_JSON_FILE"

# Update Info.plist if it exists
INFO_PLIST_PATH="./src-tauri/Info.plist"
if [ -f "$INFO_PLIST_PATH" ]; then
    echo "Updating Info.plist..."

    # NOTE: `sed -i ''` is the BSD/macOS form; this branch only runs where
    # an Info.plist exists, i.e. on macOS runners.

    # Replace jan.ai.app with jan-{channel}.ai.app
    sed -i '' "s|jan\.ai\.app|jan-${CHANNEL}.ai.app|g" "$INFO_PLIST_PATH"

    # Replace <string>jan</string> with <string>jan-{channel}</string>
    sed -i '' "s|<string>jan</string>|<string>jan-${CHANNEL}</string>|g" "$INFO_PLIST_PATH"

    echo "Info.plist updated"

    cat "$INFO_PLIST_PATH"
fi
|
||||||
86
.github/workflows/jan-electron-build-beta.yml
vendored
@ -1,86 +0,0 @@
|
|||||||
name: Electron Builder - Beta Build
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Job create Update app version based on latest release tag with build number and save to output
|
|
||||||
get-update-version:
|
|
||||||
uses: ./.github/workflows/template-get-update-version.yml
|
|
||||||
|
|
||||||
build-macos:
|
|
||||||
uses: ./.github/workflows/template-build-macos.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
beta: true
|
|
||||||
nightly: false
|
|
||||||
cortex_api_port: "39271"
|
|
||||||
|
|
||||||
build-windows-x64:
|
|
||||||
uses: ./.github/workflows/template-build-windows-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
beta: true
|
|
||||||
nightly: false
|
|
||||||
cortex_api_port: "39271"
|
|
||||||
|
|
||||||
build-linux-x64:
|
|
||||||
uses: ./.github/workflows/template-build-linux-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
beta: true
|
|
||||||
nightly: false
|
|
||||||
cortex_api_port: "39271"
|
|
||||||
|
|
||||||
sync-temp-to-latest:
|
|
||||||
needs: [build-macos, build-windows-x64, build-linux-x64]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
- name: Sync temp to latest
|
|
||||||
run: |
|
|
||||||
# sync temp-beta to beta by copy files that are different or new
|
|
||||||
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
|
|
||||||
env:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
|
||||||
AWS_EC2_METADATA_DISABLED: "true"
|
|
||||||
|
|
||||||
noti-discord-and-update-url-readme:
|
|
||||||
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Set version to environment variable
|
|
||||||
run: |
|
|
||||||
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
VERSION="${VERSION#v}"
|
|
||||||
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Notify Discord
|
|
||||||
uses: Ilshidur/action-discord@master
|
|
||||||
with:
|
|
||||||
args: |
|
|
||||||
Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information:
|
|
||||||
- Windows: https://delta.jan.ai/beta/jan-beta-win-x64-{{ VERSION }}.exe
|
|
||||||
- macOS Universal: https://delta.jan.ai/beta/jan-beta-mac-universal-{{ VERSION }}.dmg
|
|
||||||
- Linux Deb: https://delta.jan.ai/beta/jan-beta-linux-amd64-{{ VERSION }}.deb
|
|
||||||
- Linux AppImage: https://delta.jan.ai/beta/jan-beta-linux-x86_64-{{ VERSION }}.AppImage
|
|
||||||
env:
|
|
||||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}
|
|
||||||
137
.github/workflows/jan-electron-build-nightly.yml
vendored
@ -12,8 +12,9 @@ on:
|
|||||||
- none
|
- none
|
||||||
- aws-s3
|
- aws-s3
|
||||||
default: none
|
default: none
|
||||||
pull_request_review:
|
pull_request:
|
||||||
types: [submitted]
|
branches:
|
||||||
|
- release/**
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
set-public-provider:
|
set-public-provider:
|
||||||
@ -47,48 +48,84 @@ jobs:
|
|||||||
get-update-version:
|
get-update-version:
|
||||||
uses: ./.github/workflows/template-get-update-version.yml
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
|
||||||
build-macos:
|
build-tauri-macos:
|
||||||
uses: ./.github/workflows/template-build-macos.yml
|
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
needs: [get-update-version, set-public-provider]
|
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
needs: [get-update-version, set-public-provider]
|
||||||
with:
|
with:
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
nightly: true
|
channel: nightly
|
||||||
beta: false
|
|
||||||
cortex_api_port: "39261"
|
cortex_api_port: "39261"
|
||||||
|
|
||||||
build-windows-x64:
|
build-tauri-windows-x64:
|
||||||
uses: ./.github/workflows/template-build-windows-x64.yml
|
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version, set-public-provider]
|
needs: [get-update-version, set-public-provider]
|
||||||
with:
|
with:
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
nightly: true
|
channel: nightly
|
||||||
beta: false
|
|
||||||
cortex_api_port: "39261"
|
cortex_api_port: "39261"
|
||||||
build-linux-x64:
|
|
||||||
uses: ./.github/workflows/template-build-linux-x64.yml
|
build-tauri-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version, set-public-provider]
|
needs: [get-update-version, set-public-provider]
|
||||||
with:
|
with:
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
nightly: true
|
channel: nightly
|
||||||
beta: false
|
|
||||||
cortex_api_port: "39261"
|
cortex_api_port: "39261"
|
||||||
|
|
||||||
sync-temp-to-latest:
|
sync-temp-to-latest:
|
||||||
needs: [set-public-provider, build-windows-x64, build-linux-x64, build-macos]
|
needs: [get-update-version, set-public-provider, build-tauri-windows-x64, build-tauri-linux-x64, build-tauri-macos]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
- name: create latest.json file
|
||||||
|
run: |
|
||||||
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
|
LINUX_SIGNATURE="${{ needs.build-tauri-linux-x64.outputs.APPIMAGE_SIG }}"
|
||||||
|
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-tauri-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
||||||
|
WINDOWS_SIGNATURE="${{ needs.build-tauri-windows-x64.outputs.WIN_SIG }}"
|
||||||
|
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-tauri-windows-x64.outputs.FILE_NAME }}"
|
||||||
|
DARWIN_SIGNATURE="${{ needs.build-tauri-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
||||||
|
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
|
||||||
|
|
||||||
|
jq --arg version "$VERSION" \
|
||||||
|
--arg pub_date "$PUB_DATE" \
|
||||||
|
--arg linux_signature "$LINUX_SIGNATURE" \
|
||||||
|
--arg linux_url "$LINUX_URL" \
|
||||||
|
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
||||||
|
--arg windows_url "$WINDOWS_URL" \
|
||||||
|
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_arm_url "$DARWIN_URL" \
|
||||||
|
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_amd_url "$DARWIN_URL" \
|
||||||
|
'.version = $version
|
||||||
|
| .pub_date = $pub_date
|
||||||
|
| .platforms["linux-x86_64"].signature = $linux_signature
|
||||||
|
| .platforms["linux-x86_64"].url = $linux_url
|
||||||
|
| .platforms["windows-x86_64"].signature = $windows_signature
|
||||||
|
| .platforms["windows-x86_64"].url = $windows_url
|
||||||
|
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
||||||
|
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
||||||
|
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
||||||
|
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
||||||
|
src-tauri/latest.json.template > latest.json
|
||||||
|
cat latest.json
|
||||||
- name: Sync temp to latest
|
- name: Sync temp to latest
|
||||||
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
|
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
|
||||||
run: |
|
run: |
|
||||||
|
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
|
||||||
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
|
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
|
||||||
env:
|
env:
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
@ -97,7 +134,14 @@ jobs:
|
|||||||
AWS_EC2_METADATA_DISABLED: "true"
|
AWS_EC2_METADATA_DISABLED: "true"
|
||||||
|
|
||||||
noti-discord-nightly-and-update-url-readme:
|
noti-discord-nightly-and-update-url-readme:
|
||||||
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
|
needs: [
|
||||||
|
build-tauri-macos,
|
||||||
|
build-tauri-windows-x64,
|
||||||
|
build-tauri-linux-x64,
|
||||||
|
get-update-version,
|
||||||
|
set-public-provider,
|
||||||
|
sync-temp-to-latest
|
||||||
|
]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
if: github.event_name == 'schedule'
|
if: github.event_name == 'schedule'
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
@ -108,7 +152,14 @@ jobs:
|
|||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
noti-discord-pre-release-and-update-url-readme:
|
noti-discord-pre-release-and-update-url-readme:
|
||||||
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
|
needs: [
|
||||||
|
build-tauri-macos,
|
||||||
|
build-tauri-windows-x64,
|
||||||
|
build-tauri-linux-x64,
|
||||||
|
get-update-version,
|
||||||
|
set-public-provider,
|
||||||
|
sync-temp-to-latest
|
||||||
|
]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
if: github.event_name == 'push'
|
if: github.event_name == 'push'
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
@ -119,7 +170,14 @@ jobs:
|
|||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
noti-discord-manual-and-update-url-readme:
|
noti-discord-manual-and-update-url-readme:
|
||||||
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
|
needs: [
|
||||||
|
build-tauri-macos,
|
||||||
|
build-tauri-windows-x64,
|
||||||
|
build-tauri-linux-x64,
|
||||||
|
get-update-version,
|
||||||
|
set-public-provider,
|
||||||
|
sync-temp-to-latest
|
||||||
|
]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
|
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
@ -130,21 +188,28 @@ jobs:
|
|||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
|
|
||||||
comment-pr-build-url:
|
# comment-pr-build-url:
|
||||||
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest]
|
# needs: [
|
||||||
runs-on: ubuntu-latest
|
# build-tauri-macos,
|
||||||
if: github.event_name == 'pull_request_review'
|
# build-tauri-windows-x64,
|
||||||
steps:
|
# build-tauri-linux-x64,
|
||||||
- name: Set up GitHub CLI
|
# get-update-version,
|
||||||
run: |
|
# set-public-provider,
|
||||||
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
|
# sync-temp-to-latest
|
||||||
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
|
# ]
|
||||||
|
# runs-on: ubuntu-latest
|
||||||
|
# if: github.event_name == 'pull_request_review'
|
||||||
|
# steps:
|
||||||
|
# - name: Set up GitHub CLI
|
||||||
|
# run: |
|
||||||
|
# curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
|
||||||
|
# sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
|
||||||
|
|
||||||
- name: Comment build URL on PR
|
# - name: Comment build URL on PR
|
||||||
env:
|
# env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
# run: |
|
||||||
PR_URL=${{ github.event.pull_request.html_url }}
|
# PR_URL=${{ github.event.pull_request.html_url }}
|
||||||
RUN_ID=${{ github.run_id }}
|
# RUN_ID=${{ github.run_id }}
|
||||||
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
|
# COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
|
||||||
gh pr comment $PR_URL --body "$COMMENT"
|
# gh pr comment $PR_URL --body "$COMMENT"
|
||||||
54
.github/workflows/jan-electron-build.yml
vendored
@ -33,8 +33,8 @@ jobs:
|
|||||||
draft: true
|
draft: true
|
||||||
prerelease: false
|
prerelease: false
|
||||||
|
|
||||||
build-macos:
|
build-electron-macos:
|
||||||
uses: ./.github/workflows/template-build-macos.yml
|
uses: ./.github/workflows/template-electron-build-macos.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version]
|
needs: [get-update-version]
|
||||||
with:
|
with:
|
||||||
@ -44,8 +44,8 @@ jobs:
|
|||||||
nightly: false
|
nightly: false
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
build-windows-x64:
|
build-electron-windows-x64:
|
||||||
uses: ./.github/workflows/template-build-windows-x64.yml
|
uses: ./.github/workflows/template-electron-build-windows-x64.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version]
|
needs: [get-update-version]
|
||||||
with:
|
with:
|
||||||
@ -55,8 +55,8 @@ jobs:
|
|||||||
nightly: false
|
nightly: false
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
build-linux-x64:
|
build-electron-linux-x64:
|
||||||
uses: ./.github/workflows/template-build-linux-x64.yml
|
uses: ./.github/workflows/template-electron-build-linux-x64.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version]
|
needs: [get-update-version]
|
||||||
with:
|
with:
|
||||||
@ -65,9 +65,49 @@ jobs:
|
|||||||
beta: false
|
beta: false
|
||||||
nightly: false
|
nightly: false
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
|
# build-tauri-macos:
|
||||||
|
# uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
|
# secrets: inherit
|
||||||
|
# needs: [get-update-version, create-draft-release]
|
||||||
|
# with:
|
||||||
|
# ref: ${{ github.ref }}
|
||||||
|
# public_provider: github
|
||||||
|
# channel: stable
|
||||||
|
# new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
# upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
# build-tauri-windows-x64:
|
||||||
|
# uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
|
# secrets: inherit
|
||||||
|
# needs: [get-update-version, create-draft-release]
|
||||||
|
# with:
|
||||||
|
# ref: ${{ github.ref }}
|
||||||
|
# public_provider: github
|
||||||
|
# channel: stable
|
||||||
|
# new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
# upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
# build-tauri-linux-x64:
|
||||||
|
# uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
|
# secrets: inherit
|
||||||
|
# needs: [get-update-version, create-draft-release]
|
||||||
|
# with:
|
||||||
|
# ref: ${{ github.ref }}
|
||||||
|
# public_provider: github
|
||||||
|
# channel: stable
|
||||||
|
# new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
# upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
update_release_draft:
|
update_release_draft:
|
||||||
needs: [build-macos, build-windows-x64, build-linux-x64]
|
needs: [
|
||||||
|
build-electron-windows-x64,
|
||||||
|
build-electron-linux-x64,
|
||||||
|
build-electron-macos,
|
||||||
|
build-tauri-windows-x64,
|
||||||
|
build-tauri-linux-x64,
|
||||||
|
build-tauri-macos
|
||||||
|
]
|
||||||
permissions:
|
permissions:
|
||||||
# write permission is required to create a github release
|
# write permission is required to create a github release
|
||||||
contents: write
|
contents: write
|
||||||
|
|||||||
@ -34,6 +34,8 @@ on:
|
|||||||
- 'Makefile'
|
- 'Makefile'
|
||||||
- 'extensions/**'
|
- 'extensions/**'
|
||||||
- 'core/**'
|
- 'core/**'
|
||||||
|
- 'src-tauri/**'
|
||||||
|
- 'web-app/**'
|
||||||
- '!README.md'
|
- '!README.md'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
@ -53,7 +55,6 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
make config-yarn
|
make config-yarn
|
||||||
yarn
|
yarn
|
||||||
yarn build:joi
|
|
||||||
yarn build:core
|
yarn build:core
|
||||||
|
|
||||||
- name: Run test coverage
|
- name: Run test coverage
|
||||||
@ -305,52 +306,53 @@ jobs:
|
|||||||
path: electron/playwright-report/
|
path: electron/playwright-report/
|
||||||
retention-days: 2
|
retention-days: 2
|
||||||
|
|
||||||
coverage-check:
|
# coverage-check:
|
||||||
runs-on: ubuntu-latest
|
# runs-on: ubuntu-latest
|
||||||
needs: base_branch_cov
|
# needs: base_branch_cov
|
||||||
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
# continue-on-error: true
|
||||||
steps:
|
# if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
||||||
- name: Getting the repo
|
# steps:
|
||||||
uses: actions/checkout@v3
|
# - name: Getting the repo
|
||||||
with:
|
# uses: actions/checkout@v3
|
||||||
fetch-depth: 0
|
# with:
|
||||||
|
# fetch-depth: 0
|
||||||
|
|
||||||
- name: Installing node
|
# - name: Installing node
|
||||||
uses: actions/setup-node@v3
|
# uses: actions/setup-node@v3
|
||||||
with:
|
# with:
|
||||||
node-version: 20
|
# node-version: 20
|
||||||
|
|
||||||
- name: Install yarn
|
# - name: Install yarn
|
||||||
run: npm install -g yarn
|
# run: npm install -g yarn
|
||||||
|
|
||||||
- name: 'Cleanup cache'
|
# - name: 'Cleanup cache'
|
||||||
continue-on-error: true
|
# continue-on-error: true
|
||||||
run: |
|
# run: |
|
||||||
rm -rf ~/jan
|
# rm -rf ~/jan
|
||||||
make clean
|
# make clean
|
||||||
|
|
||||||
- name: Download code coverage report from base branch
|
# - name: Download code coverage report from base branch
|
||||||
uses: actions/download-artifact@v4
|
# uses: actions/download-artifact@v4
|
||||||
with:
|
# with:
|
||||||
name: ref-lcov.info
|
# name: ref-lcov.info
|
||||||
|
|
||||||
- name: Linter and test coverage
|
# - name: Linter and test coverage
|
||||||
run: |
|
# run: |
|
||||||
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
|
# export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
|
||||||
echo -e "Display ID: $DISPLAY"
|
# echo -e "Display ID: $DISPLAY"
|
||||||
make lint
|
# make lint
|
||||||
yarn build:test
|
# yarn build:test
|
||||||
yarn test:coverage
|
# yarn test:coverage
|
||||||
|
|
||||||
- name: Generate Code Coverage report
|
# - name: Generate Code Coverage report
|
||||||
id: code-coverage
|
# id: code-coverage
|
||||||
uses: barecheck/code-coverage-action@v1
|
# uses: barecheck/code-coverage-action@v1
|
||||||
with:
|
# with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
# github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
lcov-file: './coverage/lcov.info'
|
# lcov-file: './coverage/lcov.info'
|
||||||
base-lcov-file: './lcov.info'
|
# base-lcov-file: './lcov.info'
|
||||||
send-summary-comment: true
|
# send-summary-comment: true
|
||||||
show-annotations: 'warning'
|
# show-annotations: 'warning'
|
||||||
|
|
||||||
test-on-ubuntu-pr-target:
|
test-on-ubuntu-pr-target:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|||||||
156
.github/workflows/jan-tauri-build-beta.yml
vendored
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
name: Tauri Builder - Beta Build

on:
  push:
    tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]

jobs:
  # Job create Update app version based on latest release tag with build number and save to output
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml

  create-draft-release:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
      version: ${{ steps.get_version.outputs.version }}
    permissions:
      contents: write
    steps:
      - name: Extract tag name without v prefix
        id: get_version
        # ::set-output is deprecated and disabled by GitHub; write the step
        # output to $GITHUB_OUTPUT instead.
        run: |
          echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV
          echo "version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
        env:
          GITHUB_REF: ${{ github.ref }}
      - name: Create Draft Release
        id: create_release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ github.ref_name }}
          token: ${{ secrets.GITHUB_TOKEN }}
          name: "${{ env.VERSION }}"
          draft: true
          prerelease: false
          generate_release_notes: true

  build-macos:
    uses: ./.github/workflows/template-tauri-build-macos.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: beta
      cortex_api_port: "39271"
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}

  build-windows-x64:
    uses: ./.github/workflows/template-tauri-build-windows-x64.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: beta
      cortex_api_port: "39271"
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}

  build-linux-x64:
    uses: ./.github/workflows/template-tauri-build-linux-x64.yml
    secrets: inherit
    needs: [get-update-version, create-draft-release]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}
      channel: beta
      cortex_api_port: "39271"
      upload_url: ${{ needs.create-draft-release.outputs.upload_url }}

  sync-temp-to-latest:
    needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3

      # Assemble the Tauri updater manifest (latest.json) from the signatures
      # and file names produced by the platform build jobs.
      - name: create latest.json file
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
          LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
          LINUX_URL="https://delta.jan.ai/beta/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
          WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
          WINDOWS_URL="https://delta.jan.ai/beta/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
          DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
          DARWIN_URL="https://delta.jan.ai/beta/Jan-beta_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"

          # The universal macOS build serves both darwin-aarch64 and darwin-x86_64.
          jq --arg version "$VERSION" \
            --arg pub_date "$PUB_DATE" \
            --arg linux_signature "$LINUX_SIGNATURE" \
            --arg linux_url "$LINUX_URL" \
            --arg windows_signature "$WINDOWS_SIGNATURE" \
            --arg windows_url "$WINDOWS_URL" \
            --arg darwin_arm_signature "$DARWIN_SIGNATURE" \
            --arg darwin_arm_url "$DARWIN_URL" \
            --arg darwin_amd_signature "$DARWIN_SIGNATURE" \
            --arg darwin_amd_url "$DARWIN_URL" \
            '.version = $version
            | .pub_date = $pub_date
            | .platforms["linux-x86_64"].signature = $linux_signature
            | .platforms["linux-x86_64"].url = $linux_url
            | .platforms["windows-x86_64"].signature = $windows_signature
            | .platforms["windows-x86_64"].url = $windows_url
            | .platforms["darwin-aarch64"].signature = $darwin_arm_signature
            | .platforms["darwin-aarch64"].url = $darwin_arm_url
            | .platforms["darwin-x86_64"].signature = $darwin_amd_signature
            | .platforms["darwin-x86_64"].url = $darwin_amd_url' \
            src-tauri/latest.json.template > latest.json
          cat latest.json
      - name: Sync temp to latest
        run: |
          # sync temp-beta to beta by copy files that are different or new
          aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/latest.json
          aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
          AWS_EC2_METADATA_DISABLED: "true"

      # NOTE(review): actions/upload-release-asset is archived/unmaintained;
      # consider attaching latest.json via softprops/action-gh-release instead.
      - name: Upload release assert if public provider is github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        uses: actions/upload-release-asset@v1.0.1
        with:
          upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
          asset_path: ./latest.json
          asset_name: latest.json
          asset_content_type: text/json

  noti-discord-and-update-url-readme:
    needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
    runs-on: ubuntu-latest
    steps:
      - name: Set version to environment variable
        run: |
          VERSION=${{ needs.get-update-version.outputs.new_version }}
          VERSION="${VERSION#v}"
          echo "VERSION=$VERSION" >> $GITHUB_ENV

      - name: Notify Discord
        uses: Ilshidur/action-discord@master
        with:
          args: |
            Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information:
            - Windows: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_x64-setup.exe
            - macOS Universal: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_universal.dmg
            - Linux Deb: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.deb
            - Linux AppImage: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.AppImage
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}
|
||||||
450
.github/workflows/jan-tauri-build-nightly.yaml
vendored
@ -1,225 +1,225 @@
|
|||||||
name: Tauri Builder - Nightly / Manual
|
name: Tauri Builder - Nightly / Manual
|
||||||
|
|
||||||
on:
|
on:
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
|
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
public_provider:
|
public_provider:
|
||||||
type: choice
|
type: choice
|
||||||
description: 'Public Provider'
|
description: 'Public Provider'
|
||||||
options:
|
options:
|
||||||
- none
|
- none
|
||||||
- aws-s3
|
- aws-s3
|
||||||
default: none
|
default: none
|
||||||
pull_request:
|
pull_request:
|
||||||
branches:
|
branches:
|
||||||
- release/**
|
- release/**
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
set-public-provider:
|
set-public-provider:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
|
||||||
ref: ${{ steps.set-public-provider.outputs.ref }}
|
ref: ${{ steps.set-public-provider.outputs.ref }}
|
||||||
steps:
|
steps:
|
||||||
- name: Set public provider
|
- name: Set public provider
|
||||||
id: set-public-provider
|
id: set-public-provider
|
||||||
run: |
|
run: |
|
||||||
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||||
echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
|
echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
else
|
else
|
||||||
if [ "${{ github.event_name }}" == "schedule" ]; then
|
if [ "${{ github.event_name }}" == "schedule" ]; then
|
||||||
echo "::set-output name=public_provider::aws-s3"
|
echo "::set-output name=public_provider::aws-s3"
|
||||||
echo "::set-output name=ref::refs/heads/dev"
|
echo "::set-output name=ref::refs/heads/dev"
|
||||||
elif [ "${{ github.event_name }}" == "push" ]; then
|
elif [ "${{ github.event_name }}" == "push" ]; then
|
||||||
echo "::set-output name=public_provider::aws-s3"
|
echo "::set-output name=public_provider::aws-s3"
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
|
elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
|
||||||
echo "::set-output name=public_provider::none"
|
echo "::set-output name=public_provider::none"
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
else
|
else
|
||||||
echo "::set-output name=public_provider::none"
|
echo "::set-output name=public_provider::none"
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
# Job create Update app version based on latest release tag with build number and save to output
|
# Job create Update app version based on latest release tag with build number and save to output
|
||||||
get-update-version:
|
get-update-version:
|
||||||
uses: ./.github/workflows/template-get-update-version.yml
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
|
||||||
build-macos:
|
build-macos:
|
||||||
uses: ./.github/workflows/template-tauri-build-macos.yml
|
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
needs: [get-update-version, set-public-provider]
|
needs: [get-update-version, set-public-provider]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
with:
|
with:
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
channel: nightly
|
channel: nightly
|
||||||
cortex_api_port: '39261'
|
cortex_api_port: '39261'
|
||||||
|
|
||||||
build-windows-x64:
|
build-windows-x64:
|
||||||
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version, set-public-provider]
|
needs: [get-update-version, set-public-provider]
|
||||||
with:
|
with:
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
channel: nightly
|
channel: nightly
|
||||||
cortex_api_port: '39261'
|
cortex_api_port: '39261'
|
||||||
build-linux-x64:
|
build-linux-x64:
|
||||||
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
needs: [get-update-version, set-public-provider]
|
needs: [get-update-version, set-public-provider]
|
||||||
with:
|
with:
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
channel: nightly
|
channel: nightly
|
||||||
cortex_api_port: '39261'
|
cortex_api_port: '39261'
|
||||||
|
|
||||||
sync-temp-to-latest:
|
sync-temp-to-latest:
|
||||||
needs:
|
needs:
|
||||||
[
|
[
|
||||||
get-update-version,
|
get-update-version,
|
||||||
set-public-provider,
|
set-public-provider,
|
||||||
build-windows-x64,
|
build-windows-x64,
|
||||||
build-linux-x64,
|
build-linux-x64,
|
||||||
build-macos,
|
build-macos,
|
||||||
]
|
]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Getting the repo
|
- name: Getting the repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
- name: create latest.json file
|
- name: create latest.json file
|
||||||
run: |
|
run: |
|
||||||
|
|
||||||
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
|
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
|
||||||
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
||||||
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
|
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
|
||||||
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
|
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
|
||||||
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
||||||
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
|
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
|
||||||
|
|
||||||
jq --arg version "$VERSION" \
|
jq --arg version "$VERSION" \
|
||||||
--arg pub_date "$PUB_DATE" \
|
--arg pub_date "$PUB_DATE" \
|
||||||
--arg linux_signature "$LINUX_SIGNATURE" \
|
--arg linux_signature "$LINUX_SIGNATURE" \
|
||||||
--arg linux_url "$LINUX_URL" \
|
--arg linux_url "$LINUX_URL" \
|
||||||
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
||||||
--arg windows_url "$WINDOWS_URL" \
|
--arg windows_url "$WINDOWS_URL" \
|
||||||
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
||||||
--arg darwin_arm_url "$DARWIN_URL" \
|
--arg darwin_arm_url "$DARWIN_URL" \
|
||||||
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
||||||
--arg darwin_amd_url "$DARWIN_URL" \
|
--arg darwin_amd_url "$DARWIN_URL" \
|
||||||
'.version = $version
|
'.version = $version
|
||||||
| .pub_date = $pub_date
|
| .pub_date = $pub_date
|
||||||
| .platforms["linux-x86_64"].signature = $linux_signature
|
| .platforms["linux-x86_64"].signature = $linux_signature
|
||||||
| .platforms["linux-x86_64"].url = $linux_url
|
| .platforms["linux-x86_64"].url = $linux_url
|
||||||
| .platforms["windows-x86_64"].signature = $windows_signature
|
| .platforms["windows-x86_64"].signature = $windows_signature
|
||||||
| .platforms["windows-x86_64"].url = $windows_url
|
| .platforms["windows-x86_64"].url = $windows_url
|
||||||
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
||||||
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
||||||
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
||||||
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
||||||
src-tauri/latest.json.template > latest.json
|
src-tauri/latest.json.template > latest.json
|
||||||
cat latest.json
|
cat latest.json
|
||||||
- name: Sync temp to latest
|
- name: Sync temp to latest
|
||||||
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
|
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
|
||||||
run: |
|
run: |
|
||||||
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
|
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
|
||||||
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
|
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
|
||||||
env:
|
env:
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
AWS_EC2_METADATA_DISABLED: 'true'
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
|
||||||
noti-discord-nightly-and-update-url-readme:
|
noti-discord-nightly-and-update-url-readme:
|
||||||
needs:
|
needs:
|
||||||
[
|
[
|
||||||
build-macos,
|
build-macos,
|
||||||
build-windows-x64,
|
build-windows-x64,
|
||||||
build-linux-x64,
|
build-linux-x64,
|
||||||
get-update-version,
|
get-update-version,
|
||||||
set-public-provider,
|
set-public-provider,
|
||||||
sync-temp-to-latest,
|
sync-temp-to-latest,
|
||||||
]
|
]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
if: github.event_name == 'schedule'
|
if: github.event_name == 'schedule'
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
with:
|
with:
|
||||||
ref: refs/heads/dev
|
ref: refs/heads/dev
|
||||||
build_reason: Nightly
|
build_reason: Nightly
|
||||||
push_to_branch: dev
|
push_to_branch: dev
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
noti-discord-pre-release-and-update-url-readme:
|
noti-discord-pre-release-and-update-url-readme:
|
||||||
needs:
|
needs:
|
||||||
[
|
[
|
||||||
build-macos,
|
build-macos,
|
||||||
build-windows-x64,
|
build-windows-x64,
|
||||||
build-linux-x64,
|
build-linux-x64,
|
||||||
get-update-version,
|
get-update-version,
|
||||||
set-public-provider,
|
set-public-provider,
|
||||||
sync-temp-to-latest,
|
sync-temp-to-latest,
|
||||||
]
|
]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
if: github.event_name == 'push'
|
if: github.event_name == 'push'
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
with:
|
with:
|
||||||
ref: refs/heads/dev
|
ref: refs/heads/dev
|
||||||
build_reason: Pre-release
|
build_reason: Pre-release
|
||||||
push_to_branch: dev
|
push_to_branch: dev
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
noti-discord-manual-and-update-url-readme:
|
noti-discord-manual-and-update-url-readme:
|
||||||
needs:
|
needs:
|
||||||
[
|
[
|
||||||
build-macos,
|
build-macos,
|
||||||
build-windows-x64,
|
build-windows-x64,
|
||||||
build-linux-x64,
|
build-linux-x64,
|
||||||
get-update-version,
|
get-update-version,
|
||||||
set-public-provider,
|
set-public-provider,
|
||||||
sync-temp-to-latest,
|
sync-temp-to-latest,
|
||||||
]
|
]
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
|
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
with:
|
with:
|
||||||
ref: refs/heads/dev
|
ref: refs/heads/dev
|
||||||
build_reason: Manual
|
build_reason: Manual
|
||||||
push_to_branch: dev
|
push_to_branch: dev
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
comment-pr-build-url:
|
comment-pr-build-url:
|
||||||
needs:
|
needs:
|
||||||
[
|
[
|
||||||
build-macos,
|
build-macos,
|
||||||
build-windows-x64,
|
build-windows-x64,
|
||||||
build-linux-x64,
|
build-linux-x64,
|
||||||
get-update-version,
|
get-update-version,
|
||||||
set-public-provider,
|
set-public-provider,
|
||||||
sync-temp-to-latest,
|
sync-temp-to-latest,
|
||||||
]
|
]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: github.event_name == 'pull_request_review'
|
if: github.event_name == 'pull_request_review'
|
||||||
steps:
|
steps:
|
||||||
- name: Set up GitHub CLI
|
- name: Set up GitHub CLI
|
||||||
run: |
|
run: |
|
||||||
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
|
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
|
||||||
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
|
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
|
||||||
|
|
||||||
- name: Comment build URL on PR
|
- name: Comment build URL on PR
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
PR_URL=${{ github.event.pull_request.html_url }}
|
PR_URL=${{ github.event.pull_request.html_url }}
|
||||||
RUN_ID=${{ github.run_id }}
|
RUN_ID=${{ github.run_id }}
|
||||||
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
|
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
|
||||||
gh pr comment $PR_URL --body "$COMMENT"
|
gh pr comment $PR_URL --body "$COMMENT"
|
||||||
|
|||||||
145
.github/workflows/jan-tauri-build.yaml
vendored
Normal file
@ -0,0 +1,145 @@
|
|||||||
|
name: Tauri Builder - Tag
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags: ["v[0-9]+.[0-9]+.[0-9]+"]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Job create Update app version based on latest release tag with build number and save to output
|
||||||
|
get-update-version:
|
||||||
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
|
||||||
|
create-draft-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
outputs:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
version: ${{ steps.get_version.outputs.version }}
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Extract tag name without v prefix
|
||||||
|
id: get_version
|
||||||
|
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
|
||||||
|
env:
|
||||||
|
GITHUB_REF: ${{ github.ref }}
|
||||||
|
- name: Create Draft Release
|
||||||
|
id: create_release
|
||||||
|
uses: softprops/action-gh-release@v2
|
||||||
|
with:
|
||||||
|
tag_name: ${{ github.ref_name }}
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
name: "${{ env.VERSION }}"
|
||||||
|
draft: true
|
||||||
|
prerelease: false
|
||||||
|
|
||||||
|
build-macos:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
build-windows-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
build-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
sync-temp-to-latest:
|
||||||
|
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: create latest.json file
|
||||||
|
run: |
|
||||||
|
|
||||||
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
|
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
|
||||||
|
LINUX_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
||||||
|
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
|
||||||
|
WINDOWS_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
|
||||||
|
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
||||||
|
DARWIN_URL="https://github.com/menloresearch/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-macos.outputs.TAR_NAME }}"
|
||||||
|
|
||||||
|
jq --arg version "$VERSION" \
|
||||||
|
--arg pub_date "$PUB_DATE" \
|
||||||
|
--arg linux_signature "$LINUX_SIGNATURE" \
|
||||||
|
--arg linux_url "$LINUX_URL" \
|
||||||
|
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
||||||
|
--arg windows_url "$WINDOWS_URL" \
|
||||||
|
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_arm_url "$DARWIN_URL" \
|
||||||
|
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_amd_url "$DARWIN_URL" \
|
||||||
|
'.version = $version
|
||||||
|
| .pub_date = $pub_date
|
||||||
|
| .platforms["linux-x86_64"].signature = $linux_signature
|
||||||
|
| .platforms["linux-x86_64"].url = $linux_url
|
||||||
|
| .platforms["windows-x86_64"].signature = $windows_signature
|
||||||
|
| .platforms["windows-x86_64"].url = $windows_url
|
||||||
|
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
||||||
|
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
||||||
|
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
||||||
|
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
||||||
|
src-tauri/latest.json.template > latest.json
|
||||||
|
cat latest.json
|
||||||
|
|
||||||
|
- name: Upload release assert if public provider is github
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
asset_path: ./latest.json
|
||||||
|
asset_name: latest.json
|
||||||
|
asset_content_type: text/json
|
||||||
|
|
||||||
|
update_release_draft:
|
||||||
|
needs: [build-macos, build-windows-x64, build-linux-x64]
|
||||||
|
permissions:
|
||||||
|
# write permission is required to create a github release
|
||||||
|
contents: write
|
||||||
|
# write permission is required for autolabeler
|
||||||
|
# otherwise, read permission is required at least
|
||||||
|
pull-requests: write
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
# (Optional) GitHub Enterprise requires GHE_HOST variable set
|
||||||
|
#- name: Set GHE_HOST
|
||||||
|
# run: |
|
||||||
|
# echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
# Drafts your next Release notes as Pull Requests are merged into "master"
|
||||||
|
- uses: release-drafter/release-drafter@v5
|
||||||
|
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
|
||||||
|
# with:
|
||||||
|
# config-name: my-config.yml
|
||||||
|
# disable-autolabeler: true
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
127
.github/workflows/nightly-integrate-cortex-cpp.yml
vendored
@ -1,127 +0,0 @@
|
|||||||
name: Nightly Update cortex cpp
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '30 19 * * 1-5' # At 01:30 on every day-of-week from Monday through Friday UTC +7
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update-submodule:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
actions: write
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
pr_number: ${{ steps.check-update.outputs.pr_number }}
|
|
||||||
pr_created: ${{ steps.check-update.outputs.pr_created }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
ref: dev
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
|
|
||||||
- name: Configure Git
|
|
||||||
run: |
|
|
||||||
git config --global user.name 'github-actions[bot]'
|
|
||||||
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
|
|
||||||
|
|
||||||
- name: Update submodule to latest release
|
|
||||||
id: check-update
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
run: |
|
|
||||||
curl -s https://api.github.com/repos/menloresearch/cortex/releases > /tmp/github_api_releases.json
|
|
||||||
latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1)
|
|
||||||
|
|
||||||
get_asset_count() {
|
|
||||||
local version_name=$1
|
|
||||||
cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length'
|
|
||||||
}
|
|
||||||
|
|
||||||
cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt"
|
|
||||||
current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1)
|
|
||||||
|
|
||||||
current_version_asset_count=$(get_asset_count "$current_version_name")
|
|
||||||
latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name")
|
|
||||||
|
|
||||||
if [ "$current_version_name" = "$latest_prerelease_name" ]; then
|
|
||||||
echo "cortex cpp remote repo doesn't have update today, skip update cortex.cpp for today nightly build"
|
|
||||||
echo "::set-output name=pr_created::false"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then
|
|
||||||
echo "Latest prerelease version has different number of assets, somethink went wrong, skip update cortex.cpp for today nightly build"
|
|
||||||
echo "::set-output name=pr_created::false"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo $latest_prerelease_name > $cortex_cpp_version_file_path
|
|
||||||
echo "Updated version from $current_version_name to $latest_prerelease_name."
|
|
||||||
echo "::set-output name=pr_created::true"
|
|
||||||
|
|
||||||
git add -f $cortex_cpp_version_file_path
|
|
||||||
git commit -m "Update cortex cpp nightly to version $latest_prerelease_name"
|
|
||||||
branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')"
|
|
||||||
git checkout -b $branch_name
|
|
||||||
git push origin $branch_name
|
|
||||||
|
|
||||||
pr_title="Update cortex cpp nightly to version $latest_prerelease_name"
|
|
||||||
pr_body="This PR updates the Update cortex cpp nightly to version $latest_prerelease_name"
|
|
||||||
|
|
||||||
gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA
|
|
||||||
|
|
||||||
pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number')
|
|
||||||
echo "::set-output name=pr_number::$pr_number"
|
|
||||||
|
|
||||||
check-and-merge-pr:
|
|
||||||
needs: update-submodule
|
|
||||||
if: needs.update-submodule.outputs.pr_created == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
|
|
||||||
- name: Wait for CI to pass
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
run: |
|
|
||||||
pr_number=${{ needs.update-submodule.outputs.pr_number }}
|
|
||||||
while true; do
|
|
||||||
ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt')
|
|
||||||
if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then
|
|
||||||
echo "CI is still running, waiting..."
|
|
||||||
sleep 60
|
|
||||||
else
|
|
||||||
echo "CI has completed, checking states..."
|
|
||||||
ci_states=$(gh pr checks $pr_number --json state --jq '.[].state')
|
|
||||||
if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then
|
|
||||||
echo "CI failed, exiting..."
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "CI passed, merging PR..."
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
- name: Merge the PR
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
run: |
|
|
||||||
pr_number=${{ needs.update-submodule.outputs.pr_number }}
|
|
||||||
gh pr merge $pr_number --merge --admin
|
|
||||||
15
.github/workflows/publish-npm-core.yml
vendored
@ -1,10 +1,9 @@
|
|||||||
name: Publish core Package to npmjs
|
name: Publish core Package to npmjs
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
tags: ["v[0-9]+.[0-9]+.[0-9]+-core"]
|
tags: ['v[0-9]+.[0-9]+.[0-9]+-core']
|
||||||
paths: ["core/**", ".github/workflows/publish-npm-core.yml"]
|
paths: ['core/**', '.github/workflows/publish-npm-core.yml']
|
||||||
pull_request:
|
workflow_dispatch:
|
||||||
paths: ["core/**", ".github/workflows/publish-npm-core.yml"]
|
|
||||||
jobs:
|
jobs:
|
||||||
build-and-publish-plugins:
|
build-and-publish-plugins:
|
||||||
environment: production
|
environment: production
|
||||||
@ -12,7 +11,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: "0"
|
fetch-depth: '0'
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
||||||
|
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
@ -24,7 +23,7 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
GITHUB_REF: ${{ github.ref }}
|
GITHUB_REF: ${{ github.ref }}
|
||||||
|
|
||||||
- name: "Get Semantic Version from tag"
|
- name: 'Get Semantic Version from tag'
|
||||||
if: github.event_name == 'push'
|
if: github.event_name == 'push'
|
||||||
run: |
|
run: |
|
||||||
# Get the tag from the event
|
# Get the tag from the event
|
||||||
@ -42,8 +41,8 @@ jobs:
|
|||||||
# Setup .npmrc file to publish to npm
|
# Setup .npmrc file to publish to npm
|
||||||
- uses: actions/setup-node@v3
|
- uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: "20.x"
|
node-version: '20.x'
|
||||||
registry-url: "https://registry.npmjs.org"
|
registry-url: 'https://registry.npmjs.org'
|
||||||
|
|
||||||
- run: cd core && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build
|
- run: cd core && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build
|
||||||
|
|
||||||
|
|||||||
53
.github/workflows/publish-npm-joi.yml
vendored
@ -1,53 +0,0 @@
|
|||||||
name: Publish joi Package to npmjs
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags: ["v[0-9]+.[0-9]+.[0-9]+-joi"]
|
|
||||||
paths: ["joi/**", ".github/workflows/publish-npm-joi.yml"]
|
|
||||||
pull_request:
|
|
||||||
paths: ["joi/**", ".github/workflows/publish-npm-joi.yml"]
|
|
||||||
jobs:
|
|
||||||
build-and-publish-plugins:
|
|
||||||
environment: production
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: "0"
|
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
|
|
||||||
- name: Install jq
|
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
|
||||||
|
|
||||||
- name: Extract tag name without v prefix
|
|
||||||
id: get_version
|
|
||||||
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
|
|
||||||
env:
|
|
||||||
GITHUB_REF: ${{ github.ref }}
|
|
||||||
|
|
||||||
- name: "Get Semantic Version from tag"
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
run: |
|
|
||||||
# Get the tag from the event
|
|
||||||
tag=${GITHUB_REF#refs/tags/v}
|
|
||||||
# remove the -joi suffix
|
|
||||||
new_version=$(echo $tag | sed -n 's/-joi//p')
|
|
||||||
echo $new_version
|
|
||||||
# Replace the old version with the new version in package.json
|
|
||||||
jq --arg version "$new_version" '.version = $version' joi/package.json > /tmp/package.json && mv /tmp/package.json joi/package.json
|
|
||||||
|
|
||||||
# Print the new version
|
|
||||||
echo "Updated package.json version to: $new_version"
|
|
||||||
cat joi/package.json
|
|
||||||
|
|
||||||
# Setup .npmrc file to publish to npm
|
|
||||||
- uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: "20.x"
|
|
||||||
registry-url: "https://registry.npmjs.org"
|
|
||||||
|
|
||||||
- run: cd joi && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build
|
|
||||||
|
|
||||||
- run: cd joi && yarn publish --access public
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
env:
|
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
|
||||||
@ -41,6 +41,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-linux-x64:
|
build-linux-x64:
|
||||||
|
if: inputs.public_provider == 'github' || inputs.public_provider == 'none'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
environment: production
|
environment: production
|
||||||
permissions:
|
permissions:
|
||||||
@ -130,7 +131,7 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
VERSION_TAG: ${{ inputs.new_version }}
|
VERSION_TAG: ${{ inputs.new_version }}
|
||||||
|
|
||||||
- name: Build and publish app to aws s3 r2 or github artifactory
|
- name: Build and publish app to aws s3 r2 or github artifactory
|
||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
run: |
|
run: |
|
||||||
# check public_provider is true or not
|
# check public_provider is true or not
|
||||||
@ -176,12 +177,12 @@ jobs:
|
|||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: jan-linux-amd64-${{ inputs.new_version }}-deb
|
name: jan-electron-linux-amd64-${{ inputs.new_version }}-deb
|
||||||
path: ./electron/dist/*.deb
|
path: ./electron/dist/*.deb
|
||||||
|
|
||||||
- name: Upload Artifact .AppImage file
|
- name: Upload Artifact .AppImage file
|
||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
|
name: jan-electron-linux-amd64-${{ inputs.new_version }}-AppImage
|
||||||
path: ./electron/dist/*.AppImage
|
path: ./electron/dist/*.AppImage
|
||||||
@ -51,6 +51,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-macos:
|
build-macos:
|
||||||
|
if: inputs.public_provider == 'github' || inputs.public_provider == 'none'
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
environment: production
|
environment: production
|
||||||
permissions:
|
permissions:
|
||||||
@ -160,7 +161,7 @@ jobs:
|
|||||||
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
||||||
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
||||||
|
|
||||||
- name: Build and publish app to aws s3 r2 or github artifactory
|
- name: Build and publish app to aws s3 r2 or github artifactory
|
||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
run: |
|
run: |
|
||||||
# check public_provider is true or not
|
# check public_provider is true or not
|
||||||
@ -229,5 +230,5 @@ jobs:
|
|||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: jan-mac-universal-${{ inputs.new_version }}
|
name: jan-electron-mac-universal-${{ inputs.new_version }}
|
||||||
path: ./electron/dist/*.dmg
|
path: ./electron/dist/*.dmg
|
||||||
@ -51,6 +51,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-windows-x64:
|
build-windows-x64:
|
||||||
|
if: inputs.public_provider == 'github' || inputs.public_provider == 'none'
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
@ -225,5 +226,5 @@ jobs:
|
|||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: jan-win-x64-${{ inputs.new_version }}
|
name: jan-electron-win-x64-${{ inputs.new_version }}
|
||||||
path: ./electron/dist/*.exe
|
path: ./electron/dist/*.exe
|
||||||
@ -44,9 +44,12 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
|
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
|
||||||
echo "Tag detected, set output follow tag"
|
echo "Tag detected, set output follow tag"
|
||||||
echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}"
|
sanitized_tag="${{ steps.tag.outputs.tag }}"
|
||||||
|
# Remove the 'v' prefix if it exists
|
||||||
|
sanitized_tag="${sanitized_tag#v}"
|
||||||
|
echo "::set-output name=new_version::$sanitized_tag"
|
||||||
else
|
else
|
||||||
# Get the latest release tag from GitHub API
|
# Get the latest release tag from GitHub API
|
||||||
LATEST_TAG=$(get_latest_tag)
|
LATEST_TAG=$(get_latest_tag)
|
||||||
|
|||||||
@ -47,10 +47,10 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
args: |
|
args: |
|
||||||
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
|
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
|
||||||
- Windows: https://delta.jan.ai/nightly/jan-nightly-win-x64-{{ VERSION }}.exe
|
- Windows: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_x64-setup.exe
|
||||||
- macOS Universal: https://delta.jan.ai/nightly/jan-nightly-mac-universal-{{ VERSION }}.dmg
|
- macOS Universal: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_universal.dmg
|
||||||
- Linux Deb: https://delta.jan.ai/nightly/jan-nightly-linux-amd64-{{ VERSION }}.deb
|
- Linux Deb: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.deb
|
||||||
- Linux AppImage: https://delta.jan.ai/nightly/jan-nightly-linux-x86_64-{{ VERSION }}.AppImage
|
- Linux AppImage: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.AppImage
|
||||||
- Github action run: https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }}
|
- Github action run: https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }}
|
||||||
env:
|
env:
|
||||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
|
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
|
||||||
|
|||||||
644
.github/workflows/template-tauri-build-linux-x64.yml
vendored
@ -1,318 +1,326 @@
|
|||||||
name: tauri-build-linux-x64
|
name: tauri-build-linux-x64
|
||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
inputs:
|
inputs:
|
||||||
ref:
|
ref:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: 'refs/heads/main'
|
default: 'refs/heads/main'
|
||||||
public_provider:
|
public_provider:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: none
|
default: none
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
new_version:
|
new_version:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
cortex_api_port:
|
cortex_api_port:
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
upload_url:
|
upload_url:
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
channel:
|
channel:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: 'nightly'
|
default: 'nightly'
|
||||||
description: 'The channel to use for this job'
|
description: 'The channel to use for this job'
|
||||||
secrets:
|
secrets:
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
required: false
|
required: false
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
required: false
|
required: false
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PRIVATE_KEY:
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PUBLIC_KEY:
|
TAURI_SIGNING_PUBLIC_KEY:
|
||||||
required: false
|
required: false
|
||||||
outputs:
|
outputs:
|
||||||
DEB_SIG:
|
DEB_SIG:
|
||||||
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
|
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
|
||||||
APPIMAGE_SIG:
|
APPIMAGE_SIG:
|
||||||
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
|
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
|
||||||
APPIMAGE_FILE_NAME:
|
APPIMAGE_FILE_NAME:
|
||||||
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
|
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
|
||||||
jobs:
|
jobs:
|
||||||
build-linux-x64:
|
build-linux-x64:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-22.04
|
||||||
outputs:
|
outputs:
|
||||||
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
|
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
|
||||||
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
|
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
|
||||||
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
||||||
environment: production
|
environment: production
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- name: Getting the repo
|
- name: Getting the repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
- name: Free Disk Space Before Build
|
- name: Free Disk Space Before Build
|
||||||
run: |
|
run: |
|
||||||
echo "Disk space before cleanup:"
|
echo "Disk space before cleanup:"
|
||||||
df -h
|
df -h
|
||||||
sudo rm -rf /usr/local/.ghcup
|
sudo rm -rf /usr/local/.ghcup
|
||||||
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
||||||
sudo rm -rf /usr/local/lib/android/sdk/ndk
|
sudo rm -rf /usr/local/lib/android/sdk/ndk
|
||||||
sudo rm -rf /usr/share/dotnet
|
sudo rm -rf /usr/share/dotnet
|
||||||
sudo rm -rf /opt/ghc
|
sudo rm -rf /opt/ghc
|
||||||
sudo rm -rf /usr/local/share/boost
|
sudo rm -rf /usr/local/share/boost
|
||||||
sudo apt-get clean
|
sudo apt-get clean
|
||||||
echo "Disk space after cleanup:"
|
echo "Disk space after cleanup:"
|
||||||
df -h
|
df -h
|
||||||
|
|
||||||
- name: Replace Icons for Beta Build
|
- name: Replace Icons for Beta Build
|
||||||
if: inputs.channel != 'stable'
|
if: inputs.channel != 'stable'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
- name: Installing node
|
- name: Installing node
|
||||||
uses: actions/setup-node@v1
|
uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
|
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
- name: Install ctoml
|
- name: Install ctoml
|
||||||
run: |
|
run: |
|
||||||
cargo install ctoml
|
cargo install ctoml
|
||||||
|
|
||||||
- name: Install Tauri dependecies
|
- name: Install Tauri dependecies
|
||||||
run: |
|
run: |
|
||||||
sudo apt update
|
sudo apt update
|
||||||
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
|
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
|
||||||
|
|
||||||
- name: Update app version base public_provider
|
- name: Update app version base public_provider
|
||||||
run: |
|
run: |
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
# Update tauri.conf.json
|
# Update tauri.conf.json
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.resources = ["resources/themes/**/*", "resources/pre-install/**/*"] | .bundle.externalBin = ["binaries/cortex-server", "resources/bin/uv"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.resources = ["resources/pre-install/**/*"] | .bundle.externalBin = ["binaries/cortex-server", "resources/bin/uv"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
||||||
"usr/lib/Jan-${{ inputs.channel }}/binaries": "binaries/deps",
|
"usr/lib/Jan-${{ inputs.channel }}/binaries": "binaries/deps",
|
||||||
"usr/lib/Jan-${{ inputs.channel }}/binaries/engines": "binaries/engines",
|
"usr/lib/Jan-${{ inputs.channel }}/binaries/engines": "binaries/engines",
|
||||||
"usr/lib/Jan-${{ inputs.channel }}/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
"usr/lib/Jan-${{ inputs.channel }}/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
else
|
else
|
||||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
||||||
"usr/lib/Jan/binaries": "binaries/deps",
|
"usr/lib/Jan/binaries": "binaries/deps",
|
||||||
"usr/lib/Jan/binaries/engines": "binaries/engines",
|
"usr/lib/Jan/binaries/engines": "binaries/engines",
|
||||||
"usr/lib/Jan/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
"usr/lib/Jan/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
fi
|
fi
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
mv /tmp/package.json web/package.json
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
cat ./src-tauri/Cargo.toml
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
# Change app name for beta and nightly builds
|
# Change app name for beta and nightly builds
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
chmod +x .github/scripts/rename-tauri-app.sh
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
cat ./src-tauri/tauri.conf.json
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
# Update Cargo.toml
|
# Update Cargo.toml
|
||||||
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
echo "------------------"
|
echo "------------------"
|
||||||
cat ./src-tauri/Cargo.toml
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
chmod +x .github/scripts/rename-workspace.sh
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
cat ./package.json
|
cat ./package.json
|
||||||
fi
|
fi
|
||||||
- name: Build app
|
- name: Build app
|
||||||
run: |
|
run: |
|
||||||
make build-tauri
|
make build-tauri
|
||||||
# Copy engines and bun to appimage
|
# Copy engines and bun to appimage
|
||||||
wget https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O ./appimagetool
|
wget https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O ./appimagetool
|
||||||
chmod +x ./appimagetool
|
chmod +x ./appimagetool
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
ls ./src-tauri/target/release/bundle/appimage/
|
ls ./src-tauri/target/release/bundle/appimage/
|
||||||
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/bin/bun
|
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/bin/bun
|
||||||
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/engines
|
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/engines
|
||||||
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
|
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
|
||||||
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
|
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
|
||||||
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
|
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/binaries/
|
||||||
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep .AppImage | head -1)
|
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep .AppImage | head -1)
|
||||||
echo $APP_IMAGE
|
echo $APP_IMAGE
|
||||||
rm -f $APP_IMAGE
|
rm -f $APP_IMAGE
|
||||||
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir $APP_IMAGE
|
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir $APP_IMAGE
|
||||||
else
|
yarn tauri signer sign \
|
||||||
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/bin/bun
|
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
|
||||||
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/engines
|
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
|
||||||
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
|
"$APP_IMAGE"
|
||||||
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
|
else
|
||||||
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
|
cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/bin/bun
|
||||||
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
|
mkdir -p ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/engines
|
||||||
echo $APP_IMAGE
|
cp -f ./src-tauri/binaries/deps/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
|
||||||
rm -f $APP_IMAGE
|
cp -f ./src-tauri/binaries/*.so* ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
|
||||||
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan.AppDir $APP_IMAGE
|
cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/binaries/
|
||||||
fi
|
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
|
||||||
|
echo $APP_IMAGE
|
||||||
env:
|
rm -f $APP_IMAGE
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
./appimagetool ./src-tauri/target/release/bundle/appimage/Jan.AppDir $APP_IMAGE
|
||||||
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
yarn tauri signer sign \
|
||||||
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
|
||||||
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
|
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
|
||||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
"$APP_IMAGE"
|
||||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
fi
|
||||||
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
|
|
||||||
|
env:
|
||||||
# Publish app
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
## Artifacts, for dev and test
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
- name: Upload Artifact
|
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
|
||||||
if: inputs.public_provider != 'github'
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
uses: actions/upload-artifact@v4
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
with:
|
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
|
||||||
name: jan-linux-amd64-${{ inputs.new_version }}-deb
|
|
||||||
path: ./src-tauri/target/release/bundle/deb/*.deb
|
# Publish app
|
||||||
|
|
||||||
- name: Upload Artifact
|
## Artifacts, for dev and test
|
||||||
if: inputs.public_provider != 'github'
|
- name: Upload Artifact
|
||||||
uses: actions/upload-artifact@v4
|
if: inputs.public_provider != 'github'
|
||||||
with:
|
uses: actions/upload-artifact@v4
|
||||||
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
|
with:
|
||||||
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
name: jan-linux-amd64-${{ inputs.new_version }}-deb
|
||||||
|
path: ./src-tauri/target/release/bundle/deb/*.deb
|
||||||
## create zip file and latest-linux.yml for linux electron auto updater
|
|
||||||
- name: Create zip file and latest-linux.yml for linux electron auto updater
|
- name: Upload Artifact
|
||||||
id: packageinfo
|
if: inputs.public_provider != 'github'
|
||||||
run: |
|
uses: actions/upload-artifact@v4
|
||||||
cd ./src-tauri/target/release/bundle
|
with:
|
||||||
|
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||||
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
|
|
||||||
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
|
## create zip file and latest-linux.yml for linux electron auto updater
|
||||||
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
|
- name: Create zip file and latest-linux.yml for linux electron auto updater
|
||||||
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
|
id: packageinfo
|
||||||
else
|
run: |
|
||||||
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
|
cd ./src-tauri/target/release/bundle
|
||||||
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
|
|
||||||
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
|
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
|
||||||
fi
|
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
|
||||||
|
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
|
||||||
DEB_FILE_SIZE=$(stat -c%s deb/$DEB_FILE_NAME)
|
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
|
||||||
APPIMAGE_FILE_SIZE=$(stat -c%s appimage/$APPIMAGE_FILE_NAME)
|
else
|
||||||
echo "deb file size: $DEB_FILE_SIZE"
|
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
|
||||||
echo "appimage file size: $APPIMAGE_FILE_SIZE"
|
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
|
||||||
|
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
|
||||||
DEB_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py deb/$DEB_FILE_NAME)
|
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
|
||||||
APPIMAGE_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py appimage/$APPIMAGE_FILE_NAME)
|
fi
|
||||||
echo "deb sh512 checksum: $DEB_SH512_CHECKSUM"
|
|
||||||
echo "appimage sh512 checksum: $APPIMAGE_SH512_CHECKSUM"
|
DEB_FILE_SIZE=$(stat -c%s deb/$DEB_FILE_NAME)
|
||||||
|
APPIMAGE_FILE_SIZE=$(stat -c%s appimage/$APPIMAGE_FILE_NAME)
|
||||||
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
echo "deb file size: $DEB_FILE_SIZE"
|
||||||
echo "releaseDate: $CURRENT_TIME"
|
echo "appimage file size: $APPIMAGE_FILE_SIZE"
|
||||||
|
|
||||||
# Create latest-linux.yml file
|
DEB_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py deb/$DEB_FILE_NAME)
|
||||||
echo "version: ${{ inputs.new_version }}" > latest-linux.yml
|
APPIMAGE_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py appimage/$APPIMAGE_FILE_NAME)
|
||||||
echo "files:" >> latest-linux.yml
|
echo "deb sh512 checksum: $DEB_SH512_CHECKSUM"
|
||||||
echo " - url: $DEB_FILE_NAME" >> latest-linux.yml
|
echo "appimage sh512 checksum: $APPIMAGE_SH512_CHECKSUM"
|
||||||
echo " sha512: $DEB_SH512_CHECKSUM" >> latest-linux.yml
|
|
||||||
echo " size: $DEB_FILE_SIZE" >> latest-linux.yml
|
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
echo " - url: $APPIMAGE_FILE_NAME" >> latest-linux.yml
|
echo "releaseDate: $CURRENT_TIME"
|
||||||
echo " sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
|
|
||||||
echo " size: $APPIMAGE_FILE_SIZE" >> latest-linux.yml
|
# Create latest-linux.yml file
|
||||||
echo "path: $APPIMAGE_FILE_NAME" >> latest-linux.yml
|
echo "version: ${{ inputs.new_version }}" > latest-linux.yml
|
||||||
echo "sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
|
echo "files:" >> latest-linux.yml
|
||||||
echo "releaseDate: $CURRENT_TIME" >> latest-linux.yml
|
echo " - url: $DEB_FILE_NAME" >> latest-linux.yml
|
||||||
|
echo " sha512: $DEB_SH512_CHECKSUM" >> latest-linux.yml
|
||||||
cat latest-linux.yml
|
echo " size: $DEB_FILE_SIZE" >> latest-linux.yml
|
||||||
cp latest-linux.yml beta-linux.yml
|
echo " - url: $APPIMAGE_FILE_NAME" >> latest-linux.yml
|
||||||
|
echo " sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
|
||||||
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
|
echo " size: $APPIMAGE_FILE_SIZE" >> latest-linux.yml
|
||||||
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
|
echo "path: $APPIMAGE_FILE_NAME" >> latest-linux.yml
|
||||||
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
|
echo "sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml
|
||||||
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
|
echo "releaseDate: $CURRENT_TIME" >> latest-linux.yml
|
||||||
|
|
||||||
## Upload to s3 for nightly and beta
|
cat latest-linux.yml
|
||||||
- name: upload to aws s3 if public provider is aws
|
cp latest-linux.yml beta-linux.yml
|
||||||
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
|
||||||
run: |
|
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
|
||||||
cd ./src-tauri/target/release/bundle
|
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
|
||||||
|
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
|
||||||
# Upload for electron updater for nightly
|
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
|
||||||
aws s3 cp ./latest-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-linux.yml
|
|
||||||
aws s3 cp ./beta-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-linux.yml
|
## Upload to s3 for nightly and beta
|
||||||
|
- name: upload to aws s3 if public provider is aws
|
||||||
# Upload for tauri updater
|
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
||||||
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
|
run: |
|
||||||
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
|
cd ./src-tauri/target/release/bundle
|
||||||
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig
|
|
||||||
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig
|
# Upload for electron updater for nightly
|
||||||
env:
|
aws s3 cp ./latest-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-linux.yml
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
aws s3 cp ./beta-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-linux.yml
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
# Upload for tauri updater
|
||||||
AWS_EC2_METADATA_DISABLED: 'true'
|
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
|
||||||
|
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
|
||||||
## Upload to github release for stable release
|
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig
|
||||||
- name: Upload release assert if public provider is github
|
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig
|
||||||
if: inputs.channel == 'stable'
|
env:
|
||||||
env:
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
with:
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
upload_url: ${{ inputs.upload_url }}
|
|
||||||
asset_path: ./src-tauri/target/release/bundle/latest-linux.yml
|
## Upload to github release for stable release
|
||||||
asset_name: latest-linux.yml
|
- name: Upload release assert if public provider is github
|
||||||
asset_content_type: text/yaml
|
if: inputs.channel == 'stable'
|
||||||
|
env:
|
||||||
- name: Upload release assert if public provider is github
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
if: inputs.channel == 'beta'
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
env:
|
with:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
asset_path: ./src-tauri/target/release/bundle/latest-linux.yml
|
||||||
with:
|
asset_name: latest-linux.yml
|
||||||
upload_url: ${{ inputs.upload_url }}
|
asset_content_type: text/yaml
|
||||||
asset_path: ./src-tauri/target/release/bundle/beta-linux.yml
|
|
||||||
asset_name: beta-linux.yml
|
- name: Upload release assert if public provider is github
|
||||||
asset_content_type: text/yaml
|
if: inputs.channel == 'beta'
|
||||||
- name: Upload release assert if public provider is github
|
env:
|
||||||
if: inputs.public_provider == 'github'
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
env:
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
with:
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
upload_url: ${{ inputs.upload_url }}
|
||||||
with:
|
asset_path: ./src-tauri/target/release/bundle/beta-linux.yml
|
||||||
upload_url: ${{ inputs.upload_url }}
|
asset_name: beta-linux.yml
|
||||||
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
asset_content_type: text/yaml
|
||||||
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
- name: Upload release assert if public provider is github
|
||||||
asset_content_type: application/octet-stream
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
- name: Upload release assert if public provider is github
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
if: inputs.public_provider == 'github'
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
env:
|
with:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
||||||
with:
|
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
||||||
upload_url: ${{ inputs.upload_url }}
|
asset_content_type: application/octet-stream
|
||||||
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
|
|
||||||
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
|
- name: Upload release assert if public provider is github
|
||||||
asset_content_type: application/octet-stream
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
|
||||||
|
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
|
||||||
|
asset_content_type: application/octet-stream
|
||||||
|
|||||||
624
.github/workflows/template-tauri-build-macos.yml
vendored
@ -1,312 +1,312 @@
|
|||||||
name: tauri-build-macos
|
name: tauri-build-macos
|
||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
inputs:
|
inputs:
|
||||||
ref:
|
ref:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: 'refs/heads/main'
|
default: 'refs/heads/main'
|
||||||
public_provider:
|
public_provider:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: none
|
default: none
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
new_version:
|
new_version:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
cortex_api_port:
|
cortex_api_port:
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
upload_url:
|
upload_url:
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
channel:
|
channel:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: 'nightly'
|
default: 'nightly'
|
||||||
description: 'The channel to use for this job'
|
description: 'The channel to use for this job'
|
||||||
secrets:
|
secrets:
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
required: false
|
required: false
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
required: false
|
required: false
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
required: false
|
required: false
|
||||||
CODE_SIGN_P12_BASE64:
|
CODE_SIGN_P12_BASE64:
|
||||||
required: false
|
required: false
|
||||||
CODE_SIGN_P12_PASSWORD:
|
CODE_SIGN_P12_PASSWORD:
|
||||||
required: false
|
required: false
|
||||||
APPLE_ID:
|
APPLE_ID:
|
||||||
required: false
|
required: false
|
||||||
APPLE_APP_SPECIFIC_PASSWORD:
|
APPLE_APP_SPECIFIC_PASSWORD:
|
||||||
required: false
|
required: false
|
||||||
DEVELOPER_ID:
|
DEVELOPER_ID:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PRIVATE_KEY:
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PUBLIC_KEY:
|
TAURI_SIGNING_PUBLIC_KEY:
|
||||||
required: false
|
required: false
|
||||||
outputs:
|
outputs:
|
||||||
MAC_UNIVERSAL_SIG:
|
MAC_UNIVERSAL_SIG:
|
||||||
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
|
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
|
||||||
TAR_NAME:
|
TAR_NAME:
|
||||||
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
|
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-macos:
|
build-macos:
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
outputs:
|
outputs:
|
||||||
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
|
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
|
||||||
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
|
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
|
||||||
environment: production
|
environment: production
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- name: Getting the repo
|
- name: Getting the repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
- name: Replace Icons for Beta Build
|
- name: Replace Icons for Beta Build
|
||||||
if: inputs.channel != 'stable'
|
if: inputs.channel != 'stable'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
- name: Installing node
|
- name: Installing node
|
||||||
uses: actions/setup-node@v1
|
uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
|
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
- name: Install ctoml
|
- name: Install ctoml
|
||||||
run: |
|
run: |
|
||||||
cargo install ctoml
|
cargo install ctoml
|
||||||
|
|
||||||
- name: Create bun and uv universal
|
- name: Create bun and uv universal
|
||||||
run: |
|
run: |
|
||||||
mkdir -p ./src-tauri/resources/bin/
|
mkdir -p ./src-tauri/resources/bin/
|
||||||
cd ./src-tauri/resources/bin/
|
cd ./src-tauri/resources/bin/
|
||||||
curl -L -o bun-darwin-x64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-x64.zip
|
curl -L -o bun-darwin-x64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-x64.zip
|
||||||
curl -L -o bun-darwin-aarch64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-aarch64.zip
|
curl -L -o bun-darwin-aarch64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-aarch64.zip
|
||||||
unzip bun-darwin-x64.zip
|
unzip bun-darwin-x64.zip
|
||||||
unzip bun-darwin-aarch64.zip
|
unzip bun-darwin-aarch64.zip
|
||||||
lipo -create -output bun-universal-apple-darwin bun-darwin-x64/bun bun-darwin-aarch64/bun
|
lipo -create -output bun-universal-apple-darwin bun-darwin-x64/bun bun-darwin-aarch64/bun
|
||||||
cp -f bun-darwin-aarch64/bun bun-aarch64-apple-darwin
|
cp -f bun-darwin-aarch64/bun bun-aarch64-apple-darwin
|
||||||
cp -f bun-darwin-x64/bun bun-x86_64-apple-darwin
|
cp -f bun-darwin-x64/bun bun-x86_64-apple-darwin
|
||||||
cp -f bun-universal-apple-darwin bun
|
cp -f bun-universal-apple-darwin bun
|
||||||
|
|
||||||
curl -L -o uv-x86_64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-x86_64-apple-darwin.tar.gz
|
curl -L -o uv-x86_64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-x86_64-apple-darwin.tar.gz
|
||||||
curl -L -o uv-arm64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-aarch64-apple-darwin.tar.gz
|
curl -L -o uv-arm64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-aarch64-apple-darwin.tar.gz
|
||||||
tar -xzf uv-x86_64.tar.gz
|
tar -xzf uv-x86_64.tar.gz
|
||||||
tar -xzf uv-arm64.tar.gz
|
tar -xzf uv-arm64.tar.gz
|
||||||
mv uv-x86_64-apple-darwin uv-x86_64
|
mv uv-x86_64-apple-darwin uv-x86_64
|
||||||
mv uv-aarch64-apple-darwin uv-aarch64
|
mv uv-aarch64-apple-darwin uv-aarch64
|
||||||
lipo -create -output uv-universal-apple-darwin uv-x86_64/uv uv-aarch64/uv
|
lipo -create -output uv-universal-apple-darwin uv-x86_64/uv uv-aarch64/uv
|
||||||
cp -f uv-x86_64/uv uv-x86_64-apple-darwin
|
cp -f uv-x86_64/uv uv-x86_64-apple-darwin
|
||||||
cp -f uv-aarch64/uv uv-aarch64-apple-darwin
|
cp -f uv-aarch64/uv uv-aarch64-apple-darwin
|
||||||
cp -f uv-universal-apple-darwin uv
|
cp -f uv-universal-apple-darwin uv
|
||||||
ls -la
|
ls -la
|
||||||
|
|
||||||
- name: Update app version based on latest release tag with build number
|
- name: Update app version based on latest release tag with build number
|
||||||
run: |
|
run: |
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
# Update tauri.conf.json
|
# Update tauri.conf.json
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
mv /tmp/package.json web/package.json
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
cat ./src-tauri/Cargo.toml
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
# Change app name for beta and nightly builds
|
# Change app name for beta and nightly builds
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
chmod +x .github/scripts/rename-tauri-app.sh
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
cat ./src-tauri/tauri.conf.json
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
# Update Cargo.toml
|
# Update Cargo.toml
|
||||||
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
echo "------------------"
|
echo "------------------"
|
||||||
cat ./src-tauri/Cargo.toml
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
chmod +x .github/scripts/rename-workspace.sh
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
cat ./package.json
|
cat ./package.json
|
||||||
fi
|
fi
|
||||||
- name: Get key for notarize
|
- name: Get key for notarize
|
||||||
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
|
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
|
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
|
||||||
|
|
||||||
- uses: apple-actions/import-codesign-certs@v2
|
- uses: apple-actions/import-codesign-certs@v2
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
||||||
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
||||||
|
|
||||||
- name: Build app
|
- name: Build app
|
||||||
run: |
|
run: |
|
||||||
rustup target add x86_64-apple-darwin
|
rustup target add x86_64-apple-darwin
|
||||||
make build-tauri
|
make build-tauri
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
APP_PATH: '.'
|
APP_PATH: '.'
|
||||||
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
|
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
|
||||||
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
||||||
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
||||||
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
|
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
|
||||||
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
|
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
|
||||||
APPLE_API_KEY_PATH: /tmp/notary-key.p8
|
APPLE_API_KEY_PATH: /tmp/notary-key.p8
|
||||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
|
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
|
||||||
|
|
||||||
# Publish app
|
# Publish app
|
||||||
|
|
||||||
## Artifacts, for dev and test
|
## Artifacts, for dev and test
|
||||||
- name: Upload Artifact
|
- name: Upload Artifact
|
||||||
if: inputs.public_provider != 'github'
|
if: inputs.public_provider != 'github'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
|
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
|
||||||
path: |
|
path: |
|
||||||
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
|
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
|
||||||
|
|
||||||
## create zip file and latest-mac.yml for mac electron auto updater
|
## create zip file and latest-mac.yml for mac electron auto updater
|
||||||
- name: create zip file and latest-mac.yml for mac electron auto updater
|
- name: create zip file and latest-mac.yml for mac electron auto updater
|
||||||
run: |
|
run: |
|
||||||
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
|
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
|
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
|
||||||
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
|
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
|
||||||
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
|
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
|
||||||
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
|
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
|
||||||
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
|
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
|
||||||
else
|
else
|
||||||
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
|
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
|
||||||
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
|
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
|
||||||
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
|
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
|
||||||
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
|
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
|
||||||
TAR_NAME=Jan.app.tar.gz
|
TAR_NAME=Jan.app.tar.gz
|
||||||
fi
|
fi
|
||||||
|
|
||||||
FILE_SIZE=$(stat -f%z $FILE_NAME)
|
FILE_SIZE=$(stat -f%z $FILE_NAME)
|
||||||
echo "size: $FILE_SIZE"
|
echo "size: $FILE_SIZE"
|
||||||
|
|
||||||
SH512_CHECKSUM=$(python3 ../../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
|
SH512_CHECKSUM=$(python3 ../../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
|
||||||
echo "sha512: $SH512_CHECKSUM"
|
echo "sha512: $SH512_CHECKSUM"
|
||||||
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
echo "releaseDate: $CURRENT_TIME"
|
echo "releaseDate: $CURRENT_TIME"
|
||||||
|
|
||||||
# Create latest-mac.yml file
|
# Create latest-mac.yml file
|
||||||
echo "version: ${{ inputs.new_version }}" > latest-mac.yml
|
echo "version: ${{ inputs.new_version }}" > latest-mac.yml
|
||||||
echo "files:" >> latest-mac.yml
|
echo "files:" >> latest-mac.yml
|
||||||
echo " - url: $FILE_NAME" >> latest-mac.yml
|
echo " - url: $FILE_NAME" >> latest-mac.yml
|
||||||
echo " sha512: $SH512_CHECKSUM" >> latest-mac.yml
|
echo " sha512: $SH512_CHECKSUM" >> latest-mac.yml
|
||||||
echo " size: $FILE_NAME" >> latest-mac.yml
|
echo " size: $FILE_SIZE" >> latest-mac.yml
|
||||||
echo "path: $FILE_NAME" >> latest-mac.yml
|
echo "path: $FILE_NAME" >> latest-mac.yml
|
||||||
echo "sha512: $SH512_CHECKSUM" >> latest-mac.yml
|
echo "sha512: $SH512_CHECKSUM" >> latest-mac.yml
|
||||||
echo "releaseDate: $CURRENT_TIME" >> latest-mac.yml
|
echo "releaseDate: $CURRENT_TIME" >> latest-mac.yml
|
||||||
|
|
||||||
cat latest-mac.yml
|
cat latest-mac.yml
|
||||||
cp latest-mac.yml beta-mac.yml
|
cp latest-mac.yml beta-mac.yml
|
||||||
|
|
||||||
echo "::set-output name=MAC_UNIVERSAL_SIG::$MAC_UNIVERSAL_SIG"
|
echo "::set-output name=MAC_UNIVERSAL_SIG::$MAC_UNIVERSAL_SIG"
|
||||||
echo "::set-output name=FILE_NAME::$FILE_NAME"
|
echo "::set-output name=FILE_NAME::$FILE_NAME"
|
||||||
echo "::set-output name=DMG_NAME::$DMG_NAME"
|
echo "::set-output name=DMG_NAME::$DMG_NAME"
|
||||||
echo "::set-output name=TAR_NAME::$TAR_NAME"
|
echo "::set-output name=TAR_NAME::$TAR_NAME"
|
||||||
id: metadata
|
id: metadata
|
||||||
|
|
||||||
## Upload to s3 for nightly and beta
|
## Upload to s3 for nightly and beta
|
||||||
- name: upload to aws s3 if public provider is aws
|
- name: upload to aws s3 if public provider is aws
|
||||||
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
||||||
run: |
|
run: |
|
||||||
cd ./src-tauri/target/universal-apple-darwin/release/bundle
|
cd ./src-tauri/target/universal-apple-darwin/release/bundle
|
||||||
|
|
||||||
# Upload for electron updater for nightly
|
# Upload for electron updater for nightly
|
||||||
aws s3 cp ./macos/latest-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-mac.yml
|
aws s3 cp ./macos/latest-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-mac.yml
|
||||||
aws s3 cp ./macos/beta-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-mac.yml
|
aws s3 cp ./macos/beta-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-mac.yml
|
||||||
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
|
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
|
||||||
aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig
|
# aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip.sig
|
||||||
|
|
||||||
# Upload for tauri updater
|
# Upload for tauri updater
|
||||||
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
|
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
|
||||||
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
|
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
|
||||||
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz.sig
|
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz.sig
|
||||||
env:
|
env:
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
AWS_EC2_METADATA_DISABLED: 'true'
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
|
||||||
## Upload to github release for stable release
|
## Upload to github release for stable release
|
||||||
- name: Upload release assert if public provider is github
|
- name: Upload release assert if public provider is github
|
||||||
if: inputs.channel == 'stable'
|
if: inputs.channel == 'stable'
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/latest-mac.yml
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/latest-mac.yml
|
||||||
asset_name: latest-mac.yml
|
asset_name: latest-mac.yml
|
||||||
asset_content_type: text/yaml
|
asset_content_type: text/yaml
|
||||||
|
|
||||||
- name: Upload release assert if public provider is github
|
- name: Upload release assert if public provider is github
|
||||||
if: inputs.channel == 'beta'
|
if: inputs.channel == 'beta'
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/beta-mac.yml
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/beta-mac.yml
|
||||||
asset_name: beta-mac.yml
|
asset_name: beta-mac.yml
|
||||||
asset_content_type: text/yaml
|
asset_content_type: text/yaml
|
||||||
|
|
||||||
- name: Upload release assert if public provider is github
|
- name: Upload release assert if public provider is github
|
||||||
if: inputs.public_provider == 'github'
|
if: inputs.public_provider == 'github'
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
|
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
asset_content_type: application/gzip
|
asset_content_type: application/gzip
|
||||||
|
|
||||||
- name: Upload release assert if public provider is github
|
- name: Upload release assert if public provider is github
|
||||||
if: inputs.public_provider == 'github'
|
if: inputs.public_provider == 'github'
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
|
||||||
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
|
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
|
||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload release assert if public provider is github
|
- name: Upload release assert if public provider is github
|
||||||
if: inputs.public_provider == 'github'
|
if: inputs.public_provider == 'github'
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
upload_url: ${{ inputs.upload_url }}
|
||||||
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
|
||||||
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
|
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
|
||||||
asset_content_type: application/gzip
|
asset_content_type: application/gzip
|
||||||
|
|||||||
@ -1,290 +1,293 @@
|
|||||||
name: tauri-build-windows-x64
|
name: tauri-build-windows-x64
|
||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
inputs:
|
inputs:
|
||||||
ref:
|
ref:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: 'refs/heads/main'
|
default: 'refs/heads/main'
|
||||||
public_provider:
|
public_provider:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: none
|
default: none
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
new_version:
|
new_version:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
cortex_api_port:
|
cortex_api_port:
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
upload_url:
|
upload_url:
|
||||||
required: false
|
required: false
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
channel:
|
channel:
|
||||||
required: true
|
required: true
|
||||||
type: string
|
type: string
|
||||||
default: 'nightly'
|
default: 'nightly'
|
||||||
description: 'The channel to use for this job'
|
description: 'The channel to use for this job'
|
||||||
secrets:
|
secrets:
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
required: false
|
required: false
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
required: false
|
required: false
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
required: false
|
required: false
|
||||||
AZURE_KEY_VAULT_URI:
|
AZURE_KEY_VAULT_URI:
|
||||||
required: false
|
required: false
|
||||||
AZURE_CLIENT_ID:
|
AZURE_CLIENT_ID:
|
||||||
required: false
|
required: false
|
||||||
AZURE_TENANT_ID:
|
AZURE_TENANT_ID:
|
||||||
required: false
|
required: false
|
||||||
AZURE_CLIENT_SECRET:
|
AZURE_CLIENT_SECRET:
|
||||||
required: false
|
required: false
|
||||||
AZURE_CERT_NAME:
|
AZURE_CERT_NAME:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PRIVATE_KEY:
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
required: false
|
required: false
|
||||||
TAURI_SIGNING_PUBLIC_KEY:
|
TAURI_SIGNING_PUBLIC_KEY:
|
||||||
required: false
|
required: false
|
||||||
outputs:
|
outputs:
|
||||||
WIN_SIG:
|
WIN_SIG:
|
||||||
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
|
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
|
||||||
FILE_NAME:
|
FILE_NAME:
|
||||||
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
|
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-windows-x64:
|
build-windows-x64:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
outputs:
|
outputs:
|
||||||
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
|
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
|
||||||
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
|
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- name: Getting the repo
|
- name: Getting the repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
- name: Replace Icons for Beta Build
|
- name: Replace Icons for Beta Build
|
||||||
if: inputs.channel != 'stable'
|
if: inputs.channel != 'stable'
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
- name: Installing node
|
- name: Installing node
|
||||||
uses: actions/setup-node@v1
|
uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 20
|
||||||
|
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
- name: Install ctoml
|
- name: Install ctoml
|
||||||
run: |
|
run: |
|
||||||
cargo install ctoml
|
cargo install ctoml
|
||||||
|
|
||||||
- name: Update app version base on tag
|
- name: Update app version base on tag
|
||||||
id: version_update
|
id: version_update
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
# Update tauri.conf.json
|
# Update tauri.conf.json
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
mv /tmp/package.json web/package.json
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
echo "---------Cargo.toml---------"
|
echo "---------Cargo.toml---------"
|
||||||
cat ./src-tauri/Cargo.toml
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
generate_build_version() {
|
generate_build_version() {
|
||||||
### Examble
|
### Examble
|
||||||
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
|
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
|
||||||
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
|
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
|
||||||
### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213
|
### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213
|
||||||
local new_version="$1"
|
local new_version="$1"
|
||||||
local base_version
|
local base_version
|
||||||
local t_value
|
local t_value
|
||||||
|
|
||||||
# Check if it has a "-"
|
# Check if it has a "-"
|
||||||
if [[ "$new_version" == *-* ]]; then
|
if [[ "$new_version" == *-* ]]; then
|
||||||
base_version="${new_version%%-*}" # part before -
|
base_version="${new_version%%-*}" # part before -
|
||||||
suffix="${new_version#*-}" # part after -
|
suffix="${new_version#*-}" # part after -
|
||||||
|
|
||||||
# Check if it is rcX-beta
|
# Check if it is rcX-beta
|
||||||
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
|
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
|
||||||
t_value="${BASH_REMATCH[1]}"
|
t_value="${BASH_REMATCH[1]}"
|
||||||
else
|
else
|
||||||
t_value="$suffix"
|
t_value="$suffix"
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
base_version="$new_version"
|
base_version="$new_version"
|
||||||
t_value="0"
|
t_value="0"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Export two values
|
# Export two values
|
||||||
new_base_version="$base_version"
|
new_base_version="$base_version"
|
||||||
new_build_version="${base_version}.${t_value}"
|
new_build_version="${base_version}.${t_value}"
|
||||||
}
|
}
|
||||||
generate_build_version ${{ inputs.new_version }}
|
generate_build_version ${{ inputs.new_version }}
|
||||||
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
|
||||||
# Change app name for beta and nightly builds
|
# Change app name for beta and nightly builds
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
chmod +x .github/scripts/rename-tauri-app.sh
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
echo "---------tauri.conf.json---------"
|
echo "---------tauri.conf.json---------"
|
||||||
cat ./src-tauri/tauri.conf.json
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
# Update Cargo.toml
|
# Update Cargo.toml
|
||||||
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
echo "------------------"
|
echo "------------------"
|
||||||
cat ./src-tauri/Cargo.toml
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
chmod +x .github/scripts/rename-workspace.sh
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
cat ./package.json
|
cat ./package.json
|
||||||
|
|
||||||
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
fi
|
fi
|
||||||
echo "---------nsis.template---------"
|
echo "---------nsis.template---------"
|
||||||
cat ./src-tauri/tauri.bundle.windows.nsis.template
|
cat ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
|
||||||
- name: Install AzureSignTool
|
- name: Install AzureSignTool
|
||||||
run: |
|
run: |
|
||||||
dotnet tool install --global --version 6.0.0 AzureSignTool
|
dotnet tool install --global --version 6.0.0 AzureSignTool
|
||||||
|
|
||||||
- name: Build app
|
- name: Build app
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
make build-tauri
|
curl -L -o ./src-tauri/binaries/vcomp140.dll https://catalog.jan.ai/vcomp140.dll
|
||||||
env:
|
curl -L -o ./src-tauri/binaries/msvcp140_codecvt_ids.dll https://catalog.jan.ai/msvcp140_codecvt_ids.dll
|
||||||
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
ls ./src-tauri/binaries
|
||||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
make build-tauri
|
||||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
env:
|
||||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
||||||
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
|
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||||
AWS_DEFAULT_REGION: auto
|
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
|
||||||
AWS_EC2_METADATA_DISABLED: 'true'
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
AWS_MAX_ATTEMPTS: '5'
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
AWS_DEFAULT_REGION: auto
|
||||||
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
|
AWS_MAX_ATTEMPTS: '5'
|
||||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
|
# CORTEX_API_PORT: ${{ inputs.cortex_api_port }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
- name: Upload Artifact
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
uses: actions/upload-artifact@v4
|
TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }}
|
||||||
with:
|
|
||||||
name: jan-windows-${{ inputs.new_version }}
|
- name: Upload Artifact
|
||||||
path: |
|
uses: actions/upload-artifact@v4
|
||||||
./src-tauri/target/release/bundle/nsis/*.exe
|
with:
|
||||||
|
name: jan-windows-${{ inputs.new_version }}
|
||||||
## create zip file and latest.yml for windows electron auto updater
|
path: |
|
||||||
- name: create zip file and latest.yml for windows electron auto updater
|
./src-tauri/target/release/bundle/nsis/*.exe
|
||||||
shell: bash
|
|
||||||
run: |
|
## create zip file and latest.yml for windows electron auto updater
|
||||||
cd ./src-tauri/target/release/bundle/nsis
|
- name: create zip file and latest.yml for windows electron auto updater
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
shell: bash
|
||||||
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
|
run: |
|
||||||
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
|
cd ./src-tauri/target/release/bundle/nsis
|
||||||
else
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
|
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
|
||||||
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
|
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
|
||||||
fi
|
else
|
||||||
|
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
|
||||||
FILE_SIZE=$(stat -c %s $FILE_NAME)
|
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
|
||||||
echo "size: $FILE_SIZE"
|
fi
|
||||||
|
|
||||||
SH512_CHECKSUM=$(python3 ../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
|
FILE_SIZE=$(stat -c %s $FILE_NAME)
|
||||||
echo "sha512: $SH512_CHECKSUM"
|
echo "size: $FILE_SIZE"
|
||||||
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
|
||||||
echo "releaseDate: $CURRENT_TIME"
|
SH512_CHECKSUM=$(python3 ../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
|
||||||
|
echo "sha512: $SH512_CHECKSUM"
|
||||||
# Create latest.yml file
|
CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
echo "version: ${{ inputs.new_version }}" > latest.yml
|
echo "releaseDate: $CURRENT_TIME"
|
||||||
echo "files:" >> latest.yml
|
|
||||||
echo " - url: $FILE_NAME" >> latest.yml
|
# Create latest.yml file
|
||||||
echo " sha512: $SH512_CHECKSUM" >> latest.yml
|
echo "version: ${{ inputs.new_version }}" > latest.yml
|
||||||
echo " size: $FILE_NAME" >> latest.yml
|
echo "files:" >> latest.yml
|
||||||
echo "path: $FILE_NAME" >> latest.yml
|
echo " - url: $FILE_NAME" >> latest.yml
|
||||||
echo "sha512: $SH512_CHECKSUM" >> latest.yml
|
echo " sha512: $SH512_CHECKSUM" >> latest.yml
|
||||||
echo "releaseDate: $CURRENT_TIME" >> latest.yml
|
echo " size: $FILE_SIZE" >> latest.yml
|
||||||
|
echo "path: $FILE_NAME" >> latest.yml
|
||||||
cat latest.yml
|
echo "sha512: $SH512_CHECKSUM" >> latest.yml
|
||||||
cp latest.yml beta.yml
|
echo "releaseDate: $CURRENT_TIME" >> latest.yml
|
||||||
|
|
||||||
echo "::set-output name=WIN_SIG::$WIN_SIG"
|
cat latest.yml
|
||||||
echo "::set-output name=FILE_NAME::$FILE_NAME"
|
cp latest.yml beta.yml
|
||||||
id: metadata
|
|
||||||
|
echo "::set-output name=WIN_SIG::$WIN_SIG"
|
||||||
## Upload to s3 for nightly and beta
|
echo "::set-output name=FILE_NAME::$FILE_NAME"
|
||||||
- name: upload to aws s3 if public provider is aws
|
id: metadata
|
||||||
shell: bash
|
|
||||||
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
## Upload to s3 for nightly and beta
|
||||||
run: |
|
- name: upload to aws s3 if public provider is aws
|
||||||
cd ./src-tauri/target/release/bundle/nsis
|
shell: bash
|
||||||
|
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
||||||
# Upload for electron updater for nightly
|
run: |
|
||||||
aws s3 cp ./latest.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest.yml
|
cd ./src-tauri/target/release/bundle/nsis
|
||||||
aws s3 cp ./beta.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta.yml
|
|
||||||
|
# Upload for electron updater for nightly
|
||||||
# Upload for tauri updater
|
aws s3 cp ./latest.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest.yml
|
||||||
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
|
aws s3 cp ./beta.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta.yml
|
||||||
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
|
|
||||||
env:
|
# Upload for tauri updater
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
env:
|
||||||
AWS_EC2_METADATA_DISABLED: 'true'
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
## Upload to github release for stable release
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
- name: Upload release assert if public provider is github
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
if: inputs.channel == 'stable'
|
|
||||||
env:
|
## Upload to github release for stable release
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
- name: Upload release assert if public provider is github
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
if: inputs.channel == 'stable'
|
||||||
with:
|
env:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
asset_path: ./src-tauri/target/release/bundle/nsis/latest.yml
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
asset_name: latest.yml
|
with:
|
||||||
asset_content_type: text/yaml
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/nsis/latest.yml
|
||||||
- name: Upload release asset if public provider is github
|
asset_name: latest.yml
|
||||||
if: inputs.channel == 'beta'
|
asset_content_type: text/yaml
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
- name: Upload release assert if public provider is github
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
if: inputs.channel == 'beta'
|
||||||
with:
|
env:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
asset_path: ./src-tauri/target/release/bundle/nsis/beta.yml
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
asset_name: beta.yml
|
with:
|
||||||
asset_content_type: text/yaml
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/nsis/beta.yml
|
||||||
- name: Upload release assert if public provider is github
|
asset_name: beta.yml
|
||||||
if: inputs.public_provider == 'github'
|
asset_content_type: text/yaml
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
- name: Upload release assert if public provider is github
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
if: inputs.public_provider == 'github'
|
||||||
with:
|
env:
|
||||||
upload_url: ${{ inputs.upload_url }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
|
with:
|
||||||
asset_content_type: application/octet-stream
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
asset_content_type: application/octet-stream
|
||||||
|
|||||||
21
.gitignore
vendored
@ -22,19 +22,6 @@ coverage
|
|||||||
*.log
|
*.log
|
||||||
core/lib/**
|
core/lib/**
|
||||||
|
|
||||||
# Nitro binary files
|
|
||||||
extensions/*-extension/bin/*/nitro
|
|
||||||
extensions/*-extension/bin/*/*.metal
|
|
||||||
extensions/*-extension/bin/*/*.exe
|
|
||||||
extensions/*-extension/bin/*/*.dll
|
|
||||||
extensions/*-extension/bin/*/*.exp
|
|
||||||
extensions/*-extension/bin/*/*.lib
|
|
||||||
extensions/*-extension/bin/saved-*
|
|
||||||
extensions/*-extension/bin/*.tar.gz
|
|
||||||
extensions/*-extension/bin/vulkaninfoSDK.exe
|
|
||||||
extensions/*-extension/bin/vulkaninfo
|
|
||||||
|
|
||||||
|
|
||||||
# Turborepo
|
# Turborepo
|
||||||
.turbo
|
.turbo
|
||||||
electron/test-data
|
electron/test-data
|
||||||
@ -50,3 +37,11 @@ electron/shared/**
|
|||||||
# docs
|
# docs
|
||||||
docs/yarn.lock
|
docs/yarn.lock
|
||||||
electron/.version.bak
|
electron/.version.bak
|
||||||
|
src-tauri/binaries/engines/cortex.llamacpp
|
||||||
|
src-tauri/resources/themes
|
||||||
|
src-tauri/resources/lib
|
||||||
|
src-tauri/Cargo.lock
|
||||||
|
src-tauri/icons
|
||||||
|
!src-tauri/icons/icon.png
|
||||||
|
src-tauri/gen/apple
|
||||||
|
src-tauri/resources/bin
|
||||||
|
|||||||
103
Makefile
@ -24,95 +24,44 @@ ifeq ($(OS),Windows_NT)
|
|||||||
echo "skip"
|
echo "skip"
|
||||||
endif
|
endif
|
||||||
yarn install
|
yarn install
|
||||||
yarn build:joi
|
|
||||||
yarn build:core
|
yarn build:core
|
||||||
yarn build:server
|
|
||||||
yarn build:extensions
|
yarn build:extensions
|
||||||
|
|
||||||
check-file-counts: install-and-build
|
dev: install-and-build
|
||||||
ifeq ($(OS),Windows_NT)
|
yarn install:cortex
|
||||||
powershell -Command "if ((Get-ChildItem -Path pre-install -Filter *.tgz | Measure-Object | Select-Object -ExpandProperty Count) -ne (Get-ChildItem -Path extensions -Directory | Where-Object Name -like *-extension* | Measure-Object | Select-Object -ExpandProperty Count)) { Write-Host 'Number of .tgz files in pre-install does not match the number of subdirectories in extensions with package.json'; exit 1 } else { Write-Host 'Extension build successful' }"
|
yarn download:bin
|
||||||
else
|
yarn copy:lib
|
||||||
@tgz_count=$$(find pre-install -type f -name "*.tgz" | wc -l); dir_count=$$(find extensions -mindepth 1 -maxdepth 1 -type d -exec test -e '{}/package.json' \; -print | wc -l); if [ $$tgz_count -ne $$dir_count ]; then echo "Number of .tgz files in pre-install ($$tgz_count) does not match the number of subdirectories in extension ($$dir_count)"; exit 1; else echo "Extension build successful"; fi
|
|
||||||
endif
|
|
||||||
|
|
||||||
dev: check-file-counts
|
|
||||||
yarn dev
|
yarn dev
|
||||||
|
|
||||||
|
# Deprecated soon
|
||||||
|
dev-tauri: install-and-build
|
||||||
|
yarn install:cortex
|
||||||
|
yarn download:bin
|
||||||
|
yarn copy:lib
|
||||||
|
yarn dev:tauri
|
||||||
|
|
||||||
# Linting
|
# Linting
|
||||||
lint: check-file-counts
|
lint: install-and-build
|
||||||
yarn lint
|
yarn lint
|
||||||
|
|
||||||
update-playwright-config:
|
|
||||||
ifeq ($(OS),Windows_NT)
|
|
||||||
echo -e "const RPconfig = {\n\
|
|
||||||
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
|
|
||||||
endpoint: '$(REPORT_PORTAL_URL)',\n\
|
|
||||||
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
|
|
||||||
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
|
|
||||||
attributes: [\n\
|
|
||||||
{\n\
|
|
||||||
key: 'key',\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
{\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
],\n\
|
|
||||||
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
|
|
||||||
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
|
|
||||||
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
|
|
||||||
|
|
||||||
else ifeq ($(shell uname -s),Linux)
|
|
||||||
echo "const RPconfig = {\n\
|
|
||||||
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
|
|
||||||
endpoint: '$(REPORT_PORTAL_URL)',\n\
|
|
||||||
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
|
|
||||||
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
|
|
||||||
attributes: [\n\
|
|
||||||
{\n\
|
|
||||||
key: 'key',\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
{\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
],\n\
|
|
||||||
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
|
|
||||||
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
|
|
||||||
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
|
|
||||||
else
|
|
||||||
echo "const RPconfig = {\n\
|
|
||||||
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
|
|
||||||
endpoint: '$(REPORT_PORTAL_URL)',\n\
|
|
||||||
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
|
|
||||||
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
|
|
||||||
attributes: [\n\
|
|
||||||
{\n\
|
|
||||||
key: 'key',\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
{\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
],\n\
|
|
||||||
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
|
|
||||||
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
|
|
||||||
sed -i '' "s|^ reporter: .*| reporter: [['@reportportal\/agent-js-playwright', RPconfig]],|" electron/playwright.config.ts
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
test: lint
|
test: lint
|
||||||
yarn build:test
|
# yarn build:test
|
||||||
yarn test:coverage
|
# yarn test:coverage
|
||||||
|
# Need e2e setup for tauri backend
|
||||||
yarn test
|
yarn test
|
||||||
|
|
||||||
# Builds and publishes the app
|
# Builds and publishes the app
|
||||||
build-and-publish: check-file-counts
|
build-and-publish: install-and-build
|
||||||
yarn build:publish
|
yarn build
|
||||||
|
|
||||||
# Build
|
# Build
|
||||||
build: check-file-counts
|
build: install-and-build
|
||||||
|
yarn build
|
||||||
|
|
||||||
|
# Deprecated soon
|
||||||
|
build-tauri: install-and-build
|
||||||
|
yarn copy:lib
|
||||||
yarn build
|
yarn build
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
@ -122,6 +71,8 @@ ifeq ($(OS),Windows_NT)
|
|||||||
-powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz"
|
-powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz"
|
||||||
-powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz"
|
-powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz"
|
||||||
-powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz"
|
-powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz"
|
||||||
|
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/resources"
|
||||||
|
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/target"
|
||||||
-powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }"
|
-powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }"
|
||||||
else ifeq ($(shell uname -s),Linux)
|
else ifeq ($(shell uname -s),Linux)
|
||||||
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
||||||
@ -136,6 +87,8 @@ else ifeq ($(shell uname -s),Linux)
|
|||||||
rm -rf ./pre-install/*.tgz
|
rm -rf ./pre-install/*.tgz
|
||||||
rm -rf ./extensions/*/*.tgz
|
rm -rf ./extensions/*/*.tgz
|
||||||
rm -rf ./electron/pre-install/*.tgz
|
rm -rf ./electron/pre-install/*.tgz
|
||||||
|
rm -rf ./src-tauri/resources
|
||||||
|
rm -rf ./src-tauri/target
|
||||||
rm -rf "~/jan/extensions"
|
rm -rf "~/jan/extensions"
|
||||||
rm -rf "~/.cache/jan*"
|
rm -rf "~/.cache/jan*"
|
||||||
else
|
else
|
||||||
@ -150,6 +103,8 @@ else
|
|||||||
rm -rf ./pre-install/*.tgz
|
rm -rf ./pre-install/*.tgz
|
||||||
rm -rf ./extensions/*/*.tgz
|
rm -rf ./extensions/*/*.tgz
|
||||||
rm -rf ./electron/pre-install/*.tgz
|
rm -rf ./electron/pre-install/*.tgz
|
||||||
|
rm -rf ./src-tauri/resources
|
||||||
|
rm -rf ./src-tauri/target
|
||||||
rm -rf ~/jan/extensions
|
rm -rf ~/jan/extensions
|
||||||
rm -rf ~/Library/Caches/jan*
|
rm -rf ~/Library/Caches/jan*
|
||||||
endif
|
endif
|
||||||
|
|||||||
@ -8,31 +8,13 @@
|
|||||||
],
|
],
|
||||||
"homepage": "https://jan.ai",
|
"homepage": "https://jan.ai",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"browser": "dist/index.js",
|
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"module": "dist/node/index.cjs.js",
|
|
||||||
"typings": "dist/types/index.d.ts",
|
"typings": "dist/types/index.d.ts",
|
||||||
"files": [
|
"files": [
|
||||||
"dist",
|
"dist",
|
||||||
"types"
|
"types"
|
||||||
],
|
],
|
||||||
"author": "Jan <service@jan.ai>",
|
"author": "Jan <service@jan.ai>",
|
||||||
"exports": {
|
|
||||||
".": "./dist/index.js",
|
|
||||||
"./node": "./dist/node/index.cjs.js"
|
|
||||||
},
|
|
||||||
"typesVersions": {
|
|
||||||
"*": {
|
|
||||||
".": [
|
|
||||||
"./dist/index.js.map",
|
|
||||||
"./dist/types/index.d.ts"
|
|
||||||
],
|
|
||||||
"node": [
|
|
||||||
"./dist/node/index.cjs.js.map",
|
|
||||||
"./dist/types/node/index.d.ts"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
|
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
|
||||||
"test": "jest",
|
"test": "jest",
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
/**
|
||||||
|
* @jest-environment jsdom
|
||||||
|
*/
|
||||||
import { openExternalUrl } from './core'
|
import { openExternalUrl } from './core'
|
||||||
import { joinPath } from './core'
|
import { joinPath } from './core'
|
||||||
import { openFileExplorer } from './core'
|
import { openFileExplorer } from './core'
|
||||||
@ -25,7 +28,7 @@ describe('test core apis', () => {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
const result = await joinPath(paths)
|
const result = await joinPath(paths)
|
||||||
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith(paths)
|
expect(globalThis.core.api.joinPath).toHaveBeenCalledWith({ args: paths })
|
||||||
expect(result).toBe('/path/one/path/two')
|
expect(result).toBe('/path/one/path/two')
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -37,7 +40,7 @@ describe('test core apis', () => {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
const result = await openFileExplorer(path)
|
const result = await openFileExplorer(path)
|
||||||
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith(path)
|
expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith({ path })
|
||||||
expect(result).toBe('opened')
|
expect(result).toBe('opened')
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -51,20 +54,6 @@ describe('test core apis', () => {
|
|||||||
expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled()
|
expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled()
|
||||||
expect(result).toBe('/path/to/jan/data')
|
expect(result).toBe('/path/to/jan/data')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should execute function on main process', async () => {
|
|
||||||
const extension = 'testExtension'
|
|
||||||
const method = 'testMethod'
|
|
||||||
const args = ['arg1', 'arg2']
|
|
||||||
globalThis.core = {
|
|
||||||
api: {
|
|
||||||
invokeExtensionFunc: jest.fn().mockResolvedValue('result'),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
const result = await executeOnMain(extension, method, ...args)
|
|
||||||
expect(globalThis.core.api.invokeExtensionFunc).toHaveBeenCalledWith(extension, method, ...args)
|
|
||||||
expect(result).toBe('result')
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('dirName - just a pass thru api', () => {
|
describe('dirName - just a pass thru api', () => {
|
||||||
|
|||||||
@ -13,8 +13,11 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom
|
|||||||
extension,
|
extension,
|
||||||
method,
|
method,
|
||||||
...args
|
...args
|
||||||
) => globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
|
) => {
|
||||||
|
if ('electronAPI' in window && window.electronAPI)
|
||||||
|
return globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
|
||||||
|
return () => {}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets Jan's data folder path.
|
* Gets Jan's data folder path.
|
||||||
@ -29,15 +32,15 @@ const getJanDataFolderPath = (): Promise<string> => globalThis.core.api?.getJanD
|
|||||||
* @returns {Promise<any>} A promise that resolves when the file explorer is opened.
|
* @returns {Promise<any>} A promise that resolves when the file explorer is opened.
|
||||||
*/
|
*/
|
||||||
const openFileExplorer: (path: string) => Promise<any> = (path) =>
|
const openFileExplorer: (path: string) => Promise<any> = (path) =>
|
||||||
globalThis.core.api?.openFileExplorer(path)
|
globalThis.core.api?.openFileExplorer({ path })
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Joins multiple paths together.
|
* Joins multiple paths together.
|
||||||
* @param paths - The paths to join.
|
* @param paths - The paths to join.
|
||||||
* @returns {Promise<string>} A promise that resolves with the joined path.
|
* @returns {Promise<string>} A promise that resolves with the joined path.
|
||||||
*/
|
*/
|
||||||
const joinPath: (paths: string[]) => Promise<string> = (paths) =>
|
const joinPath: (args: string[]) => Promise<string> = (args) =>
|
||||||
globalThis.core.api?.joinPath(paths)
|
globalThis.core.api?.joinPath({ args })
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get dirname of a file path.
|
* Get dirname of a file path.
|
||||||
|
|||||||
@ -1,7 +1,5 @@
|
|||||||
import { BaseExtension } from './extension'
|
import { BaseExtension } from './extension'
|
||||||
import { SettingComponentProps } from '../types'
|
import { SettingComponentProps } from '../types'
|
||||||
import { getJanDataFolderPath, joinPath } from './core'
|
|
||||||
import { fs } from './fs'
|
|
||||||
jest.mock('./core')
|
jest.mock('./core')
|
||||||
jest.mock('./fs')
|
jest.mock('./fs')
|
||||||
|
|
||||||
@ -90,18 +88,32 @@ describe('BaseExtension', () => {
|
|||||||
{ key: 'setting2', controllerProps: { value: 'value2' } } as any,
|
{ key: 'setting2', controllerProps: { value: 'value2' } } as any,
|
||||||
]
|
]
|
||||||
|
|
||||||
;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
|
const localStorageMock = (() => {
|
||||||
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension')
|
let store: Record<string, string> = {}
|
||||||
;(fs.existsSync as jest.Mock).mockResolvedValue(false)
|
|
||||||
;(fs.mkdir as jest.Mock).mockResolvedValue(undefined)
|
|
||||||
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
|
|
||||||
|
|
||||||
|
return {
|
||||||
|
getItem: (key: string) => store[key] || null,
|
||||||
|
setItem: (key: string, value: string) => {
|
||||||
|
store[key] = value
|
||||||
|
},
|
||||||
|
removeItem: (key: string) => {
|
||||||
|
delete store[key]
|
||||||
|
},
|
||||||
|
clear: () => {
|
||||||
|
store = {}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
})()
|
||||||
|
|
||||||
|
Object.defineProperty(global, 'localStorage', {
|
||||||
|
value: localStorageMock,
|
||||||
|
})
|
||||||
|
const mock = jest.spyOn(localStorage, 'setItem')
|
||||||
await baseExtension.registerSettings(settings)
|
await baseExtension.registerSettings(settings)
|
||||||
|
|
||||||
expect(fs.mkdir).toHaveBeenCalledWith('/data/settings/TestExtension')
|
expect(mock).toHaveBeenCalledWith(
|
||||||
expect(fs.writeFileSync).toHaveBeenCalledWith(
|
'TestExtension',
|
||||||
'/data/settings/TestExtension',
|
JSON.stringify(settings)
|
||||||
JSON.stringify(settings, null, 2)
|
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -125,17 +137,15 @@ describe('BaseExtension', () => {
|
|||||||
]
|
]
|
||||||
|
|
||||||
jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
|
jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
|
||||||
;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data')
|
const mockSetItem = jest.spyOn(localStorage, 'setItem')
|
||||||
;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension/settings.json')
|
|
||||||
;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined)
|
|
||||||
|
|
||||||
await baseExtension.updateSettings([
|
await baseExtension.updateSettings([
|
||||||
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any,
|
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any,
|
||||||
])
|
])
|
||||||
|
|
||||||
expect(fs.writeFileSync).toHaveBeenCalledWith(
|
expect(mockSetItem).toHaveBeenCalledWith(
|
||||||
'/data/settings/TestExtension/settings.json',
|
'TestExtension',
|
||||||
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }], null, 2)
|
JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }])
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,7 +1,4 @@
|
|||||||
import { Model, ModelEvent, SettingComponentProps } from '../types'
|
import { Model, SettingComponentProps } from '../types'
|
||||||
import { getJanDataFolderPath, joinPath } from './core'
|
|
||||||
import { events } from './events'
|
|
||||||
import { fs } from './fs'
|
|
||||||
import { ModelManager } from './models'
|
import { ModelManager } from './models'
|
||||||
|
|
||||||
export enum ExtensionTypeEnum {
|
export enum ExtensionTypeEnum {
|
||||||
@ -117,22 +114,14 @@ export abstract class BaseExtension implements ExtensionType {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
const extensionSettingFolderPath = await joinPath([
|
|
||||||
await getJanDataFolderPath(),
|
|
||||||
'settings',
|
|
||||||
this.name,
|
|
||||||
])
|
|
||||||
settings.forEach((setting) => {
|
settings.forEach((setting) => {
|
||||||
setting.extensionName = this.name
|
setting.extensionName = this.name
|
||||||
})
|
})
|
||||||
try {
|
try {
|
||||||
if (!(await fs.existsSync(extensionSettingFolderPath)))
|
const oldSettingsJson = localStorage.getItem(this.name)
|
||||||
await fs.mkdir(extensionSettingFolderPath)
|
|
||||||
const settingFilePath = await joinPath([extensionSettingFolderPath, this.settingFileName])
|
|
||||||
|
|
||||||
// Persists new settings
|
// Persists new settings
|
||||||
if (await fs.existsSync(settingFilePath)) {
|
if (oldSettingsJson) {
|
||||||
const oldSettings = JSON.parse(await fs.readFileSync(settingFilePath, 'utf-8'))
|
const oldSettings = JSON.parse(oldSettingsJson)
|
||||||
settings.forEach((setting) => {
|
settings.forEach((setting) => {
|
||||||
// Keep setting value
|
// Keep setting value
|
||||||
if (setting.controllerProps && Array.isArray(oldSettings))
|
if (setting.controllerProps && Array.isArray(oldSettings))
|
||||||
@ -141,7 +130,7 @@ export abstract class BaseExtension implements ExtensionType {
|
|||||||
)?.controllerProps?.value
|
)?.controllerProps?.value
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
await fs.writeFileSync(settingFilePath, JSON.stringify(settings, null, 2))
|
localStorage.setItem(this.name, JSON.stringify(settings))
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(err)
|
console.error(err)
|
||||||
}
|
}
|
||||||
@ -180,17 +169,10 @@ export abstract class BaseExtension implements ExtensionType {
|
|||||||
async getSettings(): Promise<SettingComponentProps[]> {
|
async getSettings(): Promise<SettingComponentProps[]> {
|
||||||
if (!this.name) return []
|
if (!this.name) return []
|
||||||
|
|
||||||
const settingPath = await joinPath([
|
|
||||||
await getJanDataFolderPath(),
|
|
||||||
this.settingFolderName,
|
|
||||||
this.name,
|
|
||||||
this.settingFileName,
|
|
||||||
])
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (!(await fs.existsSync(settingPath))) return []
|
const settingsString = localStorage.getItem(this.name)
|
||||||
const content = await fs.readFileSync(settingPath, 'utf-8')
|
if (!settingsString) return []
|
||||||
const settings: SettingComponentProps[] = JSON.parse(content)
|
const settings: SettingComponentProps[] = JSON.parse(settingsString)
|
||||||
return settings
|
return settings
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.warn(err)
|
console.warn(err)
|
||||||
@ -220,19 +202,7 @@ export abstract class BaseExtension implements ExtensionType {
|
|||||||
|
|
||||||
if (!updatedSettings.length) updatedSettings = componentProps as SettingComponentProps[]
|
if (!updatedSettings.length) updatedSettings = componentProps as SettingComponentProps[]
|
||||||
|
|
||||||
const settingFolder = await joinPath([
|
localStorage.setItem(this.name, JSON.stringify(updatedSettings))
|
||||||
await getJanDataFolderPath(),
|
|
||||||
this.settingFolderName,
|
|
||||||
this.name,
|
|
||||||
])
|
|
||||||
|
|
||||||
if (!(await fs.existsSync(settingFolder))) {
|
|
||||||
await fs.mkdir(settingFolder)
|
|
||||||
}
|
|
||||||
|
|
||||||
const settingPath = await joinPath([settingFolder, this.settingFileName])
|
|
||||||
|
|
||||||
await fs.writeFileSync(settingPath, JSON.stringify(updatedSettings, null, 2))
|
|
||||||
|
|
||||||
updatedSettings.forEach((setting) => {
|
updatedSettings.forEach((setting) => {
|
||||||
this.onSettingUpdate<typeof setting.controllerProps.value>(
|
this.onSettingUpdate<typeof setting.controllerProps.value>(
|
||||||
|
|||||||
@ -31,21 +31,21 @@ export abstract class AIEngine extends BaseExtension {
|
|||||||
/**
|
/**
|
||||||
* Loads the model.
|
* Loads the model.
|
||||||
*/
|
*/
|
||||||
async loadModel(model: Model): Promise<any> {
|
async loadModel(model: Partial<Model>, abortController?: AbortController): Promise<any> {
|
||||||
if (model.engine.toString() !== this.provider) return Promise.resolve()
|
if (model?.engine?.toString() !== this.provider) return Promise.resolve()
|
||||||
events.emit(ModelEvent.OnModelReady, model)
|
events.emit(ModelEvent.OnModelReady, model)
|
||||||
return Promise.resolve()
|
return Promise.resolve()
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* Stops the model.
|
* Stops the model.
|
||||||
*/
|
*/
|
||||||
async unloadModel(model?: Model): Promise<any> {
|
async unloadModel(model?: Partial<Model>): Promise<any> {
|
||||||
if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
|
if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
|
||||||
events.emit(ModelEvent.OnModelStopped, model ?? {})
|
events.emit(ModelEvent.OnModelStopped, model ?? {})
|
||||||
return Promise.resolve()
|
return Promise.resolve()
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/**
|
||||||
* Inference request
|
* Inference request
|
||||||
*/
|
*/
|
||||||
inference(data: MessageRequest) {}
|
inference(data: MessageRequest) {}
|
||||||
|
|||||||
@ -1,4 +1,3 @@
|
|||||||
import { InferenceEngine } from '../../../types'
|
|
||||||
import { AIEngine } from './AIEngine'
|
import { AIEngine } from './AIEngine'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -6,6 +5,7 @@ import { AIEngine } from './AIEngine'
|
|||||||
*/
|
*/
|
||||||
export class EngineManager {
|
export class EngineManager {
|
||||||
public engines = new Map<string, AIEngine>()
|
public engines = new Map<string, AIEngine>()
|
||||||
|
public controller: AbortController | null = null
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Registers an engine.
|
* Registers an engine.
|
||||||
@ -21,22 +21,6 @@ export class EngineManager {
|
|||||||
* @returns The engine, if found.
|
* @returns The engine, if found.
|
||||||
*/
|
*/
|
||||||
get<T extends AIEngine>(provider: string): T | undefined {
|
get<T extends AIEngine>(provider: string): T | undefined {
|
||||||
// Backward compatible provider
|
|
||||||
// nitro is migrated to cortex
|
|
||||||
if (
|
|
||||||
[
|
|
||||||
InferenceEngine.nitro,
|
|
||||||
InferenceEngine.cortex,
|
|
||||||
InferenceEngine.cortex_llamacpp,
|
|
||||||
InferenceEngine.cortex_onnx,
|
|
||||||
InferenceEngine.cortex_tensorrtllm,
|
|
||||||
InferenceEngine.cortex_onnx,
|
|
||||||
]
|
|
||||||
.map((e) => e.toString())
|
|
||||||
.includes(provider)
|
|
||||||
)
|
|
||||||
provider = InferenceEngine.cortex
|
|
||||||
|
|
||||||
return this.engines.get(provider) as T | undefined
|
return this.engines.get(provider) as T | undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -29,7 +29,7 @@ export abstract class LocalOAIEngine extends OAIEngine {
|
|||||||
/**
|
/**
|
||||||
* Load the model.
|
* Load the model.
|
||||||
*/
|
*/
|
||||||
override async loadModel(model: Model & { file_path?: string }): Promise<void> {
|
override async loadModel(model: Model & { file_path?: string }, abortController?: AbortController): Promise<void> {
|
||||||
if (model.engine.toString() !== this.provider) return
|
if (model.engine.toString() !== this.provider) return
|
||||||
const modelFolder = 'file_path' in model && model.file_path ? await dirName(model.file_path) : await this.getModelFilePath(model.id)
|
const modelFolder = 'file_path' in model && model.file_path ? await dirName(model.file_path) : await this.getModelFilePath(model.id)
|
||||||
const systemInfo = await systemInformation()
|
const systemInfo = await systemInformation()
|
||||||
|
|||||||
@ -12,11 +12,7 @@ import {
|
|||||||
ChatCompletionRole,
|
ChatCompletionRole,
|
||||||
ContentType,
|
ContentType,
|
||||||
} from '../../../types'
|
} from '../../../types'
|
||||||
import { requestInference } from './helpers/sse'
|
|
||||||
import { ulid } from 'ulidx'
|
|
||||||
|
|
||||||
jest.mock('./helpers/sse')
|
|
||||||
jest.mock('ulidx')
|
|
||||||
jest.mock('../../events')
|
jest.mock('../../events')
|
||||||
|
|
||||||
class TestOAIEngine extends OAIEngine {
|
class TestOAIEngine extends OAIEngine {
|
||||||
@ -48,79 +44,6 @@ describe('OAIEngine', () => {
|
|||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should handle inference request', async () => {
|
|
||||||
const data: MessageRequest = {
|
|
||||||
model: { engine: 'test-provider', id: 'test-model' } as any,
|
|
||||||
threadId: 'test-thread',
|
|
||||||
type: MessageRequestType.Thread,
|
|
||||||
assistantId: 'test-assistant',
|
|
||||||
messages: [{ role: ChatCompletionRole.User, content: 'Hello' }],
|
|
||||||
}
|
|
||||||
|
|
||||||
;(ulid as jest.Mock).mockReturnValue('test-id')
|
|
||||||
;(requestInference as jest.Mock).mockReturnValue({
|
|
||||||
subscribe: ({ next, complete }: any) => {
|
|
||||||
next('test response')
|
|
||||||
complete()
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
await engine.inference(data)
|
|
||||||
|
|
||||||
expect(requestInference).toHaveBeenCalledWith(
|
|
||||||
'http://test-inference-url',
|
|
||||||
expect.objectContaining({ model: 'test-model' }),
|
|
||||||
expect.any(Object),
|
|
||||||
expect.any(AbortController),
|
|
||||||
{ Authorization: 'Bearer test-token' },
|
|
||||||
undefined
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(
|
|
||||||
MessageEvent.OnMessageResponse,
|
|
||||||
expect.objectContaining({ id: 'test-id' })
|
|
||||||
)
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(
|
|
||||||
MessageEvent.OnMessageUpdate,
|
|
||||||
expect.objectContaining({
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: ContentType.Text,
|
|
||||||
text: { value: 'test response', annotations: [] },
|
|
||||||
},
|
|
||||||
],
|
|
||||||
status: MessageStatus.Ready,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should handle inference error', async () => {
|
|
||||||
const data: MessageRequest = {
|
|
||||||
model: { engine: 'test-provider', id: 'test-model' } as any,
|
|
||||||
threadId: 'test-thread',
|
|
||||||
type: MessageRequestType.Thread,
|
|
||||||
assistantId: 'test-assistant',
|
|
||||||
messages: [{ role: ChatCompletionRole.User, content: 'Hello' }],
|
|
||||||
}
|
|
||||||
|
|
||||||
;(ulid as jest.Mock).mockReturnValue('test-id')
|
|
||||||
;(requestInference as jest.Mock).mockReturnValue({
|
|
||||||
subscribe: ({ error }: any) => {
|
|
||||||
error({ message: 'test error', code: 500 })
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
await engine.inference(data)
|
|
||||||
|
|
||||||
expect(events.emit).toHaveBeenLastCalledWith(
|
|
||||||
MessageEvent.OnMessageUpdate,
|
|
||||||
expect.objectContaining({
|
|
||||||
status: 'error',
|
|
||||||
error_code: 500,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should stop inference', () => {
|
it('should stop inference', () => {
|
||||||
engine.stopInference()
|
engine.stopInference()
|
||||||
expect(engine.isCancelled).toBe(true)
|
expect(engine.isCancelled).toBe(true)
|
||||||
|
|||||||
@ -1,18 +1,9 @@
|
|||||||
import { requestInference } from './helpers/sse'
|
|
||||||
import { ulid } from 'ulidx'
|
|
||||||
import { AIEngine } from './AIEngine'
|
import { AIEngine } from './AIEngine'
|
||||||
import {
|
import {
|
||||||
ChatCompletionRole,
|
|
||||||
ContentType,
|
|
||||||
InferenceEvent,
|
InferenceEvent,
|
||||||
MessageEvent,
|
MessageEvent,
|
||||||
MessageRequest,
|
MessageRequest,
|
||||||
MessageRequestType,
|
|
||||||
MessageStatus,
|
|
||||||
Model,
|
Model,
|
||||||
ModelInfo,
|
|
||||||
ThreadContent,
|
|
||||||
ThreadMessage,
|
|
||||||
} from '../../../types'
|
} from '../../../types'
|
||||||
import { events } from '../../events'
|
import { events } from '../../events'
|
||||||
|
|
||||||
@ -53,111 +44,6 @@ export abstract class OAIEngine extends AIEngine {
|
|||||||
*/
|
*/
|
||||||
override onUnload(): void {}
|
override onUnload(): void {}
|
||||||
|
|
||||||
/*
|
|
||||||
* Inference request
|
|
||||||
*/
|
|
||||||
override async inference(data: MessageRequest) {
|
|
||||||
if (!data.model?.id) {
|
|
||||||
events.emit(MessageEvent.OnMessageResponse, {
|
|
||||||
status: MessageStatus.Error,
|
|
||||||
content: [
|
|
||||||
{
|
|
||||||
type: ContentType.Text,
|
|
||||||
text: {
|
|
||||||
value: 'No model ID provided',
|
|
||||||
annotations: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const timestamp = Date.now() / 1000
|
|
||||||
const message: ThreadMessage = {
|
|
||||||
id: ulid(),
|
|
||||||
thread_id: data.threadId,
|
|
||||||
type: data.type,
|
|
||||||
assistant_id: data.assistantId,
|
|
||||||
role: ChatCompletionRole.Assistant,
|
|
||||||
content: [],
|
|
||||||
status: MessageStatus.Pending,
|
|
||||||
created_at: timestamp,
|
|
||||||
completed_at: timestamp,
|
|
||||||
object: 'thread.message',
|
|
||||||
}
|
|
||||||
|
|
||||||
if (data.type !== MessageRequestType.Summary) {
|
|
||||||
events.emit(MessageEvent.OnMessageResponse, message)
|
|
||||||
}
|
|
||||||
|
|
||||||
this.isCancelled = false
|
|
||||||
this.controller = new AbortController()
|
|
||||||
|
|
||||||
const model: ModelInfo = {
|
|
||||||
...(this.loadedModel ? this.loadedModel : {}),
|
|
||||||
...data.model,
|
|
||||||
}
|
|
||||||
|
|
||||||
const header = await this.headers()
|
|
||||||
let requestBody = {
|
|
||||||
messages: data.messages ?? [],
|
|
||||||
model: model.id,
|
|
||||||
stream: true,
|
|
||||||
...model.parameters,
|
|
||||||
}
|
|
||||||
if (this.transformPayload) {
|
|
||||||
requestBody = this.transformPayload(requestBody)
|
|
||||||
}
|
|
||||||
|
|
||||||
requestInference(
|
|
||||||
this.inferenceUrl,
|
|
||||||
requestBody,
|
|
||||||
model,
|
|
||||||
this.controller,
|
|
||||||
header,
|
|
||||||
this.transformResponse
|
|
||||||
).subscribe({
|
|
||||||
next: (content: any) => {
|
|
||||||
const messageContent: ThreadContent = {
|
|
||||||
type: ContentType.Text,
|
|
||||||
text: {
|
|
||||||
value: content.trim(),
|
|
||||||
annotations: [],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
message.content = [messageContent]
|
|
||||||
events.emit(MessageEvent.OnMessageUpdate, message)
|
|
||||||
},
|
|
||||||
complete: async () => {
|
|
||||||
message.status = message.content.length
|
|
||||||
? MessageStatus.Ready
|
|
||||||
: MessageStatus.Error
|
|
||||||
events.emit(MessageEvent.OnMessageUpdate, message)
|
|
||||||
},
|
|
||||||
error: async (err: any) => {
|
|
||||||
if (this.isCancelled || message.content.length) {
|
|
||||||
message.status = MessageStatus.Stopped
|
|
||||||
events.emit(MessageEvent.OnMessageUpdate, message)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
message.status = MessageStatus.Error
|
|
||||||
message.content[0] = {
|
|
||||||
type: ContentType.Text,
|
|
||||||
text: {
|
|
||||||
value:
|
|
||||||
typeof message === 'string'
|
|
||||||
? err.message
|
|
||||||
: (JSON.stringify(err.message) ?? err.detail),
|
|
||||||
annotations: [],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
message.error_code = err.code
|
|
||||||
events.emit(MessageEvent.OnMessageUpdate, message)
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stops the inference.
|
* Stops the inference.
|
||||||
*/
|
*/
|
||||||
|
|||||||
@ -1,146 +0,0 @@
|
|||||||
import { lastValueFrom, Observable } from 'rxjs'
|
|
||||||
import { requestInference } from './sse'
|
|
||||||
|
|
||||||
import { ReadableStream } from 'stream/web'
|
|
||||||
describe('requestInference', () => {
|
|
||||||
it('should send a request to the inference server and return an Observable', () => {
|
|
||||||
// Mock the fetch function
|
|
||||||
const mockFetch: any = jest.fn(() =>
|
|
||||||
Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () =>
|
|
||||||
Promise.resolve({
|
|
||||||
choices: [{ message: { content: 'Generated response' } }],
|
|
||||||
}),
|
|
||||||
headers: new Headers(),
|
|
||||||
redirected: false,
|
|
||||||
status: 200,
|
|
||||||
statusText: 'OK',
|
|
||||||
// Add other required properties here
|
|
||||||
})
|
|
||||||
)
|
|
||||||
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
|
|
||||||
|
|
||||||
// Define the test inputs
|
|
||||||
const inferenceUrl = 'https://inference-server.com'
|
|
||||||
const requestBody = { message: 'Hello' }
|
|
||||||
const model = { id: 'model-id', parameters: { stream: false } }
|
|
||||||
|
|
||||||
// Call the function
|
|
||||||
const result = requestInference(inferenceUrl, requestBody, model)
|
|
||||||
|
|
||||||
// Assert the expected behavior
|
|
||||||
expect(result).toBeInstanceOf(Observable)
|
|
||||||
expect(lastValueFrom(result)).resolves.toEqual('Generated response')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('returns 401 error', () => {
|
|
||||||
// Mock the fetch function
|
|
||||||
const mockFetch: any = jest.fn(() =>
|
|
||||||
Promise.resolve({
|
|
||||||
ok: false,
|
|
||||||
json: () =>
|
|
||||||
Promise.resolve({
|
|
||||||
error: { message: 'Invalid API Key.', code: 'invalid_api_key' },
|
|
||||||
}),
|
|
||||||
headers: new Headers(),
|
|
||||||
redirected: false,
|
|
||||||
status: 401,
|
|
||||||
statusText: 'invalid_api_key',
|
|
||||||
// Add other required properties here
|
|
||||||
})
|
|
||||||
)
|
|
||||||
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
|
|
||||||
|
|
||||||
// Define the test inputs
|
|
||||||
const inferenceUrl = 'https://inference-server.com'
|
|
||||||
const requestBody = { message: 'Hello' }
|
|
||||||
const model = { id: 'model-id', parameters: { stream: false } }
|
|
||||||
|
|
||||||
// Call the function
|
|
||||||
const result = requestInference(inferenceUrl, requestBody, model)
|
|
||||||
|
|
||||||
// Assert the expected behavior
|
|
||||||
expect(result).toBeInstanceOf(Observable)
|
|
||||||
expect(lastValueFrom(result)).rejects.toEqual({
|
|
||||||
message: 'Invalid API Key.',
|
|
||||||
code: 'invalid_api_key',
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should handle a successful response with a transformResponse function', () => {
|
|
||||||
// Mock the fetch function
|
|
||||||
const mockFetch: any = jest.fn(() =>
|
|
||||||
Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () =>
|
|
||||||
Promise.resolve({
|
|
||||||
choices: [{ message: { content: 'Generated response' } }],
|
|
||||||
}),
|
|
||||||
headers: new Headers(),
|
|
||||||
redirected: false,
|
|
||||||
status: 200,
|
|
||||||
statusText: 'OK',
|
|
||||||
})
|
|
||||||
)
|
|
||||||
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
|
|
||||||
|
|
||||||
// Define the test inputs
|
|
||||||
const inferenceUrl = 'https://inference-server.com'
|
|
||||||
const requestBody = { message: 'Hello' }
|
|
||||||
const model = { id: 'model-id', parameters: { stream: false } }
|
|
||||||
const transformResponse = (data: any) =>
|
|
||||||
data.choices[0].message.content.toUpperCase()
|
|
||||||
|
|
||||||
// Call the function
|
|
||||||
const result = requestInference(
|
|
||||||
inferenceUrl,
|
|
||||||
requestBody,
|
|
||||||
model,
|
|
||||||
undefined,
|
|
||||||
undefined,
|
|
||||||
transformResponse
|
|
||||||
)
|
|
||||||
|
|
||||||
// Assert the expected behavior
|
|
||||||
expect(result).toBeInstanceOf(Observable)
|
|
||||||
expect(lastValueFrom(result)).resolves.toEqual('GENERATED RESPONSE')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should handle a successful response with streaming enabled', () => {
|
|
||||||
// Mock the fetch function
|
|
||||||
const mockFetch: any = jest.fn(() =>
|
|
||||||
Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
body: new ReadableStream({
|
|
||||||
start(controller) {
|
|
||||||
controller.enqueue(
|
|
||||||
new TextEncoder().encode(
|
|
||||||
'data: {"choices": [{"delta": {"content": "Streamed"}}]}'
|
|
||||||
)
|
|
||||||
)
|
|
||||||
controller.enqueue(new TextEncoder().encode('data: [DONE]'))
|
|
||||||
controller.close()
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
headers: new Headers(),
|
|
||||||
redirected: false,
|
|
||||||
status: 200,
|
|
||||||
statusText: 'OK',
|
|
||||||
})
|
|
||||||
)
|
|
||||||
jest.spyOn(global, 'fetch').mockImplementation(mockFetch)
|
|
||||||
|
|
||||||
// Define the test inputs
|
|
||||||
const inferenceUrl = 'https://inference-server.com'
|
|
||||||
const requestBody = { message: 'Hello' }
|
|
||||||
const model = { id: 'model-id', parameters: { stream: true } }
|
|
||||||
|
|
||||||
// Call the function
|
|
||||||
const result = requestInference(inferenceUrl, requestBody, model)
|
|
||||||
|
|
||||||
// Assert the expected behavior
|
|
||||||
expect(result).toBeInstanceOf(Observable)
|
|
||||||
expect(lastValueFrom(result)).resolves.toEqual('Streamed')
|
|
||||||
})
|
|
||||||
@ -1,132 +0,0 @@
|
|||||||
import { Observable } from 'rxjs'
|
|
||||||
import { ErrorCode, ModelRuntimeParams } from '../../../../types'
|
|
||||||
/**
|
|
||||||
* Sends a request to the inference server to generate a response based on the recent messages.
|
|
||||||
* @param recentMessages - An array of recent messages to use as context for the inference.
|
|
||||||
* @returns An Observable that emits the generated response as a string.
|
|
||||||
*/
|
|
||||||
export function requestInference(
|
|
||||||
inferenceUrl: string,
|
|
||||||
requestBody: any,
|
|
||||||
model: {
|
|
||||||
id: string
|
|
||||||
parameters?: ModelRuntimeParams
|
|
||||||
},
|
|
||||||
controller?: AbortController,
|
|
||||||
headers?: HeadersInit,
|
|
||||||
transformResponse?: Function
|
|
||||||
): Observable<string> {
|
|
||||||
return new Observable((subscriber) => {
|
|
||||||
fetch(inferenceUrl, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
'Access-Control-Allow-Origin': '*',
|
|
||||||
'Accept': model.parameters?.stream
|
|
||||||
? 'text/event-stream'
|
|
||||||
: 'application/json',
|
|
||||||
...headers,
|
|
||||||
},
|
|
||||||
body: JSON.stringify(requestBody),
|
|
||||||
signal: controller?.signal,
|
|
||||||
})
|
|
||||||
.then(async (response) => {
|
|
||||||
if (!response.ok) {
|
|
||||||
if (response.status === 401) {
|
|
||||||
throw {
|
|
||||||
code: ErrorCode.InvalidApiKey,
|
|
||||||
message: 'Invalid API Key.',
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let data = await response.json()
|
|
||||||
try {
|
|
||||||
handleError(data)
|
|
||||||
} catch (err) {
|
|
||||||
subscriber.error(err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// There could be overriden stream parameter in the model
|
|
||||||
// that is set in request body (transformed payload)
|
|
||||||
if (
|
|
||||||
requestBody?.stream === false ||
|
|
||||||
model.parameters?.stream === false
|
|
||||||
) {
|
|
||||||
const data = await response.json()
|
|
||||||
try {
|
|
||||||
handleError(data)
|
|
||||||
} catch (err) {
|
|
||||||
subscriber.error(err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (transformResponse) {
|
|
||||||
subscriber.next(transformResponse(data))
|
|
||||||
} else {
|
|
||||||
subscriber.next(
|
|
||||||
data.choices
|
|
||||||
? data.choices[0]?.message?.content
|
|
||||||
: (data.content[0]?.text ?? '')
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const stream = response.body
|
|
||||||
const decoder = new TextDecoder('utf-8')
|
|
||||||
const reader = stream?.getReader()
|
|
||||||
let content = ''
|
|
||||||
|
|
||||||
while (true && reader) {
|
|
||||||
const { done, value } = await reader.read()
|
|
||||||
if (done) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
const text = decoder.decode(value)
|
|
||||||
const lines = text.trim().split('\n')
|
|
||||||
let cachedLines = ''
|
|
||||||
for (const line of lines) {
|
|
||||||
try {
|
|
||||||
if (transformResponse) {
|
|
||||||
content += transformResponse(line)
|
|
||||||
subscriber.next(content ?? '')
|
|
||||||
} else {
|
|
||||||
const toParse = cachedLines + line
|
|
||||||
if (!line.includes('data: [DONE]')) {
|
|
||||||
const data = JSON.parse(toParse.replace('data: ', ''))
|
|
||||||
try {
|
|
||||||
handleError(data)
|
|
||||||
} catch (err) {
|
|
||||||
subscriber.error(err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
content += data.choices[0]?.delta?.content ?? ''
|
|
||||||
if (content.startsWith('assistant: ')) {
|
|
||||||
content = content.replace('assistant: ', '')
|
|
||||||
}
|
|
||||||
if (content !== '') subscriber.next(content)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
cachedLines = line
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
subscriber.complete()
|
|
||||||
})
|
|
||||||
.catch((err) => subscriber.error(err))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle error and normalize it to a common format.
|
|
||||||
* @param data
|
|
||||||
*/
|
|
||||||
const handleError = (data: any) => {
|
|
||||||
if (
|
|
||||||
data.error ||
|
|
||||||
data.message ||
|
|
||||||
data.detail ||
|
|
||||||
(Array.isArray(data) && data.length && data[0].error)
|
|
||||||
) {
|
|
||||||
throw data.error ?? data[0]?.error ?? data
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,5 +1,4 @@
|
|||||||
import {
|
import {
|
||||||
InferenceEngine,
|
|
||||||
Engines,
|
Engines,
|
||||||
EngineVariant,
|
EngineVariant,
|
||||||
EngineReleased,
|
EngineReleased,
|
||||||
@ -28,7 +27,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @param name - Inference engine name.
|
* @param name - Inference engine name.
|
||||||
* @returns A Promise that resolves to an array of installed engine.
|
* @returns A Promise that resolves to an array of installed engine.
|
||||||
*/
|
*/
|
||||||
abstract getInstalledEngines(name: InferenceEngine): Promise<EngineVariant[]>
|
abstract getInstalledEngines(name: string): Promise<EngineVariant[]>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param name - Inference engine name.
|
* @param name - Inference engine name.
|
||||||
@ -37,7 +36,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @returns A Promise that resolves to an array of latest released engine by version.
|
* @returns A Promise that resolves to an array of latest released engine by version.
|
||||||
*/
|
*/
|
||||||
abstract getReleasedEnginesByVersion(
|
abstract getReleasedEnginesByVersion(
|
||||||
name: InferenceEngine,
|
name: string,
|
||||||
version: string,
|
version: string,
|
||||||
platform?: string
|
platform?: string
|
||||||
): Promise<EngineReleased[]>
|
): Promise<EngineReleased[]>
|
||||||
@ -48,7 +47,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @returns A Promise that resolves to an array of latest released engine.
|
* @returns A Promise that resolves to an array of latest released engine.
|
||||||
*/
|
*/
|
||||||
abstract getLatestReleasedEngine(
|
abstract getLatestReleasedEngine(
|
||||||
name: InferenceEngine,
|
name: string,
|
||||||
platform?: string
|
platform?: string
|
||||||
): Promise<EngineReleased[]>
|
): Promise<EngineReleased[]>
|
||||||
|
|
||||||
@ -74,7 +73,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @returns A Promise that resolves to unintall of engine.
|
* @returns A Promise that resolves to unintall of engine.
|
||||||
*/
|
*/
|
||||||
abstract uninstallEngine(
|
abstract uninstallEngine(
|
||||||
name: InferenceEngine,
|
name: string,
|
||||||
engineConfig: EngineConfig
|
engineConfig: EngineConfig
|
||||||
): Promise<{ messages: string }>
|
): Promise<{ messages: string }>
|
||||||
|
|
||||||
@ -83,7 +82,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @returns A Promise that resolves to an object of default engine.
|
* @returns A Promise that resolves to an object of default engine.
|
||||||
*/
|
*/
|
||||||
abstract getDefaultEngineVariant(
|
abstract getDefaultEngineVariant(
|
||||||
name: InferenceEngine
|
name: string
|
||||||
): Promise<DefaultEngineVariant>
|
): Promise<DefaultEngineVariant>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -92,7 +91,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @returns A Promise that resolves to set default engine.
|
* @returns A Promise that resolves to set default engine.
|
||||||
*/
|
*/
|
||||||
abstract setDefaultEngineVariant(
|
abstract setDefaultEngineVariant(
|
||||||
name: InferenceEngine,
|
name: string,
|
||||||
engineConfig: EngineConfig
|
engineConfig: EngineConfig
|
||||||
): Promise<{ messages: string }>
|
): Promise<{ messages: string }>
|
||||||
|
|
||||||
@ -100,7 +99,7 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
* @returns A Promise that resolves to update engine.
|
* @returns A Promise that resolves to update engine.
|
||||||
*/
|
*/
|
||||||
abstract updateEngine(
|
abstract updateEngine(
|
||||||
name: InferenceEngine,
|
name: string,
|
||||||
engineConfig?: EngineConfig
|
engineConfig?: EngineConfig
|
||||||
): Promise<{ messages: string }>
|
): Promise<{ messages: string }>
|
||||||
|
|
||||||
@ -112,5 +111,5 @@ export abstract class EngineManagementExtension extends BaseExtension {
|
|||||||
/**
|
/**
|
||||||
* @returns A Promise that resolves to an object of remote models list .
|
* @returns A Promise that resolves to an object of remote models list .
|
||||||
*/
|
*/
|
||||||
abstract getRemoteModels(name: InferenceEngine | string): Promise<any>
|
abstract getRemoteModels(name: string): Promise<any>
|
||||||
}
|
}
|
||||||
|
|||||||
@ -19,7 +19,7 @@ export abstract class HardwareManagementExtension extends BaseExtension {
|
|||||||
/**
|
/**
|
||||||
* @returns A Promise that resolves to an object of set active gpus.
|
* @returns A Promise that resolves to an object of set active gpus.
|
||||||
*/
|
*/
|
||||||
abstract setAvtiveGpu(data: { gpus: number[] }): Promise<{
|
abstract setActiveGpu(data: { gpus: number[] }): Promise<{
|
||||||
message: string
|
message: string
|
||||||
activated_gpus: number[]
|
activated_gpus: number[]
|
||||||
}>
|
}>
|
||||||
|
|||||||
@ -36,31 +36,31 @@ describe('fs module', () => {
|
|||||||
it('should call readFileSync with correct arguments', () => {
|
it('should call readFileSync with correct arguments', () => {
|
||||||
const args = ['path/to/file']
|
const args = ['path/to/file']
|
||||||
fs.readFileSync(...args)
|
fs.readFileSync(...args)
|
||||||
expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith(...args)
|
expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith({ args })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call existsSync with correct arguments', () => {
|
it('should call existsSync with correct arguments', () => {
|
||||||
const args = ['path/to/file']
|
const args = ['path/to/file']
|
||||||
fs.existsSync(...args)
|
fs.existsSync(...args)
|
||||||
expect(globalThis.core.api.existsSync).toHaveBeenCalledWith(...args)
|
expect(globalThis.core.api.existsSync).toHaveBeenCalledWith({ args })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call readdirSync with correct arguments', () => {
|
it('should call readdirSync with correct arguments', () => {
|
||||||
const args = ['path/to/directory']
|
const args = ['path/to/directory']
|
||||||
fs.readdirSync(...args)
|
fs.readdirSync(...args)
|
||||||
expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith(...args)
|
expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith({ args })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call mkdir with correct arguments', () => {
|
it('should call mkdir with correct arguments', () => {
|
||||||
const args = ['path/to/directory']
|
const args = ['path/to/directory']
|
||||||
fs.mkdir(...args)
|
fs.mkdir(...args)
|
||||||
expect(globalThis.core.api.mkdir).toHaveBeenCalledWith(...args)
|
expect(globalThis.core.api.mkdir).toHaveBeenCalledWith({ args })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call rm with correct arguments', () => {
|
it('should call rm with correct arguments', () => {
|
||||||
const args = ['path/to/directory']
|
const args = ['path/to/directory']
|
||||||
fs.rm(...args)
|
fs.rm(...args)
|
||||||
expect(globalThis.core.api.rm).toHaveBeenCalledWith(...args, { recursive: true, force: true })
|
expect(globalThis.core.api.rm).toHaveBeenCalledWith({ args })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should call unlinkSync with correct arguments', () => {
|
it('should call unlinkSync with correct arguments', () => {
|
||||||
|
|||||||
@ -4,7 +4,7 @@ import { FileStat } from '../types'
|
|||||||
* Writes data to a file at the specified path.
|
* Writes data to a file at the specified path.
|
||||||
* @returns {Promise<any>} A Promise that resolves when the file is written successfully.
|
* @returns {Promise<any>} A Promise that resolves when the file is written successfully.
|
||||||
*/
|
*/
|
||||||
const writeFileSync = (...args: any[]) => globalThis.core.api?.writeFileSync(...args)
|
const writeFileSync = (...args: any[]) => globalThis.core.api?.writeFileSync({ args })
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Writes blob data to a file at the specified path.
|
* Writes blob data to a file at the specified path.
|
||||||
@ -19,29 +19,29 @@ const writeBlob: (path: string, data: string) => Promise<any> = (path, data) =>
|
|||||||
* Reads the contents of a file at the specified path.
|
* Reads the contents of a file at the specified path.
|
||||||
* @returns {Promise<any>} A Promise that resolves with the contents of the file.
|
* @returns {Promise<any>} A Promise that resolves with the contents of the file.
|
||||||
*/
|
*/
|
||||||
const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync(...args)
|
const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync({ args })
|
||||||
/**
|
/**
|
||||||
* Check whether the file exists
|
* Check whether the file exists
|
||||||
* @param {string} path
|
* @param {string} path
|
||||||
* @returns {boolean} A boolean indicating whether the path is a file.
|
* @returns {boolean} A boolean indicating whether the path is a file.
|
||||||
*/
|
*/
|
||||||
const existsSync = (...args: any[]) => globalThis.core.api?.existsSync(...args)
|
const existsSync = (...args: any[]) => globalThis.core.api?.existsSync({ args })
|
||||||
/**
|
/**
|
||||||
* List the directory files
|
* List the directory files
|
||||||
* @returns {Promise<any>} A Promise that resolves with the contents of the directory.
|
* @returns {Promise<any>} A Promise that resolves with the contents of the directory.
|
||||||
*/
|
*/
|
||||||
const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync(...args)
|
const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync({ args })
|
||||||
/**
|
/**
|
||||||
* Creates a directory at the specified path.
|
* Creates a directory at the specified path.
|
||||||
* @returns {Promise<any>} A Promise that resolves when the directory is created successfully.
|
* @returns {Promise<any>} A Promise that resolves when the directory is created successfully.
|
||||||
*/
|
*/
|
||||||
const mkdir = (...args: any[]) => globalThis.core.api?.mkdir(...args)
|
const mkdir = (...args: any[]) => globalThis.core.api?.mkdir({ args })
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Removes a directory at the specified path.
|
* Removes a directory at the specified path.
|
||||||
* @returns {Promise<any>} A Promise that resolves when the directory is removed successfully.
|
* @returns {Promise<any>} A Promise that resolves when the directory is removed successfully.
|
||||||
*/
|
*/
|
||||||
const rm = (...args: any[]) => globalThis.core.api?.rm(...args, { recursive: true, force: true })
|
const rm = (...args: any[]) => globalThis.core.api?.rm({ args })
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Deletes a file from the local file system.
|
* Deletes a file from the local file system.
|
||||||
@ -80,10 +80,8 @@ const getGgufFiles: (paths: string[]) => Promise<any> = (paths) =>
|
|||||||
* @param outsideJanDataFolder - Whether the file is outside the Jan data folder.
|
* @param outsideJanDataFolder - Whether the file is outside the Jan data folder.
|
||||||
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
|
* @returns {Promise<FileStat>} - A promise that resolves with the file's stats.
|
||||||
*/
|
*/
|
||||||
const fileStat: (path: string, outsideJanDataFolder?: boolean) => Promise<FileStat | undefined> = (
|
const fileStat: (path: string) => Promise<FileStat | undefined> = (path) =>
|
||||||
path,
|
globalThis.core.api?.fileStat({ args: path })
|
||||||
outsideJanDataFolder
|
|
||||||
) => globalThis.core.api?.fileStat(path, outsideJanDataFolder)
|
|
||||||
|
|
||||||
// TODO: Export `dummy` fs functions automatically
|
// TODO: Export `dummy` fs functions automatically
|
||||||
// Currently adding these manually
|
// Currently adding these manually
|
||||||
|
|||||||
@ -3,7 +3,6 @@ import * as Events from './events'
|
|||||||
import * as FileSystem from './fs'
|
import * as FileSystem from './fs'
|
||||||
import * as Extension from './extension'
|
import * as Extension from './extension'
|
||||||
import * as Extensions from './extensions'
|
import * as Extensions from './extensions'
|
||||||
import * as Tools from './tools'
|
|
||||||
import * as Models from './models'
|
import * as Models from './models'
|
||||||
|
|
||||||
describe('Module Tests', () => {
|
describe('Module Tests', () => {
|
||||||
@ -27,10 +26,6 @@ describe('Module Tests', () => {
|
|||||||
expect(Extensions).toBeDefined()
|
expect(Extensions).toBeDefined()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should export all base tools', () => {
|
|
||||||
expect(Tools).toBeDefined()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should export all base tools', () => {
|
it('should export all base tools', () => {
|
||||||
expect(Models).toBeDefined()
|
expect(Models).toBeDefined()
|
||||||
})
|
})
|
||||||
|
|||||||
@ -28,12 +28,6 @@ export * from './extension'
|
|||||||
*/
|
*/
|
||||||
export * from './extensions'
|
export * from './extensions'
|
||||||
|
|
||||||
/**
|
|
||||||
* Export all base tools.
|
|
||||||
* @module
|
|
||||||
*/
|
|
||||||
export * from './tools'
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Export all base models.
|
* Export all base models.
|
||||||
* @module
|
* @module
|
||||||
|
|||||||
@ -38,10 +38,13 @@ export class ModelManager {
|
|||||||
return this.models.get(id) as T | undefined
|
return this.models.get(id) as T | undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The instance of the tool manager.
|
* Shared instance of ExtensionManager.
|
||||||
*/
|
*/
|
||||||
static instance(): ModelManager {
|
static instance() {
|
||||||
return (window.core?.modelManager as ModelManager) ?? new ModelManager()
|
if (!window.core.modelManager)
|
||||||
|
window.core.modelManager = new ModelManager()
|
||||||
|
return window.core.modelManager as ModelManager
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,5 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
it('should not throw any errors when imported', () => {
|
|
||||||
expect(() => require('./index')).not.toThrow();
|
|
||||||
})
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
export * from './manager'
|
|
||||||
export * from './tool'
|
|
||||||
@ -1,47 +0,0 @@
|
|||||||
import { AssistantTool, MessageRequest } from '../../types'
|
|
||||||
import { InferenceTool } from './tool'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Manages the registration and retrieval of inference tools.
|
|
||||||
*/
|
|
||||||
export class ToolManager {
|
|
||||||
public tools = new Map<string, InferenceTool>()
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Registers a tool.
|
|
||||||
* @param tool - The tool to register.
|
|
||||||
*/
|
|
||||||
register<T extends InferenceTool>(tool: T) {
|
|
||||||
this.tools.set(tool.name, tool)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Retrieves a tool by it's name.
|
|
||||||
* @param name - The name of the tool to retrieve.
|
|
||||||
* @returns The tool, if found.
|
|
||||||
*/
|
|
||||||
get<T extends InferenceTool>(name: string): T | undefined {
|
|
||||||
return this.tools.get(name) as T | undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
** Process the message request with the tools.
|
|
||||||
*/
|
|
||||||
process(request: MessageRequest, tools: AssistantTool[]): Promise<MessageRequest> {
|
|
||||||
return tools.reduce((prevPromise, currentTool) => {
|
|
||||||
return prevPromise.then((prevResult) => {
|
|
||||||
return currentTool.enabled
|
|
||||||
? this.get(currentTool.type)?.process(prevResult, currentTool) ??
|
|
||||||
Promise.resolve(prevResult)
|
|
||||||
: Promise.resolve(prevResult)
|
|
||||||
})
|
|
||||||
}, Promise.resolve(request))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The instance of the tool manager.
|
|
||||||
*/
|
|
||||||
static instance(): ToolManager {
|
|
||||||
return (window.core?.toolManager as ToolManager) ?? new ToolManager()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,63 +0,0 @@
|
|||||||
import { ToolManager } from '../../browser/tools/manager'
|
|
||||||
import { InferenceTool } from '../../browser/tools/tool'
|
|
||||||
import { AssistantTool, MessageRequest } from '../../types'
|
|
||||||
|
|
||||||
class MockInferenceTool implements InferenceTool {
|
|
||||||
name = 'mockTool'
|
|
||||||
process(request: MessageRequest, tool: AssistantTool): Promise<MessageRequest> {
|
|
||||||
return Promise.resolve(request)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
it('should register a tool', () => {
|
|
||||||
const manager = new ToolManager()
|
|
||||||
const tool = new MockInferenceTool()
|
|
||||||
manager.register(tool)
|
|
||||||
expect(manager.get(tool.name)).toBe(tool)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should retrieve a tool by its name', () => {
|
|
||||||
const manager = new ToolManager()
|
|
||||||
const tool = new MockInferenceTool()
|
|
||||||
manager.register(tool)
|
|
||||||
const retrievedTool = manager.get(tool.name)
|
|
||||||
expect(retrievedTool).toBe(tool)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should return undefined for a non-existent tool', () => {
|
|
||||||
const manager = new ToolManager()
|
|
||||||
const retrievedTool = manager.get('nonExistentTool')
|
|
||||||
expect(retrievedTool).toBeUndefined()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should process the message request with enabled tools', async () => {
|
|
||||||
const manager = new ToolManager()
|
|
||||||
const tool = new MockInferenceTool()
|
|
||||||
manager.register(tool)
|
|
||||||
|
|
||||||
const request: MessageRequest = { message: 'test' } as any
|
|
||||||
const tools: AssistantTool[] = [{ type: 'mockTool', enabled: true }] as any
|
|
||||||
|
|
||||||
const result = await manager.process(request, tools)
|
|
||||||
expect(result).toBe(request)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should skip processing for disabled tools', async () => {
|
|
||||||
const manager = new ToolManager()
|
|
||||||
const tool = new MockInferenceTool()
|
|
||||||
manager.register(tool)
|
|
||||||
|
|
||||||
const request: MessageRequest = { message: 'test' } as any
|
|
||||||
const tools: AssistantTool[] = [{ type: 'mockTool', enabled: false }] as any
|
|
||||||
|
|
||||||
const result = await manager.process(request, tools)
|
|
||||||
expect(result).toBe(request)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should throw an error when process is called without implementation', () => {
|
|
||||||
class TestTool extends InferenceTool {
|
|
||||||
name = 'testTool'
|
|
||||||
}
|
|
||||||
const tool = new TestTool()
|
|
||||||
expect(() => tool.process({} as MessageRequest)).toThrowError()
|
|
||||||
})
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
import { AssistantTool, MessageRequest } from '../../types'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Represents a base inference tool.
|
|
||||||
*/
|
|
||||||
export abstract class InferenceTool {
|
|
||||||
abstract name: string
|
|
||||||
/*
|
|
||||||
** Process a message request and return the processed message request.
|
|
||||||
*/
|
|
||||||
abstract process(request: MessageRequest, tool?: AssistantTool): Promise<MessageRequest>
|
|
||||||
}
|
|
||||||
@ -8,6 +8,7 @@ import {
|
|||||||
normalizeFilePath,
|
normalizeFilePath,
|
||||||
getJanDataFolderPath,
|
getJanDataFolderPath,
|
||||||
} from '../../helper'
|
} from '../../helper'
|
||||||
|
import { readdirSync, readFileSync } from 'fs'
|
||||||
|
|
||||||
export class App implements Processor {
|
export class App implements Processor {
|
||||||
observer?: Function
|
observer?: Function
|
||||||
@ -25,8 +26,8 @@ export class App implements Processor {
|
|||||||
/**
|
/**
|
||||||
* Joins multiple paths together, respect to the current OS.
|
* Joins multiple paths together, respect to the current OS.
|
||||||
*/
|
*/
|
||||||
joinPath(args: any[]) {
|
joinPath(args: any) {
|
||||||
return join(...args)
|
return join(...('args' in args ? args.args : args))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -69,6 +70,9 @@ export class App implements Processor {
|
|||||||
writeLog(args)
|
writeLog(args)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get app configurations.
|
||||||
|
*/
|
||||||
getAppConfigurations() {
|
getAppConfigurations() {
|
||||||
return appConfiguration()
|
return appConfiguration()
|
||||||
}
|
}
|
||||||
|
|||||||
@ -21,18 +21,21 @@ export class FileSystem implements Processor {
|
|||||||
return import(FileSystem.moduleName).then((mdl) =>
|
return import(FileSystem.moduleName).then((mdl) =>
|
||||||
mdl[route](
|
mdl[route](
|
||||||
...args.map((arg: any, index: number) => {
|
...args.map((arg: any, index: number) => {
|
||||||
if(index !== 0) {
|
const arg0 = args[0]
|
||||||
|
if ('args' in arg0) arg = arg0.args
|
||||||
|
if (Array.isArray(arg)) arg = arg[0]
|
||||||
|
if (index !== 0) {
|
||||||
return arg
|
return arg
|
||||||
}
|
}
|
||||||
if (index === 0 && typeof arg !== 'string') {
|
if (index === 0 && typeof arg !== 'string') {
|
||||||
throw new Error(`Invalid argument ${JSON.stringify(args)}`)
|
throw new Error(`Invalid argument ${JSON.stringify(args)}`)
|
||||||
}
|
}
|
||||||
const path =
|
const path =
|
||||||
(arg.startsWith(`file:/`) || arg.startsWith(`file:\\`))
|
arg.startsWith(`file:/`) || arg.startsWith(`file:\\`)
|
||||||
? join(getJanDataFolderPath(), normalizeFilePath(arg))
|
? join(getJanDataFolderPath(), normalizeFilePath(arg))
|
||||||
: arg
|
: arg
|
||||||
|
|
||||||
if(path.startsWith(`http://`) || path.startsWith(`https://`)) {
|
if (path.startsWith(`http://`) || path.startsWith(`https://`)) {
|
||||||
return path
|
return path
|
||||||
}
|
}
|
||||||
const absolutePath = resolve(path)
|
const absolutePath = resolve(path)
|
||||||
@ -88,5 +91,4 @@ export class FileSystem implements Processor {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -94,8 +94,6 @@ export default class Extension {
|
|||||||
`Package ${this.origin} does not contain a valid manifest: ${error}`
|
`Package ${this.origin} does not contain a valid manifest: ${error}`
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
return true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@ -18,9 +18,7 @@ export const getAppConfigurations = (): AppConfiguration => {
|
|||||||
|
|
||||||
if (!fs.existsSync(configurationFile)) {
|
if (!fs.existsSync(configurationFile)) {
|
||||||
// create default app config if we don't have one
|
// create default app config if we don't have one
|
||||||
console.debug(
|
console.debug(`App config not found, creating default config at ${configurationFile}`)
|
||||||
`App config not found, creating default config at ${configurationFile}`
|
|
||||||
)
|
|
||||||
fs.writeFileSync(configurationFile, JSON.stringify(appDefaultConfiguration))
|
fs.writeFileSync(configurationFile, JSON.stringify(appDefaultConfiguration))
|
||||||
return appDefaultConfiguration
|
return appDefaultConfiguration
|
||||||
}
|
}
|
||||||
@ -31,28 +29,23 @@ export const getAppConfigurations = (): AppConfiguration => {
|
|||||||
)
|
)
|
||||||
return appConfigurations
|
return appConfigurations
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(
|
console.error(`Failed to read app config, return default config instead! Err: ${err}`)
|
||||||
`Failed to read app config, return default config instead! Err: ${err}`
|
|
||||||
)
|
|
||||||
return defaultAppConfig()
|
return defaultAppConfig()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const getConfigurationFilePath = () =>
|
const getConfigurationFilePath = () =>
|
||||||
join(
|
join(
|
||||||
global.core?.appPath() ||
|
global.core?.appPath() || process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
|
||||||
process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
|
|
||||||
configurationFileName
|
configurationFileName
|
||||||
)
|
)
|
||||||
|
|
||||||
export const updateAppConfiguration = (
|
export const updateAppConfiguration = ({
|
||||||
|
configuration,
|
||||||
|
}: {
|
||||||
configuration: AppConfiguration
|
configuration: AppConfiguration
|
||||||
): Promise<void> => {
|
}): Promise<void> => {
|
||||||
const configurationFile = getConfigurationFilePath()
|
const configurationFile = getConfigurationFilePath()
|
||||||
console.debug(
|
|
||||||
'updateAppConfiguration, configurationFile: ',
|
|
||||||
configurationFile
|
|
||||||
)
|
|
||||||
|
|
||||||
fs.writeFileSync(configurationFile, JSON.stringify(configuration))
|
fs.writeFileSync(configurationFile, JSON.stringify(configuration))
|
||||||
return Promise.resolve()
|
return Promise.resolve()
|
||||||
@ -87,14 +80,11 @@ export const getJanExtensionsPath = (): string => {
|
|||||||
*/
|
*/
|
||||||
export const defaultAppConfig = (): AppConfiguration => {
|
export const defaultAppConfig = (): AppConfiguration => {
|
||||||
const { app } = require('electron')
|
const { app } = require('electron')
|
||||||
const defaultJanDataFolder = join(
|
const defaultJanDataFolder = join(app?.getPath('userData') ?? os?.homedir() ?? '', 'data')
|
||||||
app?.getPath('userData') ?? os?.homedir() ?? '',
|
|
||||||
'data'
|
|
||||||
)
|
|
||||||
return {
|
return {
|
||||||
data_folder:
|
data_folder:
|
||||||
process.env.CI === 'e2e'
|
process.env.CI === 'e2e'
|
||||||
? (process.env.APP_CONFIG_PATH ?? resolve('./test-data'))
|
? process.env.APP_CONFIG_PATH ?? resolve('./test-data')
|
||||||
: defaultJanDataFolder,
|
: defaultJanDataFolder,
|
||||||
quick_ask: false,
|
quick_ask: false,
|
||||||
}
|
}
|
||||||
|
|||||||
@ -148,10 +148,7 @@ export const CoreRoutes = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
export const APIRoutes = [...CoreRoutes, ...Object.values(NativeRoute)]
|
export const APIRoutes = [...CoreRoutes, ...Object.values(NativeRoute)]
|
||||||
export const APIEvents = [
|
export const APIEvents = [...Object.values(AppEvent), ...Object.values(DownloadEvent)]
|
||||||
...Object.values(AppEvent),
|
|
||||||
...Object.values(DownloadEvent),
|
|
||||||
]
|
|
||||||
export type PayloadType = {
|
export type PayloadType = {
|
||||||
messages: ChatCompletionMessage[]
|
messages: ChatCompletionMessage[]
|
||||||
model: string
|
model: string
|
||||||
|
|||||||
@ -1,7 +1,5 @@
|
|||||||
import { InferenceEngine } from '../../types'
|
|
||||||
|
|
||||||
export type Engines = {
|
export type Engines = {
|
||||||
[key in InferenceEngine]: (EngineVariant & EngineConfig)[]
|
[key: string]: (EngineVariant & EngineConfig)[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export type EngineMetadata = {
|
export type EngineMetadata = {
|
||||||
@ -22,13 +20,13 @@ export type EngineMetadata = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export type EngineVariant = {
|
export type EngineVariant = {
|
||||||
engine: InferenceEngine
|
engine: string
|
||||||
name: string
|
name: string
|
||||||
version: string
|
version: string
|
||||||
}
|
}
|
||||||
|
|
||||||
export type DefaultEngineVariant = {
|
export type DefaultEngineVariant = {
|
||||||
engine: InferenceEngine
|
engine: string
|
||||||
variant: string
|
variant: string
|
||||||
version: string
|
version: string
|
||||||
}
|
}
|
||||||
|
|||||||
@ -7,6 +7,7 @@ export enum ChatCompletionRole {
|
|||||||
System = 'system',
|
System = 'system',
|
||||||
Assistant = 'assistant',
|
Assistant = 'assistant',
|
||||||
User = 'user',
|
User = 'user',
|
||||||
|
Tool = 'tool',
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -18,6 +19,9 @@ export type ChatCompletionMessage = {
|
|||||||
content?: ChatCompletionMessageContent
|
content?: ChatCompletionMessageContent
|
||||||
/** The role of the author of this message. **/
|
/** The role of the author of this message. **/
|
||||||
role: ChatCompletionRole
|
role: ChatCompletionRole
|
||||||
|
type?: string
|
||||||
|
output?: string
|
||||||
|
tool_call_id?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
export type ChatCompletionMessageContent =
|
export type ChatCompletionMessageContent =
|
||||||
|
|||||||
@ -36,6 +36,8 @@ export type ThreadMessage = {
|
|||||||
type?: string
|
type?: string
|
||||||
/** The error code which explain what error type. Used in conjunction with MessageStatus.Error */
|
/** The error code which explain what error type. Used in conjunction with MessageStatus.Error */
|
||||||
error_code?: ErrorCode
|
error_code?: ErrorCode
|
||||||
|
|
||||||
|
tool_call_id?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -43,6 +45,9 @@ export type ThreadMessage = {
|
|||||||
* @data_transfer_object
|
* @data_transfer_object
|
||||||
*/
|
*/
|
||||||
export type MessageRequest = {
|
export type MessageRequest = {
|
||||||
|
/**
|
||||||
|
* The id of the message request.
|
||||||
|
*/
|
||||||
id?: string
|
id?: string
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -71,6 +76,11 @@ export type MessageRequest = {
|
|||||||
// TODO: deprecate threadId field
|
// TODO: deprecate threadId field
|
||||||
thread?: Thread
|
thread?: Thread
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ChatCompletion tools
|
||||||
|
*/
|
||||||
|
tools?: MessageTool[]
|
||||||
|
|
||||||
/** Engine name to process */
|
/** Engine name to process */
|
||||||
engine?: string
|
engine?: string
|
||||||
|
|
||||||
@ -78,6 +88,24 @@ export type MessageRequest = {
|
|||||||
type?: string
|
type?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ChatCompletion Tool parameters
|
||||||
|
*/
|
||||||
|
export type MessageTool = {
|
||||||
|
type: string
|
||||||
|
function: MessageFunction
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ChatCompletion Tool's function parameters
|
||||||
|
*/
|
||||||
|
export type MessageFunction = {
|
||||||
|
name: string
|
||||||
|
description?: string
|
||||||
|
parameters?: Record<string, unknown>
|
||||||
|
strict?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The status of the message.
|
* The status of the message.
|
||||||
* @data_transfer_object
|
* @data_transfer_object
|
||||||
|
|||||||
@ -6,29 +6,7 @@ export type ModelInfo = {
|
|||||||
id: string
|
id: string
|
||||||
settings?: ModelSettingParams
|
settings?: ModelSettingParams
|
||||||
parameters?: ModelRuntimeParams
|
parameters?: ModelRuntimeParams
|
||||||
engine?: InferenceEngine
|
engine?: string
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Represents the inference engine.
|
|
||||||
* @stored
|
|
||||||
*/
|
|
||||||
export enum InferenceEngine {
|
|
||||||
anthropic = 'anthropic',
|
|
||||||
mistral = 'mistral',
|
|
||||||
martian = 'martian',
|
|
||||||
openrouter = 'openrouter',
|
|
||||||
nitro = 'nitro',
|
|
||||||
openai = 'openai',
|
|
||||||
groq = 'groq',
|
|
||||||
triton_trtllm = 'triton_trtllm',
|
|
||||||
nitro_tensorrt_llm = 'nitro-tensorrt-llm',
|
|
||||||
cohere = 'cohere',
|
|
||||||
nvidia = 'nvidia',
|
|
||||||
cortex = 'cortex',
|
|
||||||
cortex_llamacpp = 'llama-cpp',
|
|
||||||
cortex_onnx = 'onnxruntime',
|
|
||||||
cortex_tensorrtllm = 'tensorrt-llm',
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Represents an artifact of a model, including its filename and URL
|
// Represents an artifact of a model, including its filename and URL
|
||||||
@ -105,7 +83,7 @@ export type Model = {
|
|||||||
/**
|
/**
|
||||||
* The model engine.
|
* The model engine.
|
||||||
*/
|
*/
|
||||||
engine: InferenceEngine
|
engine: string
|
||||||
}
|
}
|
||||||
|
|
||||||
// Represents metadata associated with a model
|
// Represents metadata associated with a model
|
||||||
|
|||||||
@ -27,8 +27,8 @@ export type Thread = {
|
|||||||
* @stored
|
* @stored
|
||||||
*/
|
*/
|
||||||
export type ThreadAssistantInfo = {
|
export type ThreadAssistantInfo = {
|
||||||
assistant_id: string
|
id: string
|
||||||
assistant_name: string
|
name: string
|
||||||
model: ModelInfo
|
model: ModelInfo
|
||||||
instructions?: string
|
instructions?: string
|
||||||
tools?: AssistantTool[]
|
tools?: AssistantTool[]
|
||||||
|
|||||||
@ -1,46 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
root: true,
|
|
||||||
parser: '@typescript-eslint/parser',
|
|
||||||
plugins: ['@typescript-eslint'],
|
|
||||||
env: {
|
|
||||||
node: true,
|
|
||||||
},
|
|
||||||
extends: [
|
|
||||||
'eslint:recommended',
|
|
||||||
'plugin:@typescript-eslint/recommended',
|
|
||||||
'plugin:react/recommended',
|
|
||||||
],
|
|
||||||
rules: {
|
|
||||||
'react/prop-types': 'off', // In favor of strong typing - no need to dedupe
|
|
||||||
'react/no-is-mounted': 'off',
|
|
||||||
'@typescript-eslint/no-non-null-assertion': 'off',
|
|
||||||
'@typescript-eslint/no-var-requires': 'off',
|
|
||||||
'@typescript-eslint/ban-ts-comment': 'off',
|
|
||||||
'@typescript-eslint/no-unused-vars': 'off',
|
|
||||||
'@typescript-eslint/no-explicit-any': 'off',
|
|
||||||
},
|
|
||||||
settings: {
|
|
||||||
react: {
|
|
||||||
createClass: 'createReactClass', // Regex for Component Factory to use,
|
|
||||||
// default to "createReactClass"
|
|
||||||
pragma: 'React', // Pragma to use, default to "React"
|
|
||||||
version: 'detect', // React version. "detect" automatically picks the version you have installed.
|
|
||||||
// You can also use `16.0`, `16.3`, etc, if you want to override the detected value.
|
|
||||||
// default to latest and warns if missing
|
|
||||||
// It will default to "detect" in the future
|
|
||||||
},
|
|
||||||
linkComponents: [
|
|
||||||
// Components used as alternatives to <a> for linking, eg. <Link to={ url } />
|
|
||||||
'Hyperlink',
|
|
||||||
{ name: 'Link', linkAttribute: 'to' },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
ignorePatterns: [
|
|
||||||
'build',
|
|
||||||
'renderer',
|
|
||||||
'node_modules',
|
|
||||||
'@global',
|
|
||||||
'playwright-report',
|
|
||||||
'test-data',
|
|
||||||
],
|
|
||||||
}
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
export {}
|
|
||||||
|
|
||||||
declare global {
|
|
||||||
namespace NodeJS {
|
|
||||||
interface Global {
|
|
||||||
core: any
|
|
||||||
}
|
|
||||||
}
|
|
||||||
var core: any | undefined
|
|
||||||
}
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
|
||||||
<plist version="1.0">
|
|
||||||
<dict>
|
|
||||||
<key>com.apple.security.cs.allow-jit</key>
|
|
||||||
<true/>
|
|
||||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
|
||||||
<true/>
|
|
||||||
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
|
||||||
<true/>
|
|
||||||
<key>com.apple.security.cs.disable-library-validation</key>
|
|
||||||
<true/>
|
|
||||||
</dict>
|
|
||||||
</plist>
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
import { Handler, RequestHandler } from '@janhq/core/node'
|
|
||||||
import { ipcMain } from 'electron'
|
|
||||||
import { windowManager } from '../managers/window'
|
|
||||||
|
|
||||||
export function injectHandler() {
|
|
||||||
const ipcWrapper: Handler = (
|
|
||||||
route: string,
|
|
||||||
listener: (...args: any[]) => any
|
|
||||||
) =>
|
|
||||||
ipcMain.handle(route, async (_event, ...args: any[]) => {
|
|
||||||
return listener(...args)
|
|
||||||
})
|
|
||||||
|
|
||||||
const handler = new RequestHandler(
|
|
||||||
ipcWrapper,
|
|
||||||
(channel: string, args: any) =>
|
|
||||||
windowManager.mainWindow?.webContents.send(channel, args)
|
|
||||||
)
|
|
||||||
handler.handle()
|
|
||||||
}
|
|
||||||
@ -1,327 +0,0 @@
|
|||||||
import { app, ipcMain, dialog, shell, nativeTheme } from 'electron'
|
|
||||||
import { autoUpdater } from 'electron-updater'
|
|
||||||
import { join } from 'path'
|
|
||||||
import { windowManager } from '../managers/window'
|
|
||||||
import {
|
|
||||||
ModuleManager,
|
|
||||||
getJanDataFolderPath,
|
|
||||||
getJanExtensionsPath,
|
|
||||||
init,
|
|
||||||
AppEvent,
|
|
||||||
NativeRoute,
|
|
||||||
SelectFileProp,
|
|
||||||
} from '@janhq/core/node'
|
|
||||||
import { SelectFileOption } from '@janhq/core'
|
|
||||||
import { menu } from '../utils/menu'
|
|
||||||
import { migrate } from '../utils/migration'
|
|
||||||
import { createUserSpace } from '../utils/path'
|
|
||||||
import { setupExtensions } from '../utils/extension'
|
|
||||||
|
|
||||||
const isMac = process.platform === 'darwin'
|
|
||||||
|
|
||||||
export function handleAppIPCs() {
|
|
||||||
/**
|
|
||||||
* Handles the "openAppDirectory" IPC message by opening the app's user data directory.
|
|
||||||
* The `shell.openPath` method is used to open the directory in the user's default file explorer.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.openAppDirectory, async (_event) => {
|
|
||||||
shell.openPath(getJanDataFolderPath())
|
|
||||||
})
|
|
||||||
|
|
||||||
ipcMain.handle(NativeRoute.appUpdateDownload, async (_event) => {
|
|
||||||
autoUpdater.downloadUpdate()
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "setNativeThemeLight" IPC message by setting the native theme source to "light".
|
|
||||||
* This will change the appearance of the app to the light theme.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.setNativeThemeLight, () => {
|
|
||||||
nativeTheme.themeSource = 'light'
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "setCloseApp" IPC message by closing the main application window.
|
|
||||||
* This effectively closes the application if no other windows are open.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.setCloseApp, () => {
|
|
||||||
windowManager.mainWindow?.close()
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "setMinimizeApp" IPC message by minimizing the main application window.
|
|
||||||
* The window will be minimized to the system's taskbar or dock.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.setMinimizeApp, () => {
|
|
||||||
windowManager.mainWindow?.minimize()
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "setMaximizeApp" IPC message. It toggles the maximization state of the main window.
|
|
||||||
* If the window is currently maximized, it will be un-maximized (restored to its previous size).
|
|
||||||
* If the window is not maximized, it will be maximized to fill the screen.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.setMaximizeApp, async (_event) => {
|
|
||||||
if (windowManager.mainWindow?.isMaximized()) {
|
|
||||||
windowManager.mainWindow.unmaximize()
|
|
||||||
} else {
|
|
||||||
windowManager.mainWindow?.maximize()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "setNativeThemeDark" IPC message by setting the native theme source to "dark".
|
|
||||||
* This will change the appearance of the app to the dark theme.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.setNativeThemeDark, () => {
|
|
||||||
nativeTheme.themeSource = 'dark'
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Opens a URL in the user's default browser.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param url - The URL to open.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.openExternalUrl, async (_event, url) => {
|
|
||||||
shell.openExternal(url)
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Opens a URL in the user's default browser.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param url - The URL to open.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.openFileExplore, async (_event, url) => {
|
|
||||||
shell.openPath(url)
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Relaunches the app in production - reload window in development.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param url - The URL to reload.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.relaunch, async (_event) => {
|
|
||||||
ModuleManager.instance.clearImportedModules()
|
|
||||||
|
|
||||||
if (app.isPackaged) {
|
|
||||||
app.relaunch()
|
|
||||||
app.exit()
|
|
||||||
} else {
|
|
||||||
for (const modulePath in ModuleManager.instance.requiredModules) {
|
|
||||||
delete require.cache[
|
|
||||||
require.resolve(join(getJanExtensionsPath(), modulePath))
|
|
||||||
]
|
|
||||||
}
|
|
||||||
init({
|
|
||||||
// Function to check from the main process that user wants to install a extension
|
|
||||||
confirmInstall: async (_extensions: string[]) => {
|
|
||||||
return true
|
|
||||||
},
|
|
||||||
// Path to install extension to
|
|
||||||
extensionsPath: getJanExtensionsPath(),
|
|
||||||
})
|
|
||||||
windowManager.mainWindow?.reload()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "selectDirectory" IPC message to open a dialog for selecting a directory.
|
|
||||||
* If no main window is found, logs an error and exits.
|
|
||||||
* @returns {string} The path of the selected directory, or nothing if canceled.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.selectDirectory, async () => {
|
|
||||||
const mainWindow = windowManager.mainWindow
|
|
||||||
if (!mainWindow) {
|
|
||||||
console.error('No main window found')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const { canceled, filePaths } = await dialog.showOpenDialog(mainWindow, {
|
|
||||||
title: 'Select a folder',
|
|
||||||
buttonLabel: 'Select Folder',
|
|
||||||
properties: ['openDirectory', 'createDirectory'],
|
|
||||||
})
|
|
||||||
if (canceled) {
|
|
||||||
return
|
|
||||||
} else {
|
|
||||||
return filePaths[0]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "selectFiles" IPC message to open a dialog for selecting files.
|
|
||||||
* Allows options for setting the dialog title, button label, and selection properties.
|
|
||||||
* Logs an error if no main window is found.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param option - Options for customizing file selection dialog.
|
|
||||||
* @returns {string[]} An array of selected file paths, or nothing if canceled.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.selectFiles,
|
|
||||||
async (_event, option?: SelectFileOption) => {
|
|
||||||
const mainWindow = windowManager.mainWindow
|
|
||||||
if (!mainWindow) {
|
|
||||||
console.error('No main window found')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const title = option?.title ?? 'Select files'
|
|
||||||
const buttonLabel = option?.buttonLabel ?? 'Select'
|
|
||||||
const props: SelectFileProp[] = ['openFile']
|
|
||||||
|
|
||||||
if (option?.allowMultiple) {
|
|
||||||
props.push('multiSelections')
|
|
||||||
}
|
|
||||||
|
|
||||||
if (option?.selectDirectory) {
|
|
||||||
props.push('openDirectory')
|
|
||||||
}
|
|
||||||
console.debug(`Select files with props: ${props}`)
|
|
||||||
const { canceled, filePaths } = await dialog.showOpenDialog(mainWindow, {
|
|
||||||
title,
|
|
||||||
buttonLabel,
|
|
||||||
properties: props,
|
|
||||||
filters: option?.filters,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (canceled) return
|
|
||||||
|
|
||||||
return filePaths
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "hideQuickAskWindow" IPC message to hide the quick ask window.
|
|
||||||
* @returns A promise that resolves when the window is hidden.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.hideQuickAskWindow,
|
|
||||||
async (): Promise<void> => windowManager.hideQuickAskWindow()
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "sendQuickAskInput" IPC message to send user input to the main window.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param input - User input string to be sent.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.sendQuickAskInput,
|
|
||||||
async (_event, input: string): Promise<void> => {
|
|
||||||
windowManager.mainWindow?.webContents.send(
|
|
||||||
AppEvent.onUserSubmitQuickAsk,
|
|
||||||
input
|
|
||||||
)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "showOpenMenu" IPC message to show the context menu at given coordinates.
|
|
||||||
* Only applicable on non-Mac platforms.
|
|
||||||
* @param e - The event object.
|
|
||||||
* @param args - Contains coordinates where the menu should appear.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.showOpenMenu, function (e, args) {
|
|
||||||
if (!isMac && windowManager.mainWindow) {
|
|
||||||
menu.popup({
|
|
||||||
window: windowManager.mainWindow,
|
|
||||||
x: args.x,
|
|
||||||
y: args.y,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "hideMainWindow" IPC message to hide the main application window.
|
|
||||||
* @returns A promise that resolves when the window is hidden.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.hideMainWindow,
|
|
||||||
async (): Promise<void> => windowManager.hideMainWindow()
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "showMainWindow" IPC message to show the main application window.
|
|
||||||
* @returns A promise that resolves when the window is shown.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.showMainWindow,
|
|
||||||
async (): Promise<void> => windowManager.showMainWindow()
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "quickAskSizeUpdated" IPC message to update the size of the quick ask window.
|
|
||||||
* Resizes window by the given height offset.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param heightOffset - The amount of height to increase.
|
|
||||||
* @returns A promise that resolves when the window is resized.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.quickAskSizeUpdated,
|
|
||||||
async (_event, heightOffset: number): Promise<void> =>
|
|
||||||
windowManager.expandQuickAskWindow(heightOffset)
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "ackDeepLink" IPC message to acknowledge a deep link.
|
|
||||||
* Triggers handling of deep link in the application.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @returns A promise that resolves when the deep link is acknowledged.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.ackDeepLink, async (_event): Promise<void> => {
|
|
||||||
windowManager.ackDeepLink()
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "factoryReset" IPC message to reset the application to its initial state.
|
|
||||||
* Clears loaded modules, recreates user space, runs migrations, and sets up extensions.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @returns A promise that resolves after the reset operations are complete.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.factoryReset, async (_event): Promise<void> => {
|
|
||||||
ModuleManager.instance.clearImportedModules()
|
|
||||||
return createUserSpace().then(migrate).then(setupExtensions)
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "startServer" IPC message to start the Jan API server.
|
|
||||||
* Initializes and starts server with provided configuration options.
|
|
||||||
* @param _event - The IPC event object.
|
|
||||||
* @param args - Configuration object containing host, port, CORS settings etc.
|
|
||||||
* @returns Promise that resolves when server starts successfully
|
|
||||||
*/
|
|
||||||
ipcMain.handle(
|
|
||||||
NativeRoute.startServer,
|
|
||||||
async (_event, args): Promise<void> => {
|
|
||||||
const { startServer } = require('@janhq/server')
|
|
||||||
return startServer({
|
|
||||||
host: args?.host,
|
|
||||||
port: args?.port,
|
|
||||||
isCorsEnabled: args?.isCorsEnabled,
|
|
||||||
isVerboseEnabled: args?.isVerboseEnabled,
|
|
||||||
prefix: args?.prefix,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "stopServer" IPC message to stop the Jan API server.
|
|
||||||
* Gracefully shuts down the server instance.
|
|
||||||
* @param _event - The IPC event object
|
|
||||||
* @returns Promise that resolves when server stops successfully
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.stopServer, async (_event): Promise<void> => {
|
|
||||||
/**
|
|
||||||
* Stop Jan API Server.
|
|
||||||
*/
|
|
||||||
const { stopServer } = require('@janhq/server')
|
|
||||||
return stopServer()
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles the "appToken" IPC message to generate a random app ID.
|
|
||||||
*/
|
|
||||||
ipcMain.handle(NativeRoute.appToken, async (_event): Promise<string> => {
|
|
||||||
return process.env.appToken ?? 'cortex.cpp'
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,70 +0,0 @@
|
|||||||
import { app, dialog } from 'electron'
|
|
||||||
import { windowManager } from './../managers/window'
|
|
||||||
import {
|
|
||||||
ProgressInfo,
|
|
||||||
UpdateDownloadedEvent,
|
|
||||||
UpdateInfo,
|
|
||||||
autoUpdater,
|
|
||||||
} from 'electron-updater'
|
|
||||||
import { AppEvent } from '@janhq/core/node'
|
|
||||||
import { trayManager } from '../managers/tray'
|
|
||||||
|
|
||||||
export let waitingToInstallVersion: string | undefined = undefined
|
|
||||||
|
|
||||||
export function handleAppUpdates() {
|
|
||||||
/* Should not check for update during development */
|
|
||||||
if (!app.isPackaged) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
/* New Update Available */
|
|
||||||
autoUpdater.on('update-available', async (_info: UpdateInfo) => {
|
|
||||||
windowManager.mainWindow?.webContents.send(
|
|
||||||
AppEvent.onAppUpdateAvailable,
|
|
||||||
{}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
/* App Update Completion Message */
|
|
||||||
autoUpdater.on('update-downloaded', async (_info: UpdateDownloadedEvent) => {
|
|
||||||
windowManager.mainWindow?.webContents.send(
|
|
||||||
AppEvent.onAppUpdateDownloadSuccess,
|
|
||||||
{}
|
|
||||||
)
|
|
||||||
const action = await dialog.showMessageBox({
|
|
||||||
message: `Update downloaded. Please restart the application to apply the updates.`,
|
|
||||||
buttons: ['Restart', 'Later'],
|
|
||||||
})
|
|
||||||
if (action.response === 0) {
|
|
||||||
trayManager.destroyCurrentTray()
|
|
||||||
windowManager.closeQuickAskWindow()
|
|
||||||
waitingToInstallVersion = _info?.version
|
|
||||||
autoUpdater.quitAndInstall()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
/* App Update Error */
|
|
||||||
autoUpdater.on('error', (info: Error) => {
|
|
||||||
windowManager.mainWindow?.webContents.send(
|
|
||||||
AppEvent.onAppUpdateDownloadError,
|
|
||||||
{ failedToInstallVersion: waitingToInstallVersion, info }
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
/* App Update Progress */
|
|
||||||
autoUpdater.on('download-progress', (progress: ProgressInfo) => {
|
|
||||||
console.debug('app update progress: ', progress.percent)
|
|
||||||
windowManager.mainWindow?.webContents.send(
|
|
||||||
AppEvent.onAppUpdateDownloadUpdate,
|
|
||||||
{
|
|
||||||
...progress,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
autoUpdater.autoDownload = false
|
|
||||||
autoUpdater.autoInstallOnAppQuit = true
|
|
||||||
if (process.env.CI !== 'e2e') {
|
|
||||||
autoUpdater.checkForUpdates()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
Before Width: | Height: | Size: 1.3 KiB |
|
Before Width: | Height: | Size: 2.2 KiB |
|
Before Width: | Height: | Size: 76 KiB |
|
Before Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 806 B |
|
Before Width: | Height: | Size: 1.8 KiB |
|
Before Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 49 KiB |
|
Before Width: | Height: | Size: 835 B |
|
Before Width: | Height: | Size: 1.9 KiB |
|
Before Width: | Height: | Size: 28 KiB |
|
Before Width: | Height: | Size: 49 KiB |
@ -1,18 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
preset: 'ts-jest',
|
|
||||||
testEnvironment: 'node',
|
|
||||||
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
|
|
||||||
modulePathIgnorePatterns: ['<rootDir>/tests'],
|
|
||||||
moduleNameMapper: {
|
|
||||||
'@/(.*)': '<rootDir>/src/$1',
|
|
||||||
},
|
|
||||||
runner: './testRunner.js',
|
|
||||||
transform: {
|
|
||||||
'^.+\\.tsx?$': [
|
|
||||||
'ts-jest',
|
|
||||||
{
|
|
||||||
diagnostics: false,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
161
electron/main.ts
@ -1,161 +0,0 @@
|
|||||||
import { app, BrowserWindow } from 'electron'
|
|
||||||
|
|
||||||
import { join, resolve } from 'path'
|
|
||||||
/**
|
|
||||||
* Managers
|
|
||||||
**/
|
|
||||||
import { windowManager } from './managers/window'
|
|
||||||
import { getAppConfigurations, log } from '@janhq/core/node'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* IPC Handlers
|
|
||||||
**/
|
|
||||||
import { injectHandler } from './handlers/common'
|
|
||||||
import { handleAppUpdates } from './handlers/update'
|
|
||||||
import { handleAppIPCs } from './handlers/native'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Utils
|
|
||||||
**/
|
|
||||||
import { setupMenu } from './utils/menu'
|
|
||||||
import { createUserSpace } from './utils/path'
|
|
||||||
import { migrate } from './utils/migration'
|
|
||||||
import { cleanUpAndQuit } from './utils/clean'
|
|
||||||
import { setupExtensions } from './utils/extension'
|
|
||||||
import { setupCore } from './utils/setup'
|
|
||||||
import { setupReactDevTool } from './utils/dev'
|
|
||||||
|
|
||||||
import { trayManager } from './managers/tray'
|
|
||||||
import { logSystemInfo } from './utils/system'
|
|
||||||
import { registerGlobalShortcuts } from './utils/shortcut'
|
|
||||||
import { registerLogger } from './utils/logger'
|
|
||||||
import { randomBytes } from 'crypto'
|
|
||||||
|
|
||||||
const preloadPath = join(__dirname, 'preload.js')
|
|
||||||
const preloadQuickAskPath = join(__dirname, 'preload.quickask.js')
|
|
||||||
const rendererPath = join(__dirname, '..', 'renderer')
|
|
||||||
const quickAskPath = join(rendererPath, 'search.html')
|
|
||||||
const mainPath = join(rendererPath, 'index.html')
|
|
||||||
|
|
||||||
const mainUrl = 'http://localhost:3000'
|
|
||||||
const quickAskUrl = `${mainUrl}/search`
|
|
||||||
|
|
||||||
const gotTheLock = app.requestSingleInstanceLock()
|
|
||||||
|
|
||||||
if (process.defaultApp) {
|
|
||||||
if (process.argv.length >= 2) {
|
|
||||||
app.setAsDefaultProtocolClient('jan', process.execPath, [
|
|
||||||
resolve(process.argv[1]),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
app.setAsDefaultProtocolClient('jan')
|
|
||||||
}
|
|
||||||
|
|
||||||
const createMainWindow = () => {
|
|
||||||
const startUrl = app.isPackaged ? `file://${mainPath}` : mainUrl
|
|
||||||
windowManager.createMainWindow(preloadPath, startUrl)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate a random token for the app
|
|
||||||
// This token is used for authentication when making request to cortex.cpp server
|
|
||||||
process.env.appToken = randomBytes(16).toString('hex')
|
|
||||||
|
|
||||||
app
|
|
||||||
.whenReady()
|
|
||||||
.then(() => {
|
|
||||||
if (!gotTheLock) {
|
|
||||||
app.quit()
|
|
||||||
throw new Error('Another instance of the app is already running')
|
|
||||||
} else {
|
|
||||||
app.on(
|
|
||||||
'second-instance',
|
|
||||||
(_event, commandLine, _workingDirectory): void => {
|
|
||||||
if (process.platform === 'win32' || process.platform === 'linux') {
|
|
||||||
// this is for handling deeplink on windows and linux
|
|
||||||
// since those OS will emit second-instance instead of open-url
|
|
||||||
const url = commandLine.pop()
|
|
||||||
if (url) {
|
|
||||||
windowManager.sendMainAppDeepLink(url)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
windowManager.showMainWindow()
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.then(setupCore)
|
|
||||||
.then(createUserSpace)
|
|
||||||
.then(registerLogger)
|
|
||||||
.then(migrate)
|
|
||||||
.then(setupExtensions)
|
|
||||||
.then(setupMenu)
|
|
||||||
.then(handleIPCs)
|
|
||||||
.then(() => process.env.CI !== 'e2e' && createQuickAskWindow())
|
|
||||||
.then(createMainWindow)
|
|
||||||
.then(handleAppUpdates)
|
|
||||||
.then(registerGlobalShortcuts)
|
|
||||||
.then(() => {
|
|
||||||
if (!app.isPackaged) {
|
|
||||||
setupReactDevTool()
|
|
||||||
windowManager.mainWindow?.webContents.openDevTools()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.then(() => process.env.CI !== 'e2e' && trayManager.createSystemTray())
|
|
||||||
.then(logSystemInfo)
|
|
||||||
.then(() => {
|
|
||||||
app.on('activate', () => {
|
|
||||||
if (!BrowserWindow.getAllWindows().length) {
|
|
||||||
createMainWindow()
|
|
||||||
} else {
|
|
||||||
windowManager.showMainWindow()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
app.on('open-url', (_event, url) => {
|
|
||||||
windowManager.sendMainAppDeepLink(url)
|
|
||||||
})
|
|
||||||
|
|
||||||
app.on('before-quit', function (_event) {
|
|
||||||
trayManager.destroyCurrentTray()
|
|
||||||
})
|
|
||||||
|
|
||||||
app.once('quit', () => {
|
|
||||||
cleanUpAndQuit()
|
|
||||||
})
|
|
||||||
|
|
||||||
app.once('window-all-closed', () => {
|
|
||||||
// Feature Toggle for Quick Ask
|
|
||||||
if (
|
|
||||||
getAppConfigurations().quick_ask &&
|
|
||||||
!windowManager.isQuickAskWindowDestroyed()
|
|
||||||
)
|
|
||||||
return
|
|
||||||
cleanUpAndQuit()
|
|
||||||
})
|
|
||||||
|
|
||||||
function createQuickAskWindow() {
|
|
||||||
// Feature Toggle for Quick Ask
|
|
||||||
if (!getAppConfigurations().quick_ask) return
|
|
||||||
const startUrl = app.isPackaged ? `file://${quickAskPath}` : quickAskUrl
|
|
||||||
windowManager.createQuickAskWindow(preloadQuickAskPath, startUrl)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles various IPC messages from the renderer process.
|
|
||||||
*/
|
|
||||||
function handleIPCs() {
|
|
||||||
// Inject core handlers for IPCs
|
|
||||||
injectHandler()
|
|
||||||
|
|
||||||
// Handle native IPCs
|
|
||||||
handleAppIPCs()
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
** Suppress Node error messages
|
|
||||||
*/
|
|
||||||
process.on('uncaughtException', function (err) {
|
|
||||||
log(`Error: ${err}`)
|
|
||||||
})
|
|
||||||
@ -1,21 +0,0 @@
|
|||||||
const DEFAULT_MIN_WIDTH = 400
|
|
||||||
const DEFAULT_MIN_HEIGHT = 600
|
|
||||||
|
|
||||||
export const mainWindowConfig: Electron.BrowserWindowConstructorOptions = {
|
|
||||||
skipTaskbar: false,
|
|
||||||
minWidth: DEFAULT_MIN_WIDTH,
|
|
||||||
minHeight: DEFAULT_MIN_HEIGHT,
|
|
||||||
show: true,
|
|
||||||
// we want to go frameless on windows and linux
|
|
||||||
transparent: process.platform === 'darwin',
|
|
||||||
frame: process.platform === 'darwin',
|
|
||||||
titleBarStyle: 'hiddenInset',
|
|
||||||
vibrancy: 'fullscreen-ui',
|
|
||||||
visualEffectState: 'active',
|
|
||||||
backgroundMaterial: 'acrylic',
|
|
||||||
autoHideMenuBar: true,
|
|
||||||
trafficLightPosition: {
|
|
||||||
x: 16,
|
|
||||||
y: 10,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@ -1,22 +0,0 @@
|
|||||||
const DEFAULT_WIDTH = 556
|
|
||||||
|
|
||||||
const DEFAULT_HEIGHT = 60
|
|
||||||
|
|
||||||
export const quickAskWindowConfig: Electron.BrowserWindowConstructorOptions = {
|
|
||||||
width: DEFAULT_WIDTH,
|
|
||||||
height: DEFAULT_HEIGHT,
|
|
||||||
skipTaskbar: true,
|
|
||||||
acceptFirstMouse: true,
|
|
||||||
hasShadow: true,
|
|
||||||
alwaysOnTop: true,
|
|
||||||
show: false,
|
|
||||||
fullscreenable: false,
|
|
||||||
resizable: false,
|
|
||||||
center: true,
|
|
||||||
movable: true,
|
|
||||||
maximizable: false,
|
|
||||||
focusable: true,
|
|
||||||
transparent: false,
|
|
||||||
frame: false,
|
|
||||||
type: 'panel',
|
|
||||||
}
|
|
||||||
@ -1,51 +0,0 @@
|
|||||||
import { join } from 'path'
|
|
||||||
import { Tray, app, Menu } from 'electron'
|
|
||||||
import { windowManager } from '../managers/window'
|
|
||||||
import { getAppConfigurations } from '@janhq/core/node'
|
|
||||||
|
|
||||||
class TrayManager {
|
|
||||||
currentTray: Tray | undefined
|
|
||||||
|
|
||||||
createSystemTray = () => {
|
|
||||||
// Feature Toggle for Quick Ask
|
|
||||||
if (!getAppConfigurations().quick_ask) return
|
|
||||||
|
|
||||||
if (this.currentTray) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const iconPath = join(app.getAppPath(), 'icons', 'icon-tray.png')
|
|
||||||
const tray = new Tray(iconPath)
|
|
||||||
tray.setToolTip(app.getName())
|
|
||||||
|
|
||||||
tray.on('click', () => {
|
|
||||||
windowManager.showQuickAskWindow()
|
|
||||||
})
|
|
||||||
|
|
||||||
// Add context menu for windows only
|
|
||||||
if (process.platform === 'win32') {
|
|
||||||
const contextMenu = Menu.buildFromTemplate([
|
|
||||||
{
|
|
||||||
label: 'Open Jan',
|
|
||||||
type: 'normal',
|
|
||||||
click: () => windowManager.showMainWindow(),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: 'Open Quick Ask',
|
|
||||||
type: 'normal',
|
|
||||||
click: () => windowManager.showQuickAskWindow(),
|
|
||||||
},
|
|
||||||
{ label: 'Quit', type: 'normal', click: () => app.quit() },
|
|
||||||
])
|
|
||||||
|
|
||||||
tray.setContextMenu(contextMenu)
|
|
||||||
}
|
|
||||||
this.currentTray = tray
|
|
||||||
}
|
|
||||||
|
|
||||||
destroyCurrentTray() {
|
|
||||||
this.currentTray?.destroy()
|
|
||||||
this.currentTray = undefined
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const trayManager = new TrayManager()
|
|
||||||
@ -1,215 +0,0 @@
|
|||||||
import { BrowserWindow, app, shell } from 'electron'
|
|
||||||
import { quickAskWindowConfig } from './quickAskWindowConfig'
|
|
||||||
import { mainWindowConfig } from './mainWindowConfig'
|
|
||||||
import { getAppConfigurations, AppEvent } from '@janhq/core/node'
|
|
||||||
import { getBounds, saveBounds } from '../utils/setup'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Manages the current window instance.
|
|
||||||
*/
|
|
||||||
// TODO: refactor this
|
|
||||||
let isAppQuitting = false
|
|
||||||
|
|
||||||
class WindowManager {
|
|
||||||
public mainWindow?: BrowserWindow
|
|
||||||
private _quickAskWindow: BrowserWindow | undefined = undefined
|
|
||||||
private _quickAskWindowVisible = false
|
|
||||||
private _mainWindowVisible = false
|
|
||||||
|
|
||||||
private deeplink: string | undefined
|
|
||||||
/**
 * Creates and configures the main application window.
 *
 * Restores previously saved bounds, wires up deep-link handling on
 * Windows/Linux, persists bounds on resize/move, loads the frontend URL,
 * and defers showing the window until it is ready to show.
 *
 * @param preloadPath - Absolute path to the preload script.
 * @param startUrl - URL of the frontend app to load into the window.
 */
async createMainWindow(preloadPath: string, startUrl: string) {
  const bounds = await getBounds()

  this.mainWindow = new BrowserWindow({
    ...mainWindowConfig,
    width: bounds.width,
    height: bounds.height,
    // Hidden until 'ready-to-show' fires below, avoiding a white flash.
    show: false,
    x: bounds.x,
    y: bounds.y,
    webPreferences: {
      nodeIntegration: true,
      preload: preloadPath,
      // NOTE(review): webSecurity is disabled — presumably to load local
      // resources; confirm, as it weakens renderer isolation.
      webSecurity: false,
    },
  })

  if (process.platform === 'win32' || process.platform === 'linux') {
    /// This is work around for windows deeplink.
    /// second-instance event is not fired when app is not open, so the app
    /// does not received the deeplink.
    const commandLine = process.argv.slice(1)
    if (commandLine.length > 0) {
      const url = commandLine[0]
      this.sendMainAppDeepLink(url)
    }
  }

  // Persist bounds so the next launch restores the same size/position.
  this.mainWindow.on('resized', () => {
    saveBounds(this.mainWindow?.getBounds())
  })

  this.mainWindow.on('moved', () => {
    saveBounds(this.mainWindow?.getBounds())
  })

  /* Load frontend app to the window */
  this.mainWindow.loadURL(startUrl)

  /* Open external links in the default browser */
  this.mainWindow.webContents.setWindowOpenHandler(({ url }) => {
    shell.openExternal(url)
    return { action: 'deny' }
  })

  app.on('before-quit', function () {
    isAppQuitting = true
  })

  // With Quick Ask enabled, closing the window only hides it so the app
  // keeps running in the background; a real quit is signalled by the flag.
  windowManager.mainWindow?.on('close', function (evt) {
    // Feature Toggle for Quick Ask
    if (!getAppConfigurations().quick_ask) return

    if (!isAppQuitting) {
      evt.preventDefault()
      windowManager.hideMainWindow()
    }
  })

  windowManager.mainWindow?.on('ready-to-show', function () {
    windowManager.mainWindow?.show()
  })
}
|
|
||||||
|
|
||||||
/**
 * Creates the quick-ask popup window and loads the frontend into it.
 * The window auto-hides whenever it loses focus.
 */
createQuickAskWindow(preloadPath: string, startUrl: string): void {
  this._quickAskWindow = new BrowserWindow({
    ...quickAskWindowConfig,
    webPreferences: {
      nodeIntegration: true,
      preload: preloadPath,
      // NOTE(review): webSecurity disabled — confirm this is intentional.
      webSecurity: false,
    },
  })

  this._quickAskWindow.loadURL(startUrl)
  // Dismiss the popup as soon as it loses focus.
  this._quickAskWindow.on('blur', () => {
    this.hideQuickAskWindow()
  })
}
|
|
||||||
|
|
||||||
/** Returns whether the main window is currently shown. */
isMainWindowVisible(): boolean {
  return this._mainWindowVisible
}
|
|
||||||
|
|
||||||
/** Hides the main window and records its visibility state. */
hideMainWindow(): void {
  this.mainWindow?.hide()
  this._mainWindowVisible = false
}
|
|
||||||
|
|
||||||
/** Shows the main window and records its visibility state. */
showMainWindow(): void {
  this.mainWindow?.show()
  this._mainWindowVisible = true
}
|
|
||||||
|
|
||||||
/** Hides the quick-ask window and records its visibility state. */
hideQuickAskWindow(): void {
  this._quickAskWindow?.hide()
  this._quickAskWindowVisible = false
}
|
|
||||||
|
|
||||||
/** Shows the quick-ask window and records its visibility state. */
showQuickAskWindow(): void {
  this._quickAskWindow?.show()
  this._quickAskWindowVisible = true
}
|
|
||||||
|
|
||||||
/**
 * Closes and destroys the quick-ask window, releasing its reference.
 * No-op when the window has already been destroyed.
 */
closeQuickAskWindow(): void {
  if (this._quickAskWindow?.isDestroyed()) return
  this._quickAskWindow?.close()
  this._quickAskWindow?.destroy()
  this._quickAskWindow = undefined
  this._quickAskWindowVisible = false
}
|
|
||||||
|
|
||||||
/** Returns whether the quick-ask window is currently shown. */
isQuickAskWindowVisible(): boolean {
  return this._quickAskWindowVisible
}
|
|
||||||
|
|
||||||
/**
 * Returns whether the quick-ask window is destroyed.
 * A never-created window (undefined) is reported as destroyed.
 */
isQuickAskWindowDestroyed(): boolean {
  return this._quickAskWindow?.isDestroyed() ?? true
}
|
|
||||||
|
|
||||||
/**
 * Expand the quick ask window by growing its height.
 * @param heightOffset - Extra height (px) added to the configured base height.
 */
expandQuickAskWindow(heightOffset: number): void {
  const width = quickAskWindowConfig.width!
  const height = quickAskWindowConfig.height! + heightOffset
  // Raise the minimum first so setSize is not clamped by the old minimum.
  this._quickAskWindow?.setMinimumSize(width, height)
  this._quickAskWindow?.setSize(width, height, true)
}
|
|
||||||
|
|
||||||
/**
 * Send the selected text to the quick ask window.
 * @param selectedText - Text the user selected in another application.
 */
sendQuickAskSelectedText(selectedText: string): void {
  this._quickAskWindow?.webContents.send(
    AppEvent.onSelectedText,
    selectedText
  )
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Try to send the deep link to the main app.
|
|
||||||
*/
|
|
||||||
sendMainAppDeepLink(url: string): void {
|
|
||||||
this.deeplink = url
|
|
||||||
const interval = setInterval(() => {
|
|
||||||
if (!this.deeplink) clearInterval(interval)
|
|
||||||
const mainWindow = this.mainWindow
|
|
||||||
if (mainWindow) {
|
|
||||||
mainWindow.webContents.send(AppEvent.onDeepLink, this.deeplink)
|
|
||||||
if (mainWindow.isMinimized()) mainWindow.restore()
|
|
||||||
mainWindow.focus()
|
|
||||||
}
|
|
||||||
}, 500)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Send main view state to the main app.
 * No-op when the main window is missing or already destroyed.
 * @param route - The route/view identifier to broadcast to the renderer.
 */
sendMainViewState(route: string) {
  if (this.mainWindow && !this.mainWindow.isDestroyed()) {
    this.mainWindow.webContents.send(AppEvent.onMainViewStateChange, route)
  }
}
|
|
||||||
|
|
||||||
/**
 * Clean up all windows.
 * Closes and destroys both the main and quick-ask windows (when not
 * already destroyed) and resets their references and visibility flags.
 */
cleanUp(): void {
  if (!this.mainWindow?.isDestroyed()) {
    this.mainWindow?.close()
    this.mainWindow?.destroy()
    this.mainWindow = undefined
    this._mainWindowVisible = false
  }
  if (!this._quickAskWindow?.isDestroyed()) {
    this._quickAskWindow?.close()
    this._quickAskWindow?.destroy()
    this._quickAskWindow = undefined
    this._quickAskWindowVisible = false
  }
}
|
|
||||||
|
|
||||||
/**
 * Acknowledges that the window has received a deep link. We can remove it.
 * Clearing the URL stops the retry loop started by sendMainAppDeepLink().
 */
ackDeepLink() {
  this.deeplink = undefined
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Singleton window manager shared across the Electron main process.
export const windowManager = new WindowManager()
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
const yaml = require('js-yaml')
const fs = require('fs')

// Merge two electron-builder YAML metadata files into a destination file.
// Usage: node <script> <file1> <file2> <destination>
// get two file paths from arguments:
const [, , ...args] = process.argv
const file1 = args[0]
const file2 = args[1]
const file3 = args[2]

// check that all arguments are present and throw error instead
if (!file1 || !file2 || !file3) {
  throw new Error(
    'Please provide 3 file paths as arguments: path to file1, to file2 and destination path'
  )
}

const doc1 = yaml.load(fs.readFileSync(file1, 'utf8'))
console.log('doc1: ', doc1)

const doc2 = yaml.load(fs.readFileSync(file2, 'utf8'))
console.log('doc2: ', doc2)

// Shallow-merge, letting doc2 win on conflicting keys, then combine the
// `files` lists from both documents (doc2's entries first, matching the
// original ordering).
// Fix: the original did `merged.files.push(...doc1.files)`, which mutated
// doc2.files in place, duplicated doc1's entries onto themselves when doc2
// had no `files` key, and crashed when either document lacked `files`.
const merged = { ...doc1, ...doc2 }
merged.files = [...(doc2.files ?? []), ...(doc1.files ?? [])]

console.log('merged', merged)

const mergedYml = yaml.dump(merged)
fs.writeFileSync(file3, mergedYml, 'utf8')
|
|
||||||
@ -1,146 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "jan",
|
|
||||||
"version": "0.1.1740752217",
|
|
||||||
"main": "./build/main.js",
|
|
||||||
"author": "Jan <service@jan.ai>",
|
|
||||||
"license": "MIT",
|
|
||||||
"productName": "Jan",
|
|
||||||
"homepage": "https://github.com/menloresearch/jan/tree/main/electron",
|
|
||||||
"description": "Use offline LLMs with your own data. Run open source models like Llama2 or Falcon on your internal computers/servers.",
|
|
||||||
"build": {
|
|
||||||
"appId": "jan.ai.app",
|
|
||||||
"productName": "Jan",
|
|
||||||
"files": [
|
|
||||||
"renderer/**/*",
|
|
||||||
"build/**/*.{js,map}",
|
|
||||||
"pre-install",
|
|
||||||
"themes",
|
|
||||||
"scripts/**/*",
|
|
||||||
"icons/**/*",
|
|
||||||
"themes",
|
|
||||||
"shared"
|
|
||||||
],
|
|
||||||
"asarUnpack": [
|
|
||||||
"pre-install",
|
|
||||||
"themes",
|
|
||||||
"docs",
|
|
||||||
"scripts",
|
|
||||||
"icons",
|
|
||||||
"themes",
|
|
||||||
"shared"
|
|
||||||
],
|
|
||||||
"publish": [
|
|
||||||
{
|
|
||||||
"provider": "github",
|
|
||||||
"owner": "janhq",
|
|
||||||
"repo": "jan"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"extends": null,
|
|
||||||
"mac": {
|
|
||||||
"type": "distribution",
|
|
||||||
"entitlements": "./entitlements.mac.plist",
|
|
||||||
"entitlementsInherit": "./entitlements.mac.plist",
|
|
||||||
"notarize": {
|
|
||||||
"teamId": "F8AH6NHVY5"
|
|
||||||
},
|
|
||||||
"icon": "icons/icon.png"
|
|
||||||
},
|
|
||||||
"linux": {
|
|
||||||
"target": [
|
|
||||||
"deb"
|
|
||||||
],
|
|
||||||
"category": "Utility",
|
|
||||||
"icon": "icons/"
|
|
||||||
},
|
|
||||||
"win": {
|
|
||||||
"icon": "icons/icon.png",
|
|
||||||
"target": [
|
|
||||||
"nsis"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"nsis": {
|
|
||||||
"oneClick": true,
|
|
||||||
"installerIcon": "icons/icon.ico",
|
|
||||||
"uninstallerIcon": "icons/icon.ico",
|
|
||||||
"include": "scripts/uninstaller.nsh",
|
|
||||||
"deleteAppDataOnUninstall": true
|
|
||||||
},
|
|
||||||
"protocols": [
|
|
||||||
{
|
|
||||||
"name": "Jan",
|
|
||||||
"schemes": [
|
|
||||||
"jan"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"artifactName": "jan-${os}-${arch}-${version}.${ext}"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"lint": "eslint . --ext \".js,.jsx,.ts,.tsx\"",
|
|
||||||
"test:e2e": "DEBUG=pw:browser xvfb-maybe -- playwright test --workers=1",
|
|
||||||
"copy:assets": "rimraf --glob \"./pre-install/*.tgz\" && cpx \"../pre-install/*.tgz\" \"./pre-install\"",
|
|
||||||
"version-patch": "run-script-os",
|
|
||||||
"version-patch:darwin:linux": "jq '.version' package.json | tr -d '\"' > .version.bak && jq --arg ver \"0.1.$(date +%s)\" '.version = $ver' package.json > package.tmp && mv package.tmp package.json",
|
|
||||||
"version-patch:win32": "node -e \"const fs=require('fs');const pkg=require('./package.json');const bak=pkg.version;fs.writeFileSync('.version.bak',bak);pkg.version='0.1.'+Math.floor(Date.now()/1000);fs.writeFileSync('package.json',JSON.stringify(pkg,null,2));\"",
|
|
||||||
"version-restore": "run-script-os",
|
|
||||||
"version-restore:darwin:linux": "jq --arg ver $(cat .version.bak) '.version = $ver' package.json > package.tmp && mv package.tmp package.json && rm .version.bak",
|
|
||||||
"version-restore:win32": "node -e \"const fs=require('fs');const pkg=require('./package.json');const bak=fs.readFileSync('.version.bak','utf8');pkg.version=bak;fs.writeFileSync('package.json',JSON.stringify(pkg,null,2));\"",
|
|
||||||
"dev:darwin:linux": "yarn copy:assets && tsc -p . && yarn version-patch && electron . && yarn version-restore",
|
|
||||||
"dev:windows": "yarn copy:assets && tsc -p . && electron .",
|
|
||||||
"dev": "run-script-os",
|
|
||||||
"compile": "tsc -p .",
|
|
||||||
"start": "electron .",
|
|
||||||
"build": "yarn copy:assets && run-script-os",
|
|
||||||
"build:test": "yarn copy:assets && run-script-os",
|
|
||||||
"build:test:darwin": "tsc -p . && electron-builder -p never -m --dir",
|
|
||||||
"build:test:win32": "tsc -p . && electron-builder -p never -w --dir",
|
|
||||||
"build:test:linux": "tsc -p . && electron-builder -p never -l --dir",
|
|
||||||
"build:darwin": "tsc -p . && electron-builder -p never -m --universal",
|
|
||||||
"build:win32": "tsc -p . && electron-builder -p never -w",
|
|
||||||
"build:linux": "tsc -p . && electron-builder -p never -l deb -l AppImage",
|
|
||||||
"build:publish": "yarn copy:assets && run-script-os",
|
|
||||||
"build:publish:darwin": "tsc -p . && electron-builder -p always -m --universal",
|
|
||||||
"build:publish:win32": "tsc -p . && electron-builder -p always -w",
|
|
||||||
"build:publish:linux": "tsc -p . && electron-builder -p always -l deb -l AppImage"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@alumna/reflect": "^1.1.3",
|
|
||||||
"@janhq/core": "link:../core",
|
|
||||||
"@janhq/server": "link:../server",
|
|
||||||
"@kirillvakalov/nut-tree__nut-js": "4.2.1-2",
|
|
||||||
"@npmcli/arborist": "^7.1.0",
|
|
||||||
"electron-store": "^8.1.0",
|
|
||||||
"electron-updater": "^6.1.7",
|
|
||||||
"fs-extra": "^11.2.0",
|
|
||||||
"pacote": "^21.0.0",
|
|
||||||
"request": "^2.88.2",
|
|
||||||
"request-progress": "^3.0.0",
|
|
||||||
"ulidx": "^2.3.0"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@electron/notarize": "^2.5.0",
|
|
||||||
"@playwright/test": "^1.38.1",
|
|
||||||
"@reportportal/agent-js-playwright": "^5.1.7",
|
|
||||||
"@types/npmcli__arborist": "^5.6.4",
|
|
||||||
"@types/pacote": "^11.1.7",
|
|
||||||
"@types/request": "^2.48.12",
|
|
||||||
"@typescript-eslint/eslint-plugin": "^6.7.3",
|
|
||||||
"@typescript-eslint/parser": "^6.7.3",
|
|
||||||
"electron": "30.0.6",
|
|
||||||
"electron-builder": "^24.13.3",
|
|
||||||
"electron-builder-squirrel-windows": "^24.13.3",
|
|
||||||
"electron-devtools-installer": "^3.2.0",
|
|
||||||
"electron-playwright-helpers": "^1.6.0",
|
|
||||||
"eslint": "8.57.0",
|
|
||||||
"eslint-plugin-react": "^7.34.0",
|
|
||||||
"rimraf": "^5.0.5",
|
|
||||||
"run-script-os": "^1.1.6",
|
|
||||||
"typescript": "^5.3.3",
|
|
||||||
"xvfb-maybe": "^0.2.1"
|
|
||||||
},
|
|
||||||
"installConfig": {
|
|
||||||
"hoistingLimits": "workspaces"
|
|
||||||
},
|
|
||||||
"packageManager": "yarn@4.5.3"
|
|
||||||
}
|
|
||||||
@ -1,14 +0,0 @@
|
|||||||
import { PlaywrightTestConfig } from '@playwright/test'

/**
 * Playwright configuration for the Electron e2e suite.
 * Tests live in ./tests/e2e, run with no retries under a 350s global
 * budget; screenshots, video, and traces are kept only for failures.
 */
const config: PlaywrightTestConfig = {
  testDir: './tests/e2e',
  retries: 0,
  globalTimeout: 350000,
  use: {
    screenshot: 'only-on-failure',
    video: 'retain-on-failure',
    trace: 'retain-on-failure',
  },
  // reporter: [['html', { outputFolder: './playwright-report' }]],
}
export default config
|
|
||||||
@ -1,32 +0,0 @@
|
|||||||
/**
|
|
||||||
* Exposes a set of APIs to the renderer process via the contextBridge object.
|
|
||||||
* @module preload
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { APIEvents, APIRoutes } from '@janhq/core/node'
|
|
||||||
import { contextBridge, ipcRenderer } from 'electron'
|
|
||||||
|
|
||||||
const interfaces: { [key: string]: (...args: any[]) => any } = {}
|
|
||||||
|
|
||||||
// Loop over each route in APIRoutes
|
|
||||||
APIRoutes.forEach((method) => {
|
|
||||||
// For each method, create a function on the interfaces object
|
|
||||||
// This function invokes the method on the ipcRenderer with any provided arguments
|
|
||||||
|
|
||||||
interfaces[method] = (...args: any[]) => ipcRenderer.invoke(method, ...args)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Loop over each method in APIEvents
|
|
||||||
APIEvents.forEach((method) => {
|
|
||||||
// For each method, create a function on the interfaces object
|
|
||||||
// This function sets up an event listener on the ipcRenderer for the method
|
|
||||||
// The handler for the event is provided as an argument to the function
|
|
||||||
interfaces[method] = (handler: any) => ipcRenderer.on(method, handler)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Expose the 'interfaces' object in the main world under the name 'electronAPI'
|
|
||||||
// This allows the renderer process to access these methods directly
|
|
||||||
contextBridge.exposeInMainWorld('electronAPI', {
|
|
||||||
...interfaces,
|
|
||||||
isQuickAsk: () => true,
|
|
||||||
})
|
|
||||||
@ -1,60 +0,0 @@
|
|||||||
/**
|
|
||||||
* Exposes a set of APIs to the renderer process via the contextBridge object.
|
|
||||||
* @module preload
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { APIEvents, APIRoutes, AppConfiguration } from '@janhq/core/node'
|
|
||||||
import { contextBridge, ipcRenderer } from 'electron'
|
|
||||||
import { readdirSync } from 'fs'
|
|
||||||
|
|
||||||
const interfaces: { [key: string]: (...args: any[]) => any } = {}
|
|
||||||
|
|
||||||
// Loop over each route in APIRoutes
|
|
||||||
APIRoutes.forEach((method) => {
|
|
||||||
// For each method, create a function on the interfaces object
|
|
||||||
// This function invokes the method on the ipcRenderer with any provided arguments
|
|
||||||
|
|
||||||
interfaces[method] = (...args: any[]) => ipcRenderer.invoke(method, ...args)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Loop over each method in APIEvents
|
|
||||||
APIEvents.forEach((method) => {
|
|
||||||
// For each method, create a function on the interfaces object
|
|
||||||
// This function sets up an event listener on the ipcRenderer for the method
|
|
||||||
// The handler for the event is provided as an argument to the function
|
|
||||||
interfaces[method] = (handler: any) => ipcRenderer.on(method, handler)
|
|
||||||
})
|
|
||||||
|
|
||||||
interfaces['changeDataFolder'] = async (path) => {
|
|
||||||
const appConfiguration: AppConfiguration = await ipcRenderer.invoke(
|
|
||||||
'getAppConfigurations'
|
|
||||||
)
|
|
||||||
const currentJanDataFolder = appConfiguration.data_folder
|
|
||||||
appConfiguration.data_folder = path
|
|
||||||
const reflect = require('@alumna/reflect')
|
|
||||||
const { err } = await reflect({
|
|
||||||
src: currentJanDataFolder,
|
|
||||||
dest: path,
|
|
||||||
recursive: true,
|
|
||||||
delete: false,
|
|
||||||
overwrite: true,
|
|
||||||
errorOnExist: false,
|
|
||||||
})
|
|
||||||
if (err) {
|
|
||||||
console.error(err)
|
|
||||||
throw err
|
|
||||||
}
|
|
||||||
await ipcRenderer.invoke('updateAppConfiguration', appConfiguration)
|
|
||||||
}
|
|
||||||
|
|
||||||
interfaces['isDirectoryEmpty'] = async (path) => {
|
|
||||||
const dirChildren = await readdirSync(path)
|
|
||||||
return dirChildren.filter((x) => x !== '.DS_Store').length === 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// Expose the 'interfaces' object in the main world under the name 'electronAPI'
|
|
||||||
// This allows the renderer process to access these methods directly
|
|
||||||
contextBridge.exposeInMainWorld('electronAPI', {
|
|
||||||
...interfaces,
|
|
||||||
isQuickAsk: () => false,
|
|
||||||
})
|
|
||||||
@ -1,46 +0,0 @@
|
|||||||
!include nsDialogs.nsh

XPStyle on

; Custom uninstall hook: on a real uninstall (not an in-place update),
; force-close any running Jan instance, then remove the install dir,
; registry key, shortcuts, and per-user application data.
!macro customUnInstall
${ifNot} ${isUpdated}
; Define the process name of your Electron app
StrCpy $0 "Jan.exe"

; Check if the application is running
nsExec::ExecToStack 'tasklist /FI "IMAGENAME eq $0" /NH'
Pop $1

StrCmp $1 "" notRunning

; If the app is running, notify the user and attempt to close it
MessageBox MB_OK "Jan is being uninstalled, force close app." IDOK forceClose

forceClose:
; Attempt to kill the running application
nsExec::ExecToStack 'taskkill /F /IM $0'
Pop $1

; Proceed with uninstallation
Goto continueUninstall

notRunning:
; If the app is not running, proceed with uninstallation
Goto continueUninstall

continueUninstall:
; Proceed with uninstallation
DeleteRegKey HKLM "Software\Jan"
RMDir /r "$INSTDIR"
Delete "$INSTDIR\*.*"

; Clean up shortcuts and app data
Delete "$DESKTOP\Jan.lnk"
Delete "$STARTMENU\Programs\Jan.lnk"
RMDir /r "$APPDATA\Jan"
RMDir /r "$LOCALAPPDATA\jan-updater"

; Close the uninstaller
Quit
${endIf}
!macroend
|
|
||||||
@ -1,69 +0,0 @@
|
|||||||
const { exec } = require('child_process')
|
|
||||||
|
|
||||||
function execCommandWithRetry(command, retries = 3) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const execute = (attempt) => {
|
|
||||||
exec(command, (error, stdout, stderr) => {
|
|
||||||
if (error) {
|
|
||||||
console.error(`Error: ${error}`)
|
|
||||||
if (attempt < retries) {
|
|
||||||
console.log(`Retrying... Attempt ${attempt + 1}`)
|
|
||||||
execute(attempt + 1)
|
|
||||||
} else {
|
|
||||||
return reject(error)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
console.log(`stdout: ${stdout}`)
|
|
||||||
console.error(`stderr: ${stderr}`)
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
execute(0)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
function sign({
|
|
||||||
path,
|
|
||||||
name,
|
|
||||||
certUrl,
|
|
||||||
clientId,
|
|
||||||
tenantId,
|
|
||||||
clientSecret,
|
|
||||||
certName,
|
|
||||||
timestampServer,
|
|
||||||
version,
|
|
||||||
}) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const command = `azuresigntool.exe sign -kvu "${certUrl}" -kvi "${clientId}" -kvt "${tenantId}" -kvs "${clientSecret}" -kvc "${certName}" -tr "${timestampServer}" -v "${path}"`
|
|
||||||
execCommandWithRetry(command)
|
|
||||||
.then(resolve)
|
|
||||||
.catch(reject)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.default = async function (options) {
|
|
||||||
const certUrl = process.env.AZURE_KEY_VAULT_URI
|
|
||||||
const clientId = process.env.AZURE_CLIENT_ID
|
|
||||||
const tenantId = process.env.AZURE_TENANT_ID
|
|
||||||
const clientSecret = process.env.AZURE_CLIENT_SECRET
|
|
||||||
const certName = process.env.AZURE_CERT_NAME
|
|
||||||
const timestampServer = 'http://timestamp.globalsign.com/tsa/r6advanced1'
|
|
||||||
|
|
||||||
try {
|
|
||||||
await sign({
|
|
||||||
path: options.path,
|
|
||||||
name: 'jan-win-x64',
|
|
||||||
certUrl,
|
|
||||||
clientId,
|
|
||||||
tenantId,
|
|
||||||
clientSecret,
|
|
||||||
certName,
|
|
||||||
timestampServer,
|
|
||||||
version: options.version,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to sign after 3 attempts:', error)
|
|
||||||
process.exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,10 +0,0 @@
|
|||||||
const jestRunner = require('jest-runner');

/**
 * Jest runner that silently skips zero-byte test files so empty
 * placeholder files do not fail the suite.
 */
class EmptyTestFileRunner extends jestRunner.default {
  async runTests(tests, watcher, onStart, onResult, onFailure, options) {
    // Keep only test files whose size on disk is greater than zero.
    const runnableTests = tests.filter(
      (test) => test.context.hasteFS.getSize(test.path) > 0
    );
    return super.runTests(
      runnableTests,
      watcher,
      onStart,
      onResult,
      onFailure,
      options
    );
  }
}

module.exports = EmptyTestFileRunner;
|
|
||||||
@ -1,4 +0,0 @@
|
|||||||
// Shared constants for the e2e test harness.
export const Constants = {
  // Directory where Playwright stores recorded videos.
  VIDEO_DIR: './playwright-video',
  // Default timeout in milliseconds (string; parsed with parseInt by callers).
  TIMEOUT: '300000',
}
|
|
||||||
@ -1,126 +0,0 @@
|
|||||||
import {
|
|
||||||
_electron as electron,
|
|
||||||
BrowserContext,
|
|
||||||
ElectronApplication,
|
|
||||||
expect,
|
|
||||||
Page,
|
|
||||||
test as base,
|
|
||||||
} from '@playwright/test'
|
|
||||||
import {
|
|
||||||
ElectronAppInfo,
|
|
||||||
findLatestBuild,
|
|
||||||
parseElectronApp,
|
|
||||||
stubDialog,
|
|
||||||
} from 'electron-playwright-helpers'
|
|
||||||
import { Constants } from './constants'
|
|
||||||
import { HubPage } from '../pages/hubPage'
|
|
||||||
import { CommonActions } from '../pages/commonActions'
|
|
||||||
import { rmSync } from 'fs'
|
|
||||||
import * as path from 'path'
|
|
||||||
|
|
||||||
// Shared handles for the Electron app under test; populated by
// setupElectron() and consumed by the fixtures and test files.
export let electronApp: ElectronApplication
export let page: Page
export let appInfo: ElectronAppInfo
// Test timeout (ms); overridable via the TEST_TIMEOUT env var.
export const TIMEOUT = parseInt(process.env.TEST_TIMEOUT || Constants.TIMEOUT)

/**
 * Launches the packaged Electron build found under dist/ and waits for
 * its first window. Populates electronApp, appInfo, and page.
 */
export async function setupElectron() {
  console.log(`TEST TIMEOUT: ${TIMEOUT}`)

  // Signal e2e mode to the app under test.
  process.env.CI = 'e2e'

  const latestBuild = findLatestBuild('dist')
  expect(latestBuild).toBeTruthy()

  // parse the packaged Electron app and find paths and other info
  appInfo = parseElectronApp(latestBuild)
  expect(appInfo).toBeTruthy()

  electronApp = await electron.launch({
    args: [appInfo.main, '--no-sandbox'], // main file from package.json
    executablePath: appInfo.executable, // path to the Electron executable
    // recordVideo: { dir: Constants.VIDEO_DIR }, // Specify the directory for video recordings
  })
  // Auto-answer native message boxes (e.g. update prompts) with button 1.
  await stubDialog(electronApp, 'showMessageBox', { response: 1 })

  page = await electronApp.firstWindow({
    timeout: TIMEOUT,
  })
}
|
|
||||||
|
|
||||||
/** Closes the first window and shuts down the Electron app under test. */
export async function teardownElectron() {
  await page.close()
  await electronApp.close()
}
|
|
||||||
|
|
||||||
/**
 * this fixture is needed to record and attach videos / screenshot on failed tests when
 * tests are run in serial mode (i.e. browser is not closed between tests)
 */
export const test = base.extend<
  {
    commonActions: CommonActions
    hubPage: HubPage
    attachVideoPage: Page
    attachScreenshotsToReport: void
  },
  { createVideoContext: BrowserContext }
>({
  // Fresh CommonActions per test, bound to the shared Electron page.
  commonActions: async ({ request }, use, testInfo) => {
    await use(new CommonActions(page, testInfo))
  },
  // Hub page object built on top of commonActions.
  hubPage: async ({ commonActions }, use) => {
    await use(new HubPage(page, commonActions))
  },
  // Worker-scoped context of the running Electron app, used to retrieve
  // any recorded video after a failure.
  createVideoContext: [
    async ({ playwright }, use) => {
      const context = electronApp.context()
      await use(context)
    },
    { scope: 'worker' },
  ],

  // Auto fixture: after a failed test, attach the recorded video (if any)
  // from the first page of the worker-scoped context.
  attachVideoPage: [
    async ({ createVideoContext }, use, testInfo) => {
      await use(page)

      if (testInfo.status !== testInfo.expectedStatus) {
        const path = await createVideoContext.pages()[0].video()?.path()
        await createVideoContext.close()
        await testInfo.attach('video', {
          path: path,
        })
      }
    },
    { scope: 'test', auto: true },
  ],

  // Auto fixture: after a failed test, attach a full-page screenshot.
  attachScreenshotsToReport: [
    async ({ commonActions }, use, testInfo) => {
      await use()

      // After the test, we can check whether the test passed or failed.
      if (testInfo.status !== testInfo.expectedStatus) {
        await commonActions.takeScreenshot('')
      }
    },
    { auto: true },
  ],
})
|
|
||||||
|
|
||||||
test.beforeAll(async () => {
  // Start from a clean slate: remove test data left by previous runs.
  rmSync(path.join(__dirname, '../../test-data'), {
    recursive: true,
    force: true,
  })

  test.setTimeout(TIMEOUT)
  await setupElectron()
  // Wait for the app logo to confirm the renderer has loaded.
  await page.waitForSelector('img[alt="Jan - Logo"]', {
    state: 'visible',
    timeout: TIMEOUT,
  })
})

test.afterAll(async () => {
  // NOTE(review): teardown is intentionally disabled, leaving the app
  // running after the suite — confirm whether this is still desired.
  // teardownElectron()
})
|
|
||||||
@ -1,25 +0,0 @@
|
|||||||
import { test, appInfo, page, TIMEOUT } from '../config/fixtures'
|
|
||||||
import { expect } from '@playwright/test'
|
|
||||||
|
|
||||||
test.beforeAll(async () => {
  // Sanity-check the parsed build metadata before exercising the UI.
  expect(appInfo).toMatchObject({
    asar: true,
    executable: expect.anything(),
    main: expect.anything(),
    name: 'jan',
    packageJson: expect.objectContaining({ name: 'jan' }),
    platform: process.platform,
    resourcesDir: expect.anything(),
  })
})

test('explores hub', async ({ hubPage }) => {
  await hubPage.navigateByMenu()
  await hubPage.verifyContainerVisible()
  await hubPage.scrollToBottom()
  // At least one model card should expose its setup button.
  const useModelBtn = page.getByTestId(/^setup-btn/).first()

  await expect(useModelBtn).toBeVisible({
    timeout: TIMEOUT,
  })
})
|
|
||||||
@ -1,15 +0,0 @@
|
|||||||
import { expect } from '@playwright/test'
|
|
||||||
import { page, test, TIMEOUT } from '../config/fixtures'
|
|
||||||
|
|
||||||
test('renders left navigation panel', async () => {
  // The Thread entry should be present in the left navigation.
  const threadBtn = page.getByTestId('Thread').first()
  await expect(threadBtn).toBeVisible({ timeout: TIMEOUT })
  // Chat section should be there
  await page.getByTestId('Local API Server').first().click({
    timeout: TIMEOUT,
  })
  // Navigating to Local API Server should render its view.
  const localServer = page.getByTestId('local-server-testid').first()
  await expect(localServer).toBeVisible({
    timeout: TIMEOUT,
  })
})
|
|
||||||
@ -1,11 +0,0 @@
|
|||||||
import { expect } from '@playwright/test'
|
|
||||||
|
|
||||||
import { test, page, TIMEOUT } from '../config/fixtures'
|
|
||||||
|
|
||||||
test('shows settings', async () => {
  // Open the Settings section from the navigation.
  await page.getByTestId('Settings').first().click({
    timeout: TIMEOUT,
  })
  // The settings description should become visible.
  const settingDescription = page.getByTestId('testid-setting-description')
  await expect(settingDescription).toBeVisible({ timeout: TIMEOUT })
})
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
import { expect } from '@playwright/test'
|
|
||||||
import { page, test, TIMEOUT } from '../config/fixtures'
|
|
||||||
|
|
||||||
test('show onboarding screen without any threads created or models downloaded', async () => {
  await page.getByTestId('Thread').first().click({
    timeout: TIMEOUT,
  })
  // Dismiss the product-analytics consent prompt if it appears.
  const denyButton = page.locator('[data-testid="btn-deny-product-analytics"]')

  if ((await denyButton.count()) > 0) {
    await denyButton.click({ force: true })
  }

  // With no threads or models, the onboarding screen should be shown.
  const onboardScreen = page.getByTestId('onboard-screen')
  await expect(onboardScreen).toBeVisible({
    timeout: TIMEOUT,
  })
})
|
|
||||||
@ -1,59 +0,0 @@
|
|||||||
import { Page, expect } from '@playwright/test'
|
|
||||||
import { CommonActions } from './commonActions'
|
|
||||||
import { TIMEOUT } from '../config/fixtures'
|
|
||||||
|
|
||||||
export class BasePage {
|
|
||||||
menuId: string
|
|
||||||
|
|
||||||
constructor(
|
|
||||||
protected readonly page: Page,
|
|
||||||
readonly action: CommonActions,
|
|
||||||
protected containerId: string
|
|
||||||
) {}
|
|
||||||
|
|
||||||
public getValue(key: string) {
|
|
||||||
return this.action.getValue(key)
|
|
||||||
}
|
|
||||||
|
|
||||||
public setValue(key: string, value: string) {
|
|
||||||
this.action.setValue(key, value)
|
|
||||||
}
|
|
||||||
|
|
||||||
async takeScreenshot(name: string = '') {
|
|
||||||
await this.action.takeScreenshot(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
async navigateByMenu() {
|
|
||||||
await this.clickFirstElement(this.menuId)
|
|
||||||
}
|
|
||||||
|
|
||||||
async clickFirstElement(testId: string) {
|
|
||||||
await this.page.getByTestId(testId).first().click()
|
|
||||||
}
|
|
||||||
|
|
||||||
async verifyContainerVisible() {
|
|
||||||
const container = this.page.getByTestId(this.containerId)
|
|
||||||
expect(container.isVisible()).toBeTruthy()
|
|
||||||
}
|
|
||||||
|
|
||||||
async scrollToBottom() {
|
|
||||||
await this.page.evaluate(() => {
|
|
||||||
window.scrollTo(0, document.body.scrollHeight)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async waitUpdateLoader() {
|
|
||||||
await this.isElementVisible('img[alt="Jan - Logo"]')
|
|
||||||
}
|
|
||||||
|
|
||||||
//wait and find a specific element with its selector and return Visible
|
|
||||||
async isElementVisible(selector: any) {
|
|
||||||
let isVisible = true
|
|
||||||
await this.page
|
|
||||||
.waitForSelector(selector, { state: 'visible', timeout: TIMEOUT })
|
|
||||||
.catch(() => {
|
|
||||||
isVisible = false
|
|
||||||
})
|
|
||||||
return isVisible
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,34 +0,0 @@
|
|||||||
import { Page, TestInfo } from '@playwright/test'
|
|
||||||
import { page } from '../config/fixtures'
|
|
||||||
|
|
||||||
/**
 * Helpers shared by all page objects: screenshots attached to the report
 * and a per-test key/value scratch store.
 */
export class CommonActions {
  // Per-test scratch key/value store shared between page objects.
  private testData = new Map<string, string>()

  constructor(
    public page: Page,
    public testInfo: TestInfo
  ) {}

  /**
   * Takes a full-page screenshot and attaches it to the test report.
   * NOTE(review): this uses the module-level `page` from the fixtures,
   * not `this.page` — confirm whether that is intentional.
   */
  async takeScreenshot(name: string) {
    const screenshot = await page.screenshot({
      fullPage: true,
    })
    // Attachment name: test title plus caller-supplied suffix or a timestamp.
    const attachmentName = `${this.testInfo.title}_${name || new Date().toISOString().slice(5, 19).replace(/[-:]/g, '').replace('T', '_')}`
    await this.testInfo.attach(attachmentName.replace(/\s+/g, ''), {
      body: screenshot,
      contentType: 'image/png',
    })
  }

  async hooks() {
    console.log('hook from the scenario page')
  }

  /** Stores a value in the per-test scratch store. */
  setValue(key: string, value: string) {
    this.testData.set(key, value)
  }

  /** Reads a value from the per-test scratch store. */
  getValue(key: string) {
    return this.testData.get(key)
  }
}
|
|
||||||