diff --git a/.github/scripts/electron-checksum.py b/.github/scripts/electron-checksum.py new file mode 100644 index 000000000..fba4ff609 --- /dev/null +++ b/.github/scripts/electron-checksum.py @@ -0,0 +1,28 @@ +import hashlib +import base64 +import sys + +def hash_file(file_path): + # Create a SHA-512 hash object + sha512 = hashlib.sha512() + + # Read and update the hash object with the content of the file + with open(file_path, 'rb') as f: + while True: + data = f.read(1024 * 1024) # Read in 1 MB chunks + if not data: + break + sha512.update(data) + + # Obtain the hash result and encode it in base64 + hash_base64 = base64.b64encode(sha512.digest()).decode('utf-8') + return hash_base64 + +if __name__ == "__main__": + if len(sys.argv) < 2: + print("Usage: python3 script.py <file_path>") + sys.exit(1) + + file_path = sys.argv[1] + hash_base64_output = hash_file(file_path) + print(hash_base64_output) diff --git a/.github/scripts/icon-beta.png b/.github/scripts/icon-beta.png new file mode 100644 index 000000000..4b715494d Binary files /dev/null and b/.github/scripts/icon-beta.png differ diff --git a/.github/scripts/icon-nightly.png b/.github/scripts/icon-nightly.png new file mode 100644 index 000000000..23f532947 Binary files /dev/null and b/.github/scripts/icon-nightly.png differ diff --git a/.github/scripts/rename-tauri-app.sh b/.github/scripts/rename-tauri-app.sh new file mode 100644 index 000000000..12a1fadb7 --- /dev/null +++ b/.github/scripts/rename-tauri-app.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +# Check if the correct number of arguments is provided +if [ "$#" -ne 2 ]; then + echo "Usage: $0 <input_json_file> <channel>" + exit 1 +fi + +INPUT_JSON_FILE="$1" + +CHANNEL="$2" + +if [ "$CHANNEL" == "nightly" ]; then + UPDATER="latest" +else + UPDATER="beta" +fi + +# Check if the input file exists +if [ ! 
-f "$INPUT_JSON_FILE" ]; then + echo "Input file not found: $INPUT_JSON_FILE" + exit 1 +fi + +# Use jq to transform the content +jq --arg channel "$CHANNEL" --arg updater "$UPDATER" ' + .productName = "Jan-\($channel)" | + .identifier = "jan-\($channel).ai.app" +' "$INPUT_JSON_FILE" > ./tauri.conf.json.tmp + +cat ./tauri.conf.json.tmp + +rm $INPUT_JSON_FILE +mv ./tauri.conf.json.tmp $INPUT_JSON_FILE + +# Update the layout file +# LAYOUT_FILE_PATH="web/app/layout.tsx" + +# if [ ! -f "$LAYOUT_FILE_PATH" ]; then +# echo "File does not exist: $LAYOUT_FILE_PATH" +# exit 1 +# fi + +# Perform the replacements +# sed -i -e "s#Jan#Jan-$CHANNEL#g" "$LAYOUT_FILE_PATH" + +# Notify completion +# echo "File has been updated: $LAYOUT_FILE_PATH" diff --git a/.github/workflows/jan-electron-build-beta.yml b/.github/workflows/jan-electron-build-beta.yml deleted file mode 100644 index 61ff717ac..000000000 --- a/.github/workflows/jan-electron-build-beta.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Electron Builder - Beta Build - -on: - push: - tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"] - -jobs: - # Job create Update app version based on latest release tag with build number and save to output - get-update-version: - uses: ./.github/workflows/template-get-update-version.yml - - build-macos: - uses: ./.github/workflows/template-build-macos.yml - secrets: inherit - needs: [get-update-version] - with: - ref: ${{ github.ref }} - public_provider: github - new_version: ${{ needs.get-update-version.outputs.new_version }} - beta: true - nightly: false - cortex_api_port: "39271" - - build-windows-x64: - uses: ./.github/workflows/template-build-windows-x64.yml - secrets: inherit - needs: [get-update-version] - with: - ref: ${{ github.ref }} - public_provider: github - new_version: ${{ needs.get-update-version.outputs.new_version }} - beta: true - nightly: false - cortex_api_port: "39271" - - build-linux-x64: - uses: ./.github/workflows/template-build-linux-x64.yml - secrets: inherit - needs: 
[get-update-version] - with: - ref: ${{ github.ref }} - public_provider: github - new_version: ${{ needs.get-update-version.outputs.new_version }} - beta: true - nightly: false - cortex_api_port: "39271" - - sync-temp-to-latest: - needs: [build-macos, build-windows-x64, build-linux-x64] - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - name: Getting the repo - uses: actions/checkout@v3 - - name: Sync temp to latest - run: | - # sync temp-beta to beta by copy files that are different or new - aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/" - env: - AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }} - AWS_EC2_METADATA_DISABLED: "true" - - noti-discord-and-update-url-readme: - needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest] - runs-on: ubuntu-latest - steps: - - name: Set version to environment variable - run: | - VERSION=${{ needs.get-update-version.outputs.new_version }} - VERSION="${VERSION#v}" - echo "VERSION=$VERSION" >> $GITHUB_ENV - - - name: Notify Discord - uses: Ilshidur/action-discord@master - with: - args: | - Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information: - - Windows: https://delta.jan.ai/beta/jan-beta-win-x64-{{ VERSION }}.exe - - macOS Universal: https://delta.jan.ai/beta/jan-beta-mac-universal-{{ VERSION }}.dmg - - Linux Deb: https://delta.jan.ai/beta/jan-beta-linux-amd64-{{ VERSION }}.deb - - Linux AppImage: https://delta.jan.ai/beta/jan-beta-linux-x86_64-{{ VERSION }}.AppImage - env: - DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }} \ No newline at end of file diff --git a/.github/workflows/jan-electron-build-nightly.yml 
b/.github/workflows/jan-electron-build-nightly.yml index af5bab195..b777d5823 100644 --- a/.github/workflows/jan-electron-build-nightly.yml +++ b/.github/workflows/jan-electron-build-nightly.yml @@ -14,6 +14,9 @@ on: default: none pull_request_review: types: [submitted] + pull_request: + branches: + - chore/tauri-cicd jobs: set-public-provider: @@ -47,8 +50,41 @@ jobs: get-update-version: uses: ./.github/workflows/template-get-update-version.yml - build-macos: - uses: ./.github/workflows/template-build-macos.yml + build-tauri-macos: + uses: ./.github/workflows/template-tauri-build-macos.yml + secrets: inherit + needs: [get-update-version, set-public-provider] + with: + ref: ${{ needs.set-public-provider.outputs.ref }} + public_provider: ${{ needs.set-public-provider.outputs.public_provider }} + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: nightly + cortex_api_port: "39261" + + build-tauri-windows-x64: + uses: ./.github/workflows/template-tauri-build-windows-x64.yml + secrets: inherit + needs: [get-update-version, set-public-provider] + with: + ref: ${{ needs.set-public-provider.outputs.ref }} + public_provider: ${{ needs.set-public-provider.outputs.public_provider }} + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: nightly + cortex_api_port: "39261" + + build-tauri-linux-x64: + uses: ./.github/workflows/template-tauri-build-linux-x64.yml + secrets: inherit + needs: [get-update-version, set-public-provider] + with: + ref: ${{ needs.set-public-provider.outputs.ref }} + public_provider: ${{ needs.set-public-provider.outputs.public_provider }} + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: nightly + cortex_api_port: "39261" + + build-electron-macos: + uses: ./.github/workflows/template-electron-build-macos.yml needs: [get-update-version, set-public-provider] secrets: inherit with: @@ -59,8 +95,8 @@ jobs: beta: false cortex_api_port: "39261" - build-windows-x64: - uses: 
./.github/workflows/template-build-windows-x64.yml + build-electron-windows-x64: + uses: ./.github/workflows/template-electron-build-windows-x64.yml secrets: inherit needs: [get-update-version, set-public-provider] with: @@ -70,8 +106,9 @@ jobs: nightly: true beta: false cortex_api_port: "39261" - build-linux-x64: - uses: ./.github/workflows/template-build-linux-x64.yml + + build-electron-linux-x64: + uses: ./.github/workflows/template-electron-build-linux-x64.yml secrets: inherit needs: [get-update-version, set-public-provider] with: @@ -83,8 +120,16 @@ jobs: cortex_api_port: "39261" sync-temp-to-latest: - needs: [set-public-provider, build-windows-x64, build-linux-x64, build-macos] runs-on: ubuntu-latest + needs: [ + set-public-provider, + build-electron-windows-x64, + build-electron-linux-x64, + build-electron-macos, + build-tauri-windows-x64, + build-tauri-linux-x64, + build-tauri-macos + ] steps: - name: Sync temp to latest if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }} @@ -97,7 +142,14 @@ jobs: AWS_EC2_METADATA_DISABLED: "true" noti-discord-nightly-and-update-url-readme: - needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + needs: [ + build-tauri-macos, + build-tauri-windows-x64, + build-tauri-linux-x64, + get-update-version, + set-public-provider, + sync-temp-to-latest + ] secrets: inherit if: github.event_name == 'schedule' uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml @@ -108,7 +160,17 @@ jobs: new_version: ${{ needs.get-update-version.outputs.new_version }} noti-discord-pre-release-and-update-url-readme: - needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + needs: [ + build-electron-macos, + build-electron-windows-x64, + build-electron-linux-x64, + build-tauri-macos, + build-tauri-windows-x64, + build-tauri-linux-x64, + get-update-version, + set-public-provider, + 
sync-temp-to-latest + ] secrets: inherit if: github.event_name == 'push' uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml @@ -119,7 +181,14 @@ jobs: new_version: ${{ needs.get-update-version.outputs.new_version }} noti-discord-manual-and-update-url-readme: - needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + needs: [ + build-tauri-macos, + build-tauri-windows-x64, + build-tauri-linux-x64, + get-update-version, + set-public-provider, + sync-temp-to-latest + ] secrets: inherit if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3' uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml @@ -131,7 +200,17 @@ jobs: comment-pr-build-url: - needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + needs: [ + build-electron-macos, + build-electron-windows-x64, + build-electron-linux-x64, + build-tauri-macos, + build-tauri-windows-x64, + build-tauri-linux-x64, + get-update-version, + set-public-provider, + sync-temp-to-latest + ] runs-on: ubuntu-latest if: github.event_name == 'pull_request_review' steps: @@ -147,4 +226,4 @@ jobs: PR_URL=${{ github.event.pull_request.html_url }} RUN_ID=${{ github.run_id }} COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})." 
- gh pr comment $PR_URL --body "$COMMENT" + gh pr comment $PR_URL --body "$COMMENT" \ No newline at end of file diff --git a/.github/workflows/jan-electron-build.yml b/.github/workflows/jan-electron-build.yml index 7d69a5c12..a223027f3 100644 --- a/.github/workflows/jan-electron-build.yml +++ b/.github/workflows/jan-electron-build.yml @@ -33,8 +33,8 @@ jobs: draft: true prerelease: false - build-macos: - uses: ./.github/workflows/template-build-macos.yml + build-electron-macos: + uses: ./.github/workflows/template-electron-build-macos.yml secrets: inherit needs: [get-update-version] with: @@ -44,8 +44,8 @@ jobs: nightly: false new_version: ${{ needs.get-update-version.outputs.new_version }} - build-windows-x64: - uses: ./.github/workflows/template-build-windows-x64.yml + build-electron-windows-x64: + uses: ./.github/workflows/template-electron-build-windows-x64.yml secrets: inherit needs: [get-update-version] with: @@ -55,8 +55,8 @@ jobs: nightly: false new_version: ${{ needs.get-update-version.outputs.new_version }} - build-linux-x64: - uses: ./.github/workflows/template-build-linux-x64.yml + build-electron-linux-x64: + uses: ./.github/workflows/template-electron-build-linux-x64.yml secrets: inherit needs: [get-update-version] with: @@ -65,9 +65,49 @@ jobs: beta: false nightly: false new_version: ${{ needs.get-update-version.outputs.new_version }} + + # build-tauri-macos: + # uses: ./.github/workflows/template-tauri-build-macos.yml + # secrets: inherit + # needs: [get-update-version, create-draft-release] + # with: + # ref: ${{ github.ref }} + # public_provider: github + # channel: stable + # new_version: ${{ needs.get-update-version.outputs.new_version }} + # upload_url: ${{ needs.create-draft-release.outputs.upload_url }} + + # build-tauri-windows-x64: + # uses: ./.github/workflows/template-tauri-build-windows-x64.yml + # secrets: inherit + # needs: [get-update-version, create-draft-release] + # with: + # ref: ${{ github.ref }} + # public_provider: github + # 
channel: stable + # new_version: ${{ needs.get-update-version.outputs.new_version }} + # upload_url: ${{ needs.create-draft-release.outputs.upload_url }} + + # build-tauri-linux-x64: + # uses: ./.github/workflows/template-tauri-build-linux-x64.yml + # secrets: inherit + # needs: [get-update-version, create-draft-release] + # with: + # ref: ${{ github.ref }} + # public_provider: github + # channel: stable + # new_version: ${{ needs.get-update-version.outputs.new_version }} + # upload_url: ${{ needs.create-draft-release.outputs.upload_url }} update_release_draft: - needs: [build-macos, build-windows-x64, build-linux-x64] + needs: [ + build-electron-windows-x64, + build-electron-linux-x64, + build-electron-macos, + build-tauri-windows-x64, + build-tauri-linux-x64, + build-tauri-macos + ] permissions: # write permission is required to create a github release contents: write diff --git a/.github/workflows/jan-tauri-build-beta.yml b/.github/workflows/jan-tauri-build-beta.yml new file mode 100644 index 000000000..476293d71 --- /dev/null +++ b/.github/workflows/jan-tauri-build-beta.yml @@ -0,0 +1,156 @@ +name: Tauri Builder - Beta Build + +on: + push: + tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"] + +jobs: + # Job create Update app version based on latest release tag with build number and save to output + get-update-version: + uses: ./.github/workflows/template-get-update-version.yml + create-draft-release: + runs-on: ubuntu-latest + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') + outputs: + upload_url: ${{ steps.create_release.outputs.upload_url }} + version: ${{ steps.get_version.outputs.version }} + permissions: + contents: write + steps: + - name: Extract tag name without v prefix + id: get_version + run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}" + env: + GITHUB_REF: ${{ github.ref }} + - name: Create Draft Release + id: create_release + uses: 
softprops/action-gh-release@v2 + with: + tag_name: ${{ github.ref_name }} + token: ${{ secrets.GITHUB_TOKEN }} + name: "${{ env.VERSION }}" + draft: true + prerelease: false + generate_release_notes: true + + build-macos: + uses: ./.github/workflows/template-tauri-build-macos.yml + secrets: inherit + needs: [get-update-version, create-draft-release] + with: + ref: ${{ github.ref }} + public_provider: github + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: beta + cortex_api_port: "39271" + upload_url: ${{ needs.create-draft-release.outputs.upload_url }} + + build-windows-x64: + uses: ./.github/workflows/template-tauri-build-windows-x64.yml + secrets: inherit + needs: [get-update-version, create-draft-release] + with: + ref: ${{ github.ref }} + public_provider: github + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: beta + cortex_api_port: "39271" + upload_url: ${{ needs.create-draft-release.outputs.upload_url }} + + build-linux-x64: + uses: ./.github/workflows/template-tauri-build-linux-x64.yml + secrets: inherit + needs: [get-update-version, create-draft-release] + with: + ref: ${{ github.ref }} + public_provider: github + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: beta + cortex_api_port: "39271" + upload_url: ${{ needs.create-draft-release.outputs.upload_url }} + + sync-temp-to-latest: + needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64] + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Getting the repo + uses: actions/checkout@v3 + + - name: create latest.json file + run: | + + VERSION=${{ needs.get-update-version.outputs.new_version }} + PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ") + LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}" + LINUX_URL="https://delta.jan.ai/beta/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}" + WINDOWS_SIGNATURE="${{ 
needs.build-windows-x64.outputs.WIN_SIG }}" + WINDOWS_URL="https://delta.jan.ai/beta/${{ needs.build-windows-x64.outputs.FILE_NAME }}" + DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}" + DARWIN_URL="https://delta.jan.ai/beta/Jan-beta_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz" + + jq --arg version "$VERSION" \ + --arg pub_date "$PUB_DATE" \ + --arg linux_signature "$LINUX_SIGNATURE" \ + --arg linux_url "$LINUX_URL" \ + --arg windows_signature "$WINDOWS_SIGNATURE" \ + --arg windows_url "$WINDOWS_URL" \ + --arg darwin_arm_signature "$DARWIN_SIGNATURE" \ + --arg darwin_arm_url "$DARWIN_URL" \ + --arg darwin_amd_signature "$DARWIN_SIGNATURE" \ + --arg darwin_amd_url "$DARWIN_URL" \ + '.version = $version + | .pub_date = $pub_date + | .platforms["linux-x86_64"].signature = $linux_signature + | .platforms["linux-x86_64"].url = $linux_url + | .platforms["windows-x86_64"].signature = $windows_signature + | .platforms["windows-x86_64"].url = $windows_url + | .platforms["darwin-aarch64"].signature = $darwin_arm_signature + | .platforms["darwin-aarch64"].url = $darwin_arm_url + | .platforms["darwin-x86_64"].signature = $darwin_amd_signature + | .platforms["darwin-x86_64"].url = $darwin_amd_url' \ + src-tauri/latest.json.template > latest.json + cat latest.json + - name: Sync temp to latest + run: | + # sync temp-beta to beta by copy files that are different or new + aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/latest.json + aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/" + env: + AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }} + AWS_EC2_METADATA_DISABLED: "true" + + - name: Upload release asset if public provider is github + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: 
actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ needs.create-draft-release.outputs.upload_url }} + asset_path: ./latest.json + asset_name: latest.json + asset_content_type: text/json + + noti-discord-and-update-url-readme: + needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest] + runs-on: ubuntu-latest + steps: + - name: Set version to environment variable + run: | + VERSION=${{ needs.get-update-version.outputs.new_version }} + VERSION="${VERSION#v}" + echo "VERSION=$VERSION" >> $GITHUB_ENV + + - name: Notify Discord + uses: Ilshidur/action-discord@master + with: + args: | + Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information: + - Windows: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_x64-setup.exe + - macOS Universal: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_universal.dmg + - Linux Deb: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.deb + - Linux AppImage: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.AppImage + env: + DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }} \ No newline at end of file diff --git a/.github/workflows/jan-tauri-build-nightly.yml b/.github/workflows/jan-tauri-build-nightly.yml new file mode 100644 index 000000000..e3769fe59 --- /dev/null +++ b/.github/workflows/jan-tauri-build-nightly.yml @@ -0,0 +1,187 @@ +name: Tauri Builder - Nightly / Manual + +on: + schedule: + - cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday + workflow_dispatch: + inputs: + public_provider: + type: choice + description: 'Public Provider' + options: + - none + - aws-s3 + default: none + pull_request_review: + types: [submitted] + +jobs: + set-public-provider: + runs-on: ubuntu-latest + outputs: + public_provider: ${{ steps.set-public-provider.outputs.public_provider }} + ref: ${{ 
steps.set-public-provider.outputs.ref }} + steps: + - name: Set public provider + id: set-public-provider + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}" + echo "::set-output name=ref::${{ github.ref }}" + else + if [ "${{ github.event_name }}" == "schedule" ]; then + echo "::set-output name=public_provider::aws-s3" + echo "::set-output name=ref::refs/heads/dev" + elif [ "${{ github.event_name }}" == "push" ]; then + echo "::set-output name=public_provider::aws-s3" + echo "::set-output name=ref::${{ github.ref }}" + elif [ "${{ github.event_name }}" == "pull_request_review" ]; then + echo "::set-output name=public_provider::none" + echo "::set-output name=ref::${{ github.ref }}" + else + echo "::set-output name=public_provider::none" + echo "::set-output name=ref::${{ github.ref }}" + fi + fi + # Job create Update app version based on latest release tag with build number and save to output + get-update-version: + uses: ./.github/workflows/template-get-update-version.yml + + build-macos: + uses: ./.github/workflows/template-tauri-build-macos.yml + needs: [get-update-version, set-public-provider] + secrets: inherit + with: + ref: ${{ needs.set-public-provider.outputs.ref }} + public_provider: ${{ needs.set-public-provider.outputs.public_provider }} + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: nightly + cortex_api_port: "39261" + + build-windows-x64: + uses: ./.github/workflows/template-tauri-build-windows-x64.yml + secrets: inherit + needs: [get-update-version, set-public-provider] + with: + ref: ${{ needs.set-public-provider.outputs.ref }} + public_provider: ${{ needs.set-public-provider.outputs.public_provider }} + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: nightly + cortex_api_port: "39261" + + build-linux-x64: + uses: ./.github/workflows/template-tauri-build-linux-x64.yml + secrets: 
inherit + needs: [get-update-version, set-public-provider] + with: + ref: ${{ needs.set-public-provider.outputs.ref }} + public_provider: ${{ needs.set-public-provider.outputs.public_provider }} + new_version: ${{ needs.get-update-version.outputs.new_version }} + channel: nightly + cortex_api_port: "39261" + + sync-temp-to-latest: + needs: [get-update-version, set-public-provider, build-windows-x64, build-linux-x64, build-macos] + runs-on: ubuntu-latest + steps: + - name: Getting the repo + uses: actions/checkout@v3 + - name: Install jq + uses: dcarbone/install-jq-action@v2.0.1 + - name: create latest.json file + run: | + + VERSION=${{ needs.get-update-version.outputs.new_version }} + PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ") + LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}" + LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}" + WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}" + WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}" + DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}" + DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz" + + jq --arg version "$VERSION" \ + --arg pub_date "$PUB_DATE" \ + --arg linux_signature "$LINUX_SIGNATURE" \ + --arg linux_url "$LINUX_URL" \ + --arg windows_signature "$WINDOWS_SIGNATURE" \ + --arg windows_url "$WINDOWS_URL" \ + --arg darwin_arm_signature "$DARWIN_SIGNATURE" \ + --arg darwin_arm_url "$DARWIN_URL" \ + --arg darwin_amd_signature "$DARWIN_SIGNATURE" \ + --arg darwin_amd_url "$DARWIN_URL" \ + '.version = $version + | .pub_date = $pub_date + | .platforms["linux-x86_64"].signature = $linux_signature + | .platforms["linux-x86_64"].url = $linux_url + | .platforms["windows-x86_64"].signature = $windows_signature + | .platforms["windows-x86_64"].url = $windows_url + | .platforms["darwin-aarch64"].signature = 
$darwin_arm_signature + | .platforms["darwin-aarch64"].url = $darwin_arm_url + | .platforms["darwin-x86_64"].signature = $darwin_amd_signature + | .platforms["darwin-x86_64"].url = $darwin_amd_url' \ + src-tauri/latest.json.template > latest.json + cat latest.json + - name: Sync temp to latest + if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }} + run: | + aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json + aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/ + env: + AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }} + AWS_EC2_METADATA_DISABLED: "true" + + noti-discord-nightly-and-update-url-readme: + needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + secrets: inherit + if: github.event_name == 'schedule' + uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml + with: + ref: refs/heads/dev + build_reason: Nightly + push_to_branch: dev + new_version: ${{ needs.get-update-version.outputs.new_version }} + + noti-discord-pre-release-and-update-url-readme: + needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + secrets: inherit + if: github.event_name == 'push' + uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml + with: + ref: refs/heads/dev + build_reason: Pre-release + push_to_branch: dev + new_version: ${{ needs.get-update-version.outputs.new_version }} + + noti-discord-manual-and-update-url-readme: + needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + secrets: inherit + if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 
'aws-s3' + uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml + with: + ref: refs/heads/dev + build_reason: Manual + push_to_branch: dev + new_version: ${{ needs.get-update-version.outputs.new_version }} + + + comment-pr-build-url: + needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, sync-temp-to-latest] + runs-on: ubuntu-latest + if: github.event_name == 'pull_request_review' + steps: + - name: Set up GitHub CLI + run: | + curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz + sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/ + + - name: Comment build URL on PR + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + PR_URL=${{ github.event.pull_request.html_url }} + RUN_ID=${{ github.run_id }} + COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})." 
+ gh pr comment $PR_URL --body "$COMMENT" diff --git a/.github/workflows/nightly-integrate-cortex-cpp.yml b/.github/workflows/nightly-integrate-cortex-cpp.yml deleted file mode 100644 index 066fbd28e..000000000 --- a/.github/workflows/nightly-integrate-cortex-cpp.yml +++ /dev/null @@ -1,127 +0,0 @@ -name: Nightly Update cortex cpp - -on: - schedule: - - cron: '30 19 * * 1-5' # At 01:30 on every day-of-week from Monday through Friday UTC +7 - workflow_dispatch: - -jobs: - update-submodule: - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - actions: write - - outputs: - pr_number: ${{ steps.check-update.outputs.pr_number }} - pr_created: ${{ steps.check-update.outputs.pr_created }} - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - with: - submodules: recursive - ref: dev - fetch-depth: 0 - token: ${{ secrets.PAT_SERVICE_ACCOUNT }} - - - name: Configure Git - run: | - git config --global user.name 'github-actions[bot]' - git config --global user.email 'github-actions[bot]@users.noreply.github.com' - - - name: Update submodule to latest release - id: check-update - env: - GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }} - run: | - curl -s https://api.github.com/repos/menloresearch/cortex/releases > /tmp/github_api_releases.json - latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1) - - get_asset_count() { - local version_name=$1 - cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length' - } - - cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt" - current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1) - - current_version_asset_count=$(get_asset_count "$current_version_name") - latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name") - - if [ "$current_version_name" = "$latest_prerelease_name" ]; then - echo 
"cortex cpp remote repo doesn't have update today, skip update cortex.cpp for today nightly build" - echo "::set-output name=pr_created::false" - exit 0 - fi - - if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then - echo "Latest prerelease version has different number of assets, somethink went wrong, skip update cortex.cpp for today nightly build" - echo "::set-output name=pr_created::false" - exit 1 - fi - - echo $latest_prerelease_name > $cortex_cpp_version_file_path - echo "Updated version from $current_version_name to $latest_prerelease_name." - echo "::set-output name=pr_created::true" - - git add -f $cortex_cpp_version_file_path - git commit -m "Update cortex cpp nightly to version $latest_prerelease_name" - branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')" - git checkout -b $branch_name - git push origin $branch_name - - pr_title="Update cortex cpp nightly to version $latest_prerelease_name" - pr_body="This PR updates the Update cortex cpp nightly to version $latest_prerelease_name" - - gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA - - pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number') - echo "::set-output name=pr_number::$pr_number" - - check-and-merge-pr: - needs: update-submodule - if: needs.update-submodule.outputs.pr_created == 'true' - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - with: - submodules: recursive - fetch-depth: 0 - token: ${{ secrets.PAT_SERVICE_ACCOUNT }} - - - name: Wait for CI to pass - env: - GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }} - run: | - pr_number=${{ needs.update-submodule.outputs.pr_number }} - while true; do - ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt') - if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then - echo "CI is still running, waiting..." 
- sleep 60 - else - echo "CI has completed, checking states..." - ci_states=$(gh pr checks $pr_number --json state --jq '.[].state') - if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then - echo "CI failed, exiting..." - exit 1 - else - echo "CI passed, merging PR..." - break - fi - fi - done - - - name: Merge the PR - env: - GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }} - run: | - pr_number=${{ needs.update-submodule.outputs.pr_number }} - gh pr merge $pr_number --merge --admin diff --git a/.github/workflows/template-build-linux-x64.yml b/.github/workflows/template-electron-build-linux-x64.yml similarity index 96% rename from .github/workflows/template-build-linux-x64.yml rename to .github/workflows/template-electron-build-linux-x64.yml index 58b566931..4dfab5c09 100644 --- a/.github/workflows/template-build-linux-x64.yml +++ b/.github/workflows/template-electron-build-linux-x64.yml @@ -41,6 +41,7 @@ on: jobs: build-linux-x64: + if: inputs.public_provider == 'github' || inputs.public_provider == 'none' runs-on: ubuntu-latest environment: production permissions: @@ -130,7 +131,7 @@ jobs: env: VERSION_TAG: ${{ inputs.new_version }} - - name: Build and publish app to aws s3 r2 or github artifactory + - name: Build and publish app to aws s3 r2 or github artifactory if: inputs.public_provider != 'github' run: | # check public_provider is true or not @@ -176,12 +177,12 @@ jobs: if: inputs.public_provider != 'github' uses: actions/upload-artifact@v4 with: - name: jan-linux-amd64-${{ inputs.new_version }}-deb + name: jan-electron-linux-amd64-${{ inputs.new_version }}-deb path: ./electron/dist/*.deb - name: Upload Artifact .AppImage file if: inputs.public_provider != 'github' uses: actions/upload-artifact@v4 with: - name: jan-linux-amd64-${{ inputs.new_version }}-AppImage + name: jan-electron-linux-amd64-${{ inputs.new_version }}-AppImage path: ./electron/dist/*.AppImage \ No newline at end of file diff --git a/.github/workflows/template-build-macos.yml 
b/.github/workflows/template-electron-build-macos.yml similarity index 98% rename from .github/workflows/template-build-macos.yml rename to .github/workflows/template-electron-build-macos.yml index a5e5cc724..ab9f002cb 100644 --- a/.github/workflows/template-build-macos.yml +++ b/.github/workflows/template-electron-build-macos.yml @@ -51,6 +51,7 @@ on: jobs: build-macos: + if: inputs.public_provider == 'github' || inputs.public_provider == 'none' runs-on: macos-latest environment: production permissions: @@ -160,7 +161,7 @@ jobs: p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }} p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }} - - name: Build and publish app to aws s3 r2 or github artifactory + - name: Build and publish app to aws s3 r2 or github artifactory if: inputs.public_provider != 'github' run: | # check public_provider is true or not @@ -229,5 +230,5 @@ jobs: if: inputs.public_provider != 'github' uses: actions/upload-artifact@v4 with: - name: jan-mac-universal-${{ inputs.new_version }} + name: jan-electron-mac-universal-${{ inputs.new_version }} path: ./electron/dist/*.dmg \ No newline at end of file diff --git a/.github/workflows/template-build-windows-x64.yml b/.github/workflows/template-electron-build-windows-x64.yml similarity index 98% rename from .github/workflows/template-build-windows-x64.yml rename to .github/workflows/template-electron-build-windows-x64.yml index 9be028e15..9f71dadb0 100644 --- a/.github/workflows/template-build-windows-x64.yml +++ b/.github/workflows/template-electron-build-windows-x64.yml @@ -51,6 +51,7 @@ on: jobs: build-windows-x64: + if: inputs.public_provider == 'github' || inputs.public_provider == 'none' runs-on: windows-latest permissions: contents: write @@ -225,5 +226,5 @@ jobs: if: inputs.public_provider != 'github' uses: actions/upload-artifact@v4 with: - name: jan-win-x64-${{ inputs.new_version }} + name: jan-electron-win-x64-${{ inputs.new_version }} path: ./electron/dist/*.exe \ No newline at end of file 
diff --git a/.github/workflows/template-get-update-version.yml b/.github/workflows/template-get-update-version.yml index 97340be81..70f5eace9 100644 --- a/.github/workflows/template-get-update-version.yml +++ b/.github/workflows/template-get-update-version.yml @@ -44,9 +44,12 @@ jobs: exit 1 } - if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then + if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then echo "Tag detected, set output follow tag" - echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}" + sanitized_tag="${{ steps.tag.outputs.tag }}" + # Remove the 'v' prefix if it exists + sanitized_tag="${sanitized_tag#v}" + echo "::set-output name=new_version::$sanitized_tag" else # Get the latest release tag from GitHub API LATEST_TAG=$(get_latest_tag) diff --git a/.github/workflows/template-noti-discord-and-update-url-readme.yml b/.github/workflows/template-noti-discord-and-update-url-readme.yml index 282e0aa76..eaaee7e50 100644 --- a/.github/workflows/template-noti-discord-and-update-url-readme.yml +++ b/.github/workflows/template-noti-discord-and-update-url-readme.yml @@ -47,10 +47,10 @@ jobs: with: args: | Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}: - - Windows: https://delta.jan.ai/nightly/jan-nightly-win-x64-{{ VERSION }}.exe - - macOS Universal: https://delta.jan.ai/nightly/jan-nightly-mac-universal-{{ VERSION }}.dmg - - Linux Deb: https://delta.jan.ai/nightly/jan-nightly-linux-amd64-{{ VERSION }}.deb - - Linux AppImage: https://delta.jan.ai/nightly/jan-nightly-linux-x86_64-{{ VERSION }}.AppImage + - Windows: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_x64-setup.exe + - macOS Universal: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_universal.dmg + - Linux Deb: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.deb + - Linux AppImage: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.AppImage - Github action run: 
https://github.com/menloresearch/jan/actions/runs/{{ GITHUB_RUN_ID }} env: DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} diff --git a/.github/workflows/template-tauri-build-linux-x64.yml b/.github/workflows/template-tauri-build-linux-x64.yml new file mode 100644 index 000000000..3baa742ce --- /dev/null +++ b/.github/workflows/template-tauri-build-linux-x64.yml @@ -0,0 +1,296 @@ +name: tauri-build-linux-x64 +on: + workflow_call: + inputs: + ref: + required: true + type: string + default: 'refs/heads/main' + public_provider: + required: true + type: string + default: none + description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3' + new_version: + required: true + type: string + default: '' + cortex_api_port: + required: false + type: string + default: "" + upload_url: + required: false + type: string + default: '' + channel: + required: true + type: string + default: 'nightly' + description: 'The channel to use for this job' + secrets: + DELTA_AWS_S3_BUCKET_NAME: + required: false + DELTA_AWS_ACCESS_KEY_ID: + required: false + DELTA_AWS_SECRET_ACCESS_KEY: + required: false + TAURI_SIGNING_PRIVATE_KEY: + required: false + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: + required: false + TAURI_SIGNING_PUBLIC_KEY: + required: false + outputs: + DEB_SIG: + value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }} + APPIMAGE_SIG: + value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }} + APPIMAGE_FILE_NAME: + value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }} +jobs: + build-linux-x64: + runs-on: ubuntu-22.04 + outputs: + DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }} + APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }} + APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }} + environment: production + permissions: + contents: write + steps: + - name: Getting the repo + uses: actions/checkout@v3 + with: + ref: ${{ inputs.ref }} + + - name: Free Disk Space Before Build + run: | + echo "Disk space 
before cleanup:" + df -h + sudo rm -rf /usr/local/.ghcup + sudo rm -rf /opt/hostedtoolcache/CodeQL + sudo rm -rf /usr/local/lib/android/sdk/ndk + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf /usr/local/share/boost + sudo apt-get clean + echo "Disk space after cleanup:" + df -h + + - name: Replace Icons for Beta Build + if: inputs.channel != 'stable' + shell: bash + run: | + cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png + + - name: Installing node + uses: actions/setup-node@v1 + with: + node-version: 20 + + - name: Install jq + uses: dcarbone/install-jq-action@v2.0.1 + + - name: Install ctoml + run: | + cargo install ctoml + + - name: Install Tauri dependecies + run: | + sudo apt update + sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 + + - name: Update app version base public_provider + run: | + echo "Version: ${{ inputs.new_version }}" + # Update tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.resources = ["resources/themes/**/*", "resources/pre-install/**/*"] | .bundle.externalBin = ["binaries/cortex-server", "resources/bin/uv"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + if [ "${{ inputs.channel }}" != "stable" ]; then + jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun", "usr/lib/Jan-${{ inputs.channel }}/binaries/engines": "binaries/engines"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + else + jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun", "usr/lib/Jan/binaries/engines": "binaries/engines"}' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + fi + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json + mv /tmp/package.json 
web/package.json + + ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}" + cat ./src-tauri/Cargo.toml + + # Change app name for beta and nightly builds + if [ "${{ inputs.channel }}" != "stable" ]; then + jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + + chmod +x .github/scripts/rename-tauri-app.sh + .github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }} + + cat ./src-tauri/tauri.conf.json + + # Update Cargo.toml + ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}" + echo "------------------" + cat ./src-tauri/Cargo.toml + + chmod +x .github/scripts/rename-workspace.sh + .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} + cat ./package.json + fi + - name: Build app + run: | + make build-tauri + # Copy engines and bun to appimage + wget https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O ./appimagetool + chmod +x ./appimagetool + if [ "${{ inputs.channel }}" != "stable" ]; then + cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/bin/bun + cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir/usr/lib/Jan-${{ inputs.channel }}/engines + ./appimagetool ./src-tauri/target/release/bundle/appimage/Jan-${{ inputs.channel }}.AppDir $(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage) + else + cp ./src-tauri/resources/bin/bun ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/bin/bun + cp -rf ./src-tauri/binaries/engines ./src-tauri/target/release/bundle/appimage/Jan.AppDir/usr/lib/Jan/engines + ./appimagetool ./src-tauri/target/release/bundle/appimage/Jan.AppDir $(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage) + fi + + env: + 
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }} + POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }} + # CORTEX_API_PORT: ${{ inputs.cortex_api_port }} + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }} + + # Publish app + + ## Artifacts, for dev and test + - name: Upload Artifact + if: inputs.public_provider != 'github' + uses: actions/upload-artifact@v4 + with: + name: jan-linux-amd64-${{ inputs.new_version }}-deb + path: ./src-tauri/target/release/bundle/deb/*.deb + + - name: Upload Artifact + if: inputs.public_provider != 'github' + uses: actions/upload-artifact@v4 + with: + name: jan-linux-amd64-${{ inputs.new_version }}-AppImage + path: ./src-tauri/target/release/bundle/appimage/*.AppImage + + ## create zip file and latest-linux.yml for linux electron auto updater + - name: Create zip file and latest-linux.yml for linux electron auto updater + id: packageinfo + run: | + cd ./src-tauri/target/release/bundle + + if [ "${{ inputs.channel }}" != "stable" ]; then + DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb + APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage + DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig) + APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig) + else + DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb + APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage + DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig) + APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig) + fi + + DEB_FILE_SIZE=$(stat -c%s deb/$DEB_FILE_NAME) + APPIMAGE_FILE_SIZE=$(stat -c%s appimage/$APPIMAGE_FILE_NAME) + echo "deb file size: $DEB_FILE_SIZE" + echo "appimage file size: 
$APPIMAGE_FILE_SIZE" + + DEB_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py deb/$DEB_FILE_NAME) + APPIMAGE_SH512_CHECKSUM=$(python3 ../../../../.github/scripts/electron-checksum.py appimage/$APPIMAGE_FILE_NAME) + echo "deb sh512 checksum: $DEB_SH512_CHECKSUM" + echo "appimage sh512 checksum: $APPIMAGE_SH512_CHECKSUM" + + CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ") + echo "releaseDate: $CURRENT_TIME" + + # Create latest-linux.yml file + echo "version: ${{ inputs.new_version }}" > latest-linux.yml + echo "files:" >> latest-linux.yml + echo " - url: $DEB_FILE_NAME" >> latest-linux.yml + echo " sha512: $DEB_SH512_CHECKSUM" >> latest-linux.yml + echo " size: $DEB_FILE_SIZE" >> latest-linux.yml + echo " - url: $APPIMAGE_FILE_NAME" >> latest-linux.yml + echo " sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml + echo " size: $APPIMAGE_FILE_SIZE" >> latest-linux.yml + echo "path: $APPIMAGE_FILE_NAME" >> latest-linux.yml + echo "sha512: $APPIMAGE_SH512_CHECKSUM" >> latest-linux.yml + echo "releaseDate: $CURRENT_TIME" >> latest-linux.yml + + cat latest-linux.yml + cp latest-linux.yml beta-linux.yml + + echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT + echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT + echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT + echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT + + ## Upload to s3 for nightly and beta + - name: upload to aws s3 if public provider is aws + if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta' + run: | + cd ./src-tauri/target/release/bundle + + # Upload for electron updater + aws s3 cp ./latest-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-linux.yml + aws s3 cp ./beta-linux.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-linux.yml + + # Upload for tauri updater + aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ 
secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage + aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb + env: + AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }} + AWS_EC2_METADATA_DISABLED: "true" + + ## Upload to github release for stable release + - name: Upload release assert if public provider is github + if: inputs.channel == 'stable' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/latest-linux.yml + asset_name: latest-linux.yml + asset_content_type: text/yaml + + - name: Upload release assert if public provider is github + if: inputs.channel == 'beta' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/beta-linux.yml + asset_name: beta-linux.yml + asset_content_type: text/yaml + - name: Upload release assert if public provider is github + if: inputs.public_provider == 'github' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }} + asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }} + asset_content_type: application/octet-stream + + - name: Upload release assert if public provider is github + if: inputs.public_provider == 'github' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: 
actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }} + asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }} + asset_content_type: application/octet-stream diff --git a/.github/workflows/template-tauri-build-macos.yml b/.github/workflows/template-tauri-build-macos.yml new file mode 100644 index 000000000..a68f4487d --- /dev/null +++ b/.github/workflows/template-tauri-build-macos.yml @@ -0,0 +1,310 @@ +name: tauri-build-macos +on: + workflow_call: + inputs: + ref: + required: true + type: string + default: 'refs/heads/main' + public_provider: + required: true + type: string + default: none + description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3' + new_version: + required: true + type: string + default: '' + cortex_api_port: + required: false + type: string + default: "" + upload_url: + required: false + type: string + default: '' + channel: + required: true + type: string + default: 'nightly' + description: 'The channel to use for this job' + secrets: + DELTA_AWS_S3_BUCKET_NAME: + required: false + DELTA_AWS_ACCESS_KEY_ID: + required: false + DELTA_AWS_SECRET_ACCESS_KEY: + required: false + CODE_SIGN_P12_BASE64: + required: false + CODE_SIGN_P12_PASSWORD: + required: false + APPLE_ID: + required: false + APPLE_APP_SPECIFIC_PASSWORD: + required: false + DEVELOPER_ID: + required: false + TAURI_SIGNING_PRIVATE_KEY: + required: false + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: + required: false + TAURI_SIGNING_PUBLIC_KEY: + required: false + outputs: + MAC_UNIVERSAL_SIG: + value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }} + TAR_NAME: + value: ${{ jobs.build-macos.outputs.TAR_NAME }} + +jobs: + build-macos: + runs-on: macos-latest + outputs: + MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }} + TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }} + environment: production + 
permissions: + contents: write + steps: + - name: Getting the repo + uses: actions/checkout@v3 + with: + ref: ${{ inputs.ref }} + - name: Replace Icons for Beta Build + if: inputs.channel != 'stable' + shell: bash + run: | + cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png + + - name: Installing node + uses: actions/setup-node@v1 + with: + node-version: 20 + + - name: Install jq + uses: dcarbone/install-jq-action@v2.0.1 + + - name: Install ctoml + run: | + cargo install ctoml + + - name: Create bun and uv universal + run: | + mkdir -p ./src-tauri/resources/bin/ + cd ./src-tauri/resources/bin/ + curl -L -o bun-darwin-x64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-x64.zip + curl -L -o bun-darwin-aarch64.zip https://github.com/oven-sh/bun/releases/download/bun-v1.2.10/bun-darwin-aarch64.zip + unzip bun-darwin-x64.zip + unzip bun-darwin-aarch64.zip + lipo -create -output bun-universal-apple-darwin bun-darwin-x64/bun bun-darwin-aarch64/bun + cp -f bun-darwin-aarch64/bun bun-aarch64-apple-darwin + cp -f bun-darwin-x64/bun bun-x86_64-apple-darwin + cp -f bun-universal-apple-darwin bun + + curl -L -o uv-x86_64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-x86_64-apple-darwin.tar.gz + curl -L -o uv-arm64.tar.gz https://github.com/astral-sh/uv/releases/download/0.6.17/uv-aarch64-apple-darwin.tar.gz + tar -xzf uv-x86_64.tar.gz + tar -xzf uv-arm64.tar.gz + mv uv-x86_64-apple-darwin uv-x86_64 + mv uv-aarch64-apple-darwin uv-aarch64 + lipo -create -output uv-universal-apple-darwin uv-x86_64/uv uv-aarch64/uv + cp -f uv-x86_64/uv uv-x86_64-apple-darwin + cp -f uv-aarch64/uv uv-aarch64-apple-darwin + cp -f uv-universal-apple-darwin uv + ls -la + + - name: Update app version based on latest release tag with build number + run: | + echo "Version: ${{ inputs.new_version }}" + # Update tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = 
true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json + mv /tmp/package.json web/package.json + + ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}" + cat ./src-tauri/Cargo.toml + + # Change app name for beta and nightly builds + if [ "${{ inputs.channel }}" != "stable" ]; then + jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + + chmod +x .github/scripts/rename-tauri-app.sh + .github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }} + + cat ./src-tauri/tauri.conf.json + + # Update Cargo.toml + ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}" + echo "------------------" + cat ./src-tauri/Cargo.toml + + chmod +x .github/scripts/rename-workspace.sh + .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} + cat ./package.json + fi + - name: Get key for notarize + run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8 + shell: bash + env: + NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }} + + - uses: apple-actions/import-codesign-certs@v2 + continue-on-error: true + with: + p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }} + p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }} + + - name: Build app + run: | + rustup target add x86_64-apple-darwin + make build-tauri + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + APP_PATH: '.' 
+ POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }} + POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }} + # CORTEX_API_PORT: ${{ inputs.cortex_api_port }} + APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }} + APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }} + APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }} + APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }} + APPLE_API_KEY_PATH: /tmp/notary-key.p8 + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }} + + # Publish app + + ## Artifacts, for dev and test + - name: Upload Artifact + if: inputs.public_provider != 'github' + uses: actions/upload-artifact@v4 + with: + name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg + path: | + ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg + + + ## create zip file and latest-mac.yml for mac electron auto updater + - name: create zip file and latest-mac.yml for mac electron auto updater + run: | + cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos + if [ "${{ inputs.channel }}" != "stable" ]; then + zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app + FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip + DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg + MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig) + TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz + else + zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app + FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip + MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig) + DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg + TAR_NAME=Jan.app.tar.gz + fi + + FILE_SIZE=$(stat -f%z $FILE_NAME) + echo "size: $FILE_SIZE" + + SH512_CHECKSUM=$(python3 
../../../../../../.github/scripts/electron-checksum.py $FILE_NAME)
+          echo "sha512: $SH512_CHECKSUM"
+          CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
+          echo "releaseDate: $CURRENT_TIME"
+
+          # Create latest-mac.yml file
+          echo "version: ${{ inputs.new_version }}" > latest-mac.yml
+          echo "files:" >> latest-mac.yml
+          echo "  - url: $FILE_NAME" >> latest-mac.yml
+          echo "    sha512: $SH512_CHECKSUM" >> latest-mac.yml
+          echo "    size: $FILE_SIZE" >> latest-mac.yml
+          echo "path: $FILE_NAME" >> latest-mac.yml
+          echo "sha512: $SH512_CHECKSUM" >> latest-mac.yml
+          echo "releaseDate: $CURRENT_TIME" >> latest-mac.yml
+
+          cat latest-mac.yml
+          cp latest-mac.yml beta-mac.yml
+
+          echo "MAC_UNIVERSAL_SIG=$MAC_UNIVERSAL_SIG" >> $GITHUB_OUTPUT
+          echo "FILE_NAME=$FILE_NAME" >> $GITHUB_OUTPUT
+          echo "DMG_NAME=$DMG_NAME" >> $GITHUB_OUTPUT
+          echo "TAR_NAME=$TAR_NAME" >> $GITHUB_OUTPUT
+        id: metadata
+
+      ## Upload to s3 for nightly and beta
+      - name: upload to aws s3 if public provider is aws
+        if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
+        run: |
+          cd ./src-tauri/target/universal-apple-darwin/release/bundle
+
+          # Upload for electron updater
+          aws s3 cp ./macos/latest-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest-mac.yml
+          aws s3 cp ./macos/beta-mac.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta-mac.yml
+          aws s3 cp ./macos/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
+
+          # Upload for tauri updater
+          aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
+          aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ 
inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
+          AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
+          AWS_EC2_METADATA_DISABLED: "true"
+
+      ## Upload to github release for stable release
+      - name: Upload release assert if public provider is github
+        if: inputs.channel == 'stable'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        uses: actions/upload-release-asset@v1.0.1
+        with:
+          upload_url: ${{ inputs.upload_url }}
+          asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/latest-mac.yml
+          asset_name: latest-mac.yml
+          asset_content_type: text/yaml
+
+      - name: Upload release assert if public provider is github
+        if: inputs.channel == 'beta'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        uses: actions/upload-release-asset@v1.0.1
+        with:
+          upload_url: ${{ inputs.upload_url }}
+          asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/beta-mac.yml
+          asset_name: beta-mac.yml
+          asset_content_type: text/yaml
+
+      - name: Upload release assert if public provider is github
+        if: inputs.public_provider == 'github'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        uses: actions/upload-release-asset@v1.0.1
+        with:
+          upload_url: ${{ inputs.upload_url }}
+          asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
+          asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
+          asset_content_type: application/gzip
+
+      - name: Upload release assert if public provider is github
+        if: inputs.public_provider == 'github'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        uses: actions/upload-release-asset@v1.0.1
+        with:
+          upload_url: ${{ inputs.upload_url }}
+          asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
+          asset_name: ${{ steps.metadata.outputs.DMG_NAME 
}} + asset_content_type: application/octet-stream + + - name: Upload release assert if public provider is github + if: inputs.public_provider == 'github' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }} + asset_name: ${{ steps.metadata.outputs.TAR_NAME }} + asset_content_type: application/gzip \ No newline at end of file diff --git a/.github/workflows/template-tauri-build-windows-x64.yml b/.github/workflows/template-tauri-build-windows-x64.yml new file mode 100644 index 000000000..103c3141c --- /dev/null +++ b/.github/workflows/template-tauri-build-windows-x64.yml @@ -0,0 +1,288 @@ +name: tauri-build-windows-x64 +on: + workflow_call: + inputs: + ref: + required: true + type: string + default: "refs/heads/main" + public_provider: + required: true + type: string + default: none + description: "none: build only, github: build and publish to github, aws s3: build and publish to aws s3" + new_version: + required: true + type: string + default: '' + cortex_api_port: + required: false + type: string + default: "" + upload_url: + required: false + type: string + default: '' + channel: + required: true + type: string + default: 'nightly' + description: 'The channel to use for this job' + secrets: + DELTA_AWS_S3_BUCKET_NAME: + required: false + DELTA_AWS_ACCESS_KEY_ID: + required: false + DELTA_AWS_SECRET_ACCESS_KEY: + required: false + AZURE_KEY_VAULT_URI: + required: false + AZURE_CLIENT_ID: + required: false + AZURE_TENANT_ID: + required: false + AZURE_CLIENT_SECRET: + required: false + AZURE_CERT_NAME: + required: false + TAURI_SIGNING_PRIVATE_KEY: + required: false + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: + required: false + TAURI_SIGNING_PUBLIC_KEY: + required: false + outputs: + WIN_SIG: + value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }} + FILE_NAME: + value: 
${{ jobs.build-windows-x64.outputs.FILE_NAME }} + +jobs: + build-windows-x64: + runs-on: windows-latest + outputs: + WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }} + FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }} + permissions: + contents: write + steps: + - name: Getting the repo + uses: actions/checkout@v3 + with: + ref: ${{ inputs.ref }} + + - name: Replace Icons for Beta Build + if: inputs.channel != 'stable' + shell: bash + run: | + cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png + + - name: Installing node + uses: actions/setup-node@v1 + with: + node-version: 20 + + - name: Install jq + uses: dcarbone/install-jq-action@v2.0.1 + + - name: Install ctoml + run: | + cargo install ctoml + + - name: Update app version base on tag + id: version_update + shell: bash + run: | + echo "Version: ${{ inputs.new_version }}" + # Update tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true | .bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json + mv /tmp/package.json web/package.json + + ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}" + echo "---------Cargo.toml---------" + cat ./src-tauri/Cargo.toml + + generate_build_version() { + ### Examble + ### input 0.5.6 output will be 0.5.6 and 0.5.6.0 + ### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2 + ### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213 + local new_version="$1" + local base_version + local t_value + + # Check if it has a "-" + if [[ "$new_version" == *-* ]]; then + base_version="${new_version%%-*}" # part before - + suffix="${new_version#*-}" # part after - + + # Check if it is rcX-beta + if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then + 
t_value="${BASH_REMATCH[1]}" + else + t_value="$suffix" + fi + else + base_version="$new_version" + t_value="0" + fi + + # Export two values + new_base_version="$base_version" + new_build_version="${base_version}.${t_value}" + } + generate_build_version ${{ inputs.new_version }} + sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template + + # Change app name for beta and nightly builds + if [ "${{ inputs.channel }}" != "stable" ]; then + jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json + mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json + + chmod +x .github/scripts/rename-tauri-app.sh + .github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }} + + echo "---------tauri.conf.json---------" + cat ./src-tauri/tauri.conf.json + + # Update Cargo.toml + ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}" + echo "------------------" + cat ./src-tauri/Cargo.toml + + chmod +x .github/scripts/rename-workspace.sh + .github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }} + cat ./package.json + + sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template + fi + echo "---------nsis.template---------" + cat ./src-tauri/tauri.bundle.windows.nsis.template + + - name: Install AzureSignTool + run: | + dotnet tool install --global --version 6.0.0 AzureSignTool + + - name: Build app + shell: bash + run: | + make build-tauri + env: + AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + AZURE_CERT_NAME: 
${{ secrets.AZURE_CERT_NAME }} + AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: auto + AWS_EC2_METADATA_DISABLED: "true" + AWS_MAX_ATTEMPTS: "5" + POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }} + POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }} + # CORTEX_API_PORT: ${{ inputs.cortex_api_port }} + TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }} + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }} + TAURI_SIGNING_PUBLIC_KEY: ${{ secrets.TAURI_SIGNING_PUBLIC_KEY }} + + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: jan-windows-${{ inputs.new_version }} + path: | + ./src-tauri/target/release/bundle/nsis/*.exe + + ## create zip file and latest.yml for windows electron auto updater + - name: create zip file and latest.yml for windows electron auto updater + shell: bash + run: | + cd ./src-tauri/target/release/bundle/nsis + if [ "${{ inputs.channel }}" != "stable" ]; then + FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe + WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig) + else + FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe + WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig) + fi + + FILE_SIZE=$(stat -c %s $FILE_NAME) + echo "size: $FILE_SIZE" + + SH512_CHECKSUM=$(python3 ../../../../../.github/scripts/electron-checksum.py $FILE_NAME) + echo "sha512: $SH512_CHECKSUM" + CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ") + echo "releaseDate: $CURRENT_TIME" + + # Create latest.yml file + echo "version: ${{ inputs.new_version }}" > latest.yml + echo "files:" >> latest.yml + echo " - url: $FILE_NAME" >> latest.yml + echo " sha512: $SH512_CHECKSUM" >> latest.yml + echo " size: $FILE_NAME" >> latest.yml + echo "path: $FILE_NAME" >> latest.yml + echo "sha512: $SH512_CHECKSUM" >> latest.yml + echo "releaseDate: $CURRENT_TIME" >> 
latest.yml + + cat latest.yml + cp latest.yml beta.yml + + echo "::set-output name=WIN_SIG::$WIN_SIG" + echo "::set-output name=FILE_NAME::$FILE_NAME" + id: metadata + + ## Upload to s3 for nightly and beta + - name: upload to aws s3 if public provider is aws + shell: bash + if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta' + run: | + cd ./src-tauri/target/release/bundle/nsis + + # Upload for electron updater + aws s3 cp ./latest.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/latest.yml + aws s3 cp ./beta.yml s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/beta.yml + + # Upload for tauri updater + aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }} + AWS_EC2_METADATA_DISABLED: "true" + + ## Upload to github release for stable release + - name: Upload release assert if public provider is github + if: inputs.channel == 'stable' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/nsis/latest.yml + asset_name: latest.yml + asset_content_type: text/yaml + + - name: Upload release assert if public provider is github + if: inputs.channel == 'beta' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/nsis/beta.yml + asset_name: beta.yml + asset_content_type: text/yaml + + - name: Upload release assert if public provider is github + if: inputs.public_provider == 'github' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: 
actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }} + asset_name: ${{ steps.metadata.outputs.FILE_NAME }} + asset_content_type: application/octet-stream \ No newline at end of file diff --git a/.gitignore b/.gitignore index bbf7c059f..d2f46cc8f 100644 --- a/.gitignore +++ b/.gitignore @@ -23,19 +23,6 @@ coverage *.log core/lib/** -# Nitro binary files -extensions/*-extension/bin/*/nitro -extensions/*-extension/bin/*/*.metal -extensions/*-extension/bin/*/*.exe -extensions/*-extension/bin/*/*.dll -extensions/*-extension/bin/*/*.exp -extensions/*-extension/bin/*/*.lib -extensions/*-extension/bin/saved-* -extensions/*-extension/bin/*.tar.gz -extensions/*-extension/bin/vulkaninfoSDK.exe -extensions/*-extension/bin/vulkaninfo - - # Turborepo .turbo electron/test-data @@ -51,3 +38,10 @@ electron/shared/** # docs docs/yarn.lock electron/.version.bak +src-tauri/binaries/engines/cortex.llamacpp +src-tauri/resources/themes +src-tauri/Cargo.lock +src-tauri/icons +!src-tauri/icons/icon.png +src-tauri/gen/apple +src-tauri/resources/bin diff --git a/Makefile b/Makefile index 0c2a2ef8a..2803641fa 100644 --- a/Makefile +++ b/Makefile @@ -39,6 +39,11 @@ endif dev: check-file-counts yarn dev +dev-tauri: check-file-counts + yarn install:cortex + yarn download:bin + yarn dev:tauri + # Linting lint: check-file-counts yarn lint @@ -115,6 +120,9 @@ build-and-publish: check-file-counts build: check-file-counts yarn build +build-tauri: check-file-counts + yarn build-tauri + clean: ifeq ($(OS),Windows_NT) -powershell -Command "Get-ChildItem -Path . 
-Include node_modules, .next, dist, build, out, .turbo, .yarn -Recurse -Directory | Remove-Item -Recurse -Force" diff --git a/core/src/browser/core.test.ts b/core/src/browser/core.test.ts index 117298eb6..6197da023 100644 --- a/core/src/browser/core.test.ts +++ b/core/src/browser/core.test.ts @@ -1,3 +1,6 @@ +/** + * @jest-environment jsdom + */ import { openExternalUrl } from './core' import { joinPath } from './core' import { openFileExplorer } from './core' @@ -25,7 +28,7 @@ describe('test core apis', () => { }, } const result = await joinPath(paths) - expect(globalThis.core.api.joinPath).toHaveBeenCalledWith(paths) + expect(globalThis.core.api.joinPath).toHaveBeenCalledWith({ args: paths }) expect(result).toBe('/path/one/path/two') }) @@ -37,7 +40,7 @@ describe('test core apis', () => { }, } const result = await openFileExplorer(path) - expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith(path) + expect(globalThis.core.api.openFileExplorer).toHaveBeenCalledWith({ path }) expect(result).toBe('opened') }) @@ -51,20 +54,6 @@ describe('test core apis', () => { expect(globalThis.core.api.getJanDataFolderPath).toHaveBeenCalled() expect(result).toBe('/path/to/jan/data') }) - - it('should execute function on main process', async () => { - const extension = 'testExtension' - const method = 'testMethod' - const args = ['arg1', 'arg2'] - globalThis.core = { - api: { - invokeExtensionFunc: jest.fn().mockResolvedValue('result'), - }, - } - const result = await executeOnMain(extension, method, ...args) - expect(globalThis.core.api.invokeExtensionFunc).toHaveBeenCalledWith(extension, method, ...args) - expect(result).toBe('result') - }) }) describe('dirName - just a pass thru api', () => { diff --git a/core/src/browser/core.ts b/core/src/browser/core.ts index 43b5f9d48..3025ba963 100644 --- a/core/src/browser/core.ts +++ b/core/src/browser/core.ts @@ -13,8 +13,11 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom extension, 
method, ...args -) => globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args) - +) => { + if ('electronAPI' in window && window.electronAPI) + return globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args) + return () => {} +} /** * Gets Jan's data folder path. @@ -29,15 +32,15 @@ const getJanDataFolderPath = (): Promise => globalThis.core.api?.getJanD * @returns {Promise} A promise that resolves when the file explorer is opened. */ const openFileExplorer: (path: string) => Promise = (path) => - globalThis.core.api?.openFileExplorer(path) + globalThis.core.api?.openFileExplorer({ path }) /** * Joins multiple paths together. * @param paths - The paths to join. * @returns {Promise} A promise that resolves with the joined path. */ -const joinPath: (paths: string[]) => Promise = (paths) => - globalThis.core.api?.joinPath(paths) +const joinPath: (args: string[]) => Promise = (args) => + globalThis.core.api?.joinPath({ args }) /** * Get dirname of a file path. diff --git a/core/src/browser/extension.test.ts b/core/src/browser/extension.test.ts index 879258876..b2a1d1e73 100644 --- a/core/src/browser/extension.test.ts +++ b/core/src/browser/extension.test.ts @@ -1,7 +1,5 @@ import { BaseExtension } from './extension' import { SettingComponentProps } from '../types' -import { getJanDataFolderPath, joinPath } from './core' -import { fs } from './fs' jest.mock('./core') jest.mock('./fs') @@ -90,18 +88,32 @@ describe('BaseExtension', () => { { key: 'setting2', controllerProps: { value: 'value2' } } as any, ] - ;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data') - ;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension') - ;(fs.existsSync as jest.Mock).mockResolvedValue(false) - ;(fs.mkdir as jest.Mock).mockResolvedValue(undefined) - ;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined) + const localStorageMock = (() => { + let store: Record = {} + return { + getItem: (key: string) => store[key] || null, + 
setItem: (key: string, value: string) => { + store[key] = value + }, + removeItem: (key: string) => { + delete store[key] + }, + clear: () => { + store = {} + }, + } + })() + + Object.defineProperty(global, 'localStorage', { + value: localStorageMock, + }) + const mock = jest.spyOn(localStorage, 'setItem') await baseExtension.registerSettings(settings) - expect(fs.mkdir).toHaveBeenCalledWith('/data/settings/TestExtension') - expect(fs.writeFileSync).toHaveBeenCalledWith( - '/data/settings/TestExtension', - JSON.stringify(settings, null, 2) + expect(mock).toHaveBeenCalledWith( + 'TestExtension', + JSON.stringify(settings) ) }) @@ -125,17 +137,15 @@ describe('BaseExtension', () => { ] jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings) - ;(getJanDataFolderPath as jest.Mock).mockResolvedValue('/data') - ;(joinPath as jest.Mock).mockResolvedValue('/data/settings/TestExtension/settings.json') - ;(fs.writeFileSync as jest.Mock).mockResolvedValue(undefined) + const mockSetItem = jest.spyOn(localStorage, 'setItem') await baseExtension.updateSettings([ { key: 'setting1', controllerProps: { value: 'newValue' } } as any, ]) - expect(fs.writeFileSync).toHaveBeenCalledWith( - '/data/settings/TestExtension/settings.json', - JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }], null, 2) + expect(mockSetItem).toHaveBeenCalledWith( + 'TestExtension', + JSON.stringify([{ key: 'setting1', controllerProps: { value: 'newValue' } }]) ) }) }) diff --git a/core/src/browser/extension.ts b/core/src/browser/extension.ts index a050b9d59..fa6d06f3c 100644 --- a/core/src/browser/extension.ts +++ b/core/src/browser/extension.ts @@ -1,7 +1,4 @@ -import { Model, ModelEvent, SettingComponentProps } from '../types' -import { getJanDataFolderPath, joinPath } from './core' -import { events } from './events' -import { fs } from './fs' +import { Model, SettingComponentProps } from '../types' import { ModelManager } from './models' export enum ExtensionTypeEnum { 
@@ -117,22 +114,13 @@ export abstract class BaseExtension implements ExtensionType { return } - const extensionSettingFolderPath = await joinPath([ - await getJanDataFolderPath(), - 'settings', - this.name, - ]) settings.forEach((setting) => { setting.extensionName = this.name }) try { - if (!(await fs.existsSync(extensionSettingFolderPath))) - await fs.mkdir(extensionSettingFolderPath) - const settingFilePath = await joinPath([extensionSettingFolderPath, this.settingFileName]) - + const oldSettings = localStorage.getItem(this.name) // Persists new settings - if (await fs.existsSync(settingFilePath)) { - const oldSettings = JSON.parse(await fs.readFileSync(settingFilePath, 'utf-8')) + if (oldSettings) { settings.forEach((setting) => { // Keep setting value if (setting.controllerProps && Array.isArray(oldSettings)) @@ -141,7 +129,7 @@ export abstract class BaseExtension implements ExtensionType { )?.controllerProps?.value }) } - await fs.writeFileSync(settingFilePath, JSON.stringify(settings, null, 2)) + localStorage.setItem(this.name, JSON.stringify(settings)) } catch (err) { console.error(err) } @@ -180,21 +168,14 @@ export abstract class BaseExtension implements ExtensionType { async getSettings(): Promise { if (!this.name) return [] - const settingPath = await joinPath([ - await getJanDataFolderPath(), - this.settingFolderName, - this.name, - this.settingFileName, - ]) - try { - if (!(await fs.existsSync(settingPath))) return [] - const content = await fs.readFileSync(settingPath, 'utf-8') - const settings: SettingComponentProps[] = JSON.parse(content) - return settings + const settingsString = localStorage.getItem(this.name); + if (!settingsString) return []; + const settings: SettingComponentProps[] = JSON.parse(settingsString); + return settings; } catch (err) { - console.warn(err) - return [] + console.warn(err); + return []; } } @@ -220,20 +201,8 @@ export abstract class BaseExtension implements ExtensionType { if (!updatedSettings.length) updatedSettings = 
componentProps as SettingComponentProps[] - const settingFolder = await joinPath([ - await getJanDataFolderPath(), - this.settingFolderName, - this.name, - ]) - - if (!(await fs.existsSync(settingFolder))) { - await fs.mkdir(settingFolder) - } - - const settingPath = await joinPath([settingFolder, this.settingFileName]) - - await fs.writeFileSync(settingPath, JSON.stringify(updatedSettings, null, 2)) - + localStorage.setItem(this.name, JSON.stringify(updatedSettings)); + updatedSettings.forEach((setting) => { this.onSettingUpdate( setting.key, diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts index 2d1bdb3c2..6616208ad 100644 --- a/core/src/browser/extensions/engines/AIEngine.ts +++ b/core/src/browser/extensions/engines/AIEngine.ts @@ -40,12 +40,13 @@ export abstract class AIEngine extends BaseExtension { * Stops the model. */ async unloadModel(model?: Model): Promise { - if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve() + if (model?.engine && model.engine.toString() !== this.provider) + return Promise.resolve() events.emit(ModelEvent.OnModelStopped, model ?? {}) return Promise.resolve() } - /* + /** * Inference request */ inference(data: MessageRequest) {} diff --git a/core/src/browser/extensions/engines/EngineManager.ts b/core/src/browser/extensions/engines/EngineManager.ts index 90ce75ac5..7bf7a9924 100644 --- a/core/src/browser/extensions/engines/EngineManager.ts +++ b/core/src/browser/extensions/engines/EngineManager.ts @@ -6,6 +6,7 @@ import { AIEngine } from './AIEngine' */ export class EngineManager { public engines = new Map() + public controller: AbortController | null = null /** * Registers an engine. 
diff --git a/core/src/browser/extensions/engines/OAIEngine.test.ts b/core/src/browser/extensions/engines/OAIEngine.test.ts index 66537d0be..0e985fd1b 100644 --- a/core/src/browser/extensions/engines/OAIEngine.test.ts +++ b/core/src/browser/extensions/engines/OAIEngine.test.ts @@ -12,11 +12,7 @@ import { ChatCompletionRole, ContentType, } from '../../../types' -import { requestInference } from './helpers/sse' -import { ulid } from 'ulidx' -jest.mock('./helpers/sse') -jest.mock('ulidx') jest.mock('../../events') class TestOAIEngine extends OAIEngine { @@ -48,79 +44,6 @@ describe('OAIEngine', () => { ) }) - it('should handle inference request', async () => { - const data: MessageRequest = { - model: { engine: 'test-provider', id: 'test-model' } as any, - threadId: 'test-thread', - type: MessageRequestType.Thread, - assistantId: 'test-assistant', - messages: [{ role: ChatCompletionRole.User, content: 'Hello' }], - } - - ;(ulid as jest.Mock).mockReturnValue('test-id') - ;(requestInference as jest.Mock).mockReturnValue({ - subscribe: ({ next, complete }: any) => { - next('test response') - complete() - }, - }) - - await engine.inference(data) - - expect(requestInference).toHaveBeenCalledWith( - 'http://test-inference-url', - expect.objectContaining({ model: 'test-model' }), - expect.any(Object), - expect.any(AbortController), - { Authorization: 'Bearer test-token' }, - undefined - ) - - expect(events.emit).toHaveBeenCalledWith( - MessageEvent.OnMessageResponse, - expect.objectContaining({ id: 'test-id' }) - ) - expect(events.emit).toHaveBeenCalledWith( - MessageEvent.OnMessageUpdate, - expect.objectContaining({ - content: [ - { - type: ContentType.Text, - text: { value: 'test response', annotations: [] }, - }, - ], - status: MessageStatus.Ready, - }) - ) - }) - - it('should handle inference error', async () => { - const data: MessageRequest = { - model: { engine: 'test-provider', id: 'test-model' } as any, - threadId: 'test-thread', - type: MessageRequestType.Thread, - 
assistantId: 'test-assistant', - messages: [{ role: ChatCompletionRole.User, content: 'Hello' }], - } - - ;(ulid as jest.Mock).mockReturnValue('test-id') - ;(requestInference as jest.Mock).mockReturnValue({ - subscribe: ({ error }: any) => { - error({ message: 'test error', code: 500 }) - }, - }) - - await engine.inference(data) - - expect(events.emit).toHaveBeenLastCalledWith( - MessageEvent.OnMessageUpdate, - expect.objectContaining({ - status: 'error', - error_code: 500, - }) - ) - }) - it('should stop inference', () => { engine.stopInference() expect(engine.isCancelled).toBe(true) diff --git a/core/src/browser/extensions/engines/OAIEngine.ts b/core/src/browser/extensions/engines/OAIEngine.ts index 61032357c..3502aa1f7 100644 --- a/core/src/browser/extensions/engines/OAIEngine.ts +++ b/core/src/browser/extensions/engines/OAIEngine.ts @@ -1,18 +1,9 @@ -import { requestInference } from './helpers/sse' -import { ulid } from 'ulidx' import { AIEngine } from './AIEngine' import { - ChatCompletionRole, - ContentType, InferenceEvent, MessageEvent, MessageRequest, - MessageRequestType, - MessageStatus, Model, - ModelInfo, - ThreadContent, - ThreadMessage, } from '../../../types' import { events } from '../../events' @@ -53,111 +44,6 @@ export abstract class OAIEngine extends AIEngine { */ override onUnload(): void {} - /* - * Inference request - */ - override async inference(data: MessageRequest) { - if (!data.model?.id) { - events.emit(MessageEvent.OnMessageResponse, { - status: MessageStatus.Error, - content: [ - { - type: ContentType.Text, - text: { - value: 'No model ID provided', - annotations: [], - }, - }, - ], - }) - return - } - - const timestamp = Date.now() / 1000 - const message: ThreadMessage = { - id: ulid(), - thread_id: data.threadId, - type: data.type, - assistant_id: data.assistantId, - role: ChatCompletionRole.Assistant, - content: [], - status: MessageStatus.Pending, - created_at: timestamp, - completed_at: timestamp, - object: 'thread.message', - } 
- - if (data.type !== MessageRequestType.Summary) { - events.emit(MessageEvent.OnMessageResponse, message) - } - - this.isCancelled = false - this.controller = new AbortController() - - const model: ModelInfo = { - ...(this.loadedModel ? this.loadedModel : {}), - ...data.model, - } - - const header = await this.headers() - let requestBody = { - messages: data.messages ?? [], - model: model.id, - stream: true, - ...model.parameters, - } - if (this.transformPayload) { - requestBody = this.transformPayload(requestBody) - } - - requestInference( - this.inferenceUrl, - requestBody, - model, - this.controller, - header, - this.transformResponse - ).subscribe({ - next: (content: any) => { - const messageContent: ThreadContent = { - type: ContentType.Text, - text: { - value: content.trim(), - annotations: [], - }, - } - message.content = [messageContent] - events.emit(MessageEvent.OnMessageUpdate, message) - }, - complete: async () => { - message.status = message.content.length - ? MessageStatus.Ready - : MessageStatus.Error - events.emit(MessageEvent.OnMessageUpdate, message) - }, - error: async (err: any) => { - if (this.isCancelled || message.content.length) { - message.status = MessageStatus.Stopped - events.emit(MessageEvent.OnMessageUpdate, message) - return - } - message.status = MessageStatus.Error - message.content[0] = { - type: ContentType.Text, - text: { - value: - typeof message === 'string' - ? err.message - : (JSON.stringify(err.message) ?? err.detail), - annotations: [], - }, - } - message.error_code = err.code - events.emit(MessageEvent.OnMessageUpdate, message) - }, - }) - } - /** * Stops the inference. 
*/ diff --git a/core/src/browser/extensions/engines/helpers/sse.test.ts b/core/src/browser/extensions/engines/helpers/sse.test.ts deleted file mode 100644 index f8c2ac6b4..000000000 --- a/core/src/browser/extensions/engines/helpers/sse.test.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { lastValueFrom, Observable } from 'rxjs' -import { requestInference } from './sse' - -import { ReadableStream } from 'stream/web' -describe('requestInference', () => { - it('should send a request to the inference server and return an Observable', () => { - // Mock the fetch function - const mockFetch: any = jest.fn(() => - Promise.resolve({ - ok: true, - json: () => - Promise.resolve({ - choices: [{ message: { content: 'Generated response' } }], - }), - headers: new Headers(), - redirected: false, - status: 200, - statusText: 'OK', - // Add other required properties here - }) - ) - jest.spyOn(global, 'fetch').mockImplementation(mockFetch) - - // Define the test inputs - const inferenceUrl = 'https://inference-server.com' - const requestBody = { message: 'Hello' } - const model = { id: 'model-id', parameters: { stream: false } } - - // Call the function - const result = requestInference(inferenceUrl, requestBody, model) - - // Assert the expected behavior - expect(result).toBeInstanceOf(Observable) - expect(lastValueFrom(result)).resolves.toEqual('Generated response') - }) - - it('returns 401 error', () => { - // Mock the fetch function - const mockFetch: any = jest.fn(() => - Promise.resolve({ - ok: false, - json: () => - Promise.resolve({ - error: { message: 'Invalid API Key.', code: 'invalid_api_key' }, - }), - headers: new Headers(), - redirected: false, - status: 401, - statusText: 'invalid_api_key', - // Add other required properties here - }) - ) - jest.spyOn(global, 'fetch').mockImplementation(mockFetch) - - // Define the test inputs - const inferenceUrl = 'https://inference-server.com' - const requestBody = { message: 'Hello' } - const model = { id: 'model-id', parameters: { 
stream: false } } - - // Call the function - const result = requestInference(inferenceUrl, requestBody, model) - - // Assert the expected behavior - expect(result).toBeInstanceOf(Observable) - expect(lastValueFrom(result)).rejects.toEqual({ - message: 'Invalid API Key.', - code: 'invalid_api_key', - }) - }) -}) - -it('should handle a successful response with a transformResponse function', () => { - // Mock the fetch function - const mockFetch: any = jest.fn(() => - Promise.resolve({ - ok: true, - json: () => - Promise.resolve({ - choices: [{ message: { content: 'Generated response' } }], - }), - headers: new Headers(), - redirected: false, - status: 200, - statusText: 'OK', - }) - ) - jest.spyOn(global, 'fetch').mockImplementation(mockFetch) - - // Define the test inputs - const inferenceUrl = 'https://inference-server.com' - const requestBody = { message: 'Hello' } - const model = { id: 'model-id', parameters: { stream: false } } - const transformResponse = (data: any) => - data.choices[0].message.content.toUpperCase() - - // Call the function - const result = requestInference( - inferenceUrl, - requestBody, - model, - undefined, - undefined, - transformResponse - ) - - // Assert the expected behavior - expect(result).toBeInstanceOf(Observable) - expect(lastValueFrom(result)).resolves.toEqual('GENERATED RESPONSE') -}) - -it('should handle a successful response with streaming enabled', () => { - // Mock the fetch function - const mockFetch: any = jest.fn(() => - Promise.resolve({ - ok: true, - body: new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - 'data: {"choices": [{"delta": {"content": "Streamed"}}]}' - ) - ) - controller.enqueue(new TextEncoder().encode('data: [DONE]')) - controller.close() - }, - }), - headers: new Headers(), - redirected: false, - status: 200, - statusText: 'OK', - }) - ) - jest.spyOn(global, 'fetch').mockImplementation(mockFetch) - - // Define the test inputs - const inferenceUrl = 
'https://inference-server.com' - const requestBody = { message: 'Hello' } - const model = { id: 'model-id', parameters: { stream: true } } - - // Call the function - const result = requestInference(inferenceUrl, requestBody, model) - - // Assert the expected behavior - expect(result).toBeInstanceOf(Observable) - expect(lastValueFrom(result)).resolves.toEqual('Streamed') -}) diff --git a/core/src/browser/extensions/engines/helpers/sse.ts b/core/src/browser/extensions/engines/helpers/sse.ts deleted file mode 100644 index 5c63008ff..000000000 --- a/core/src/browser/extensions/engines/helpers/sse.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { Observable } from 'rxjs' -import { ErrorCode, ModelRuntimeParams } from '../../../../types' -/** - * Sends a request to the inference server to generate a response based on the recent messages. - * @param recentMessages - An array of recent messages to use as context for the inference. - * @returns An Observable that emits the generated response as a string. - */ -export function requestInference( - inferenceUrl: string, - requestBody: any, - model: { - id: string - parameters?: ModelRuntimeParams - }, - controller?: AbortController, - headers?: HeadersInit, - transformResponse?: Function -): Observable { - return new Observable((subscriber) => { - fetch(inferenceUrl, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Access-Control-Allow-Origin': '*', - 'Accept': model.parameters?.stream - ? 
'text/event-stream' - : 'application/json', - ...headers, - }, - body: JSON.stringify(requestBody), - signal: controller?.signal, - }) - .then(async (response) => { - if (!response.ok) { - if (response.status === 401) { - throw { - code: ErrorCode.InvalidApiKey, - message: 'Invalid API Key.', - } - } - let data = await response.json() - try { - handleError(data) - } catch (err) { - subscriber.error(err) - return - } - } - // There could be overriden stream parameter in the model - // that is set in request body (transformed payload) - if ( - requestBody?.stream === false || - model.parameters?.stream === false - ) { - const data = await response.json() - try { - handleError(data) - } catch (err) { - subscriber.error(err) - return - } - if (transformResponse) { - subscriber.next(transformResponse(data)) - } else { - subscriber.next( - data.choices - ? data.choices[0]?.message?.content - : (data.content[0]?.text ?? '') - ) - } - } else { - const stream = response.body - const decoder = new TextDecoder('utf-8') - const reader = stream?.getReader() - let content = '' - - while (true && reader) { - const { done, value } = await reader.read() - if (done) { - break - } - const text = decoder.decode(value) - const lines = text.trim().split('\n') - let cachedLines = '' - for (const line of lines) { - try { - if (transformResponse) { - content += transformResponse(line) - subscriber.next(content ?? '') - } else { - const toParse = cachedLines + line - if (!line.includes('data: [DONE]')) { - const data = JSON.parse(toParse.replace('data: ', '')) - try { - handleError(data) - } catch (err) { - subscriber.error(err) - return - } - content += data.choices[0]?.delta?.content ?? 
'' - if (content.startsWith('assistant: ')) { - content = content.replace('assistant: ', '') - } - if (content !== '') subscriber.next(content) - } - } - } catch { - cachedLines = line - } - } - } - } - subscriber.complete() - }) - .catch((err) => subscriber.error(err)) - }) -} - -/** - * Handle error and normalize it to a common format. - * @param data - */ -const handleError = (data: any) => { - if ( - data.error || - data.message || - data.detail || - (Array.isArray(data) && data.length && data[0].error) - ) { - throw data.error ?? data[0]?.error ?? data - } -} diff --git a/core/src/browser/fs.test.ts b/core/src/browser/fs.test.ts index 21da54874..04e6fbe1c 100644 --- a/core/src/browser/fs.test.ts +++ b/core/src/browser/fs.test.ts @@ -36,31 +36,31 @@ describe('fs module', () => { it('should call readFileSync with correct arguments', () => { const args = ['path/to/file'] fs.readFileSync(...args) - expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith(...args) + expect(globalThis.core.api.readFileSync).toHaveBeenCalledWith({ args }) }) it('should call existsSync with correct arguments', () => { const args = ['path/to/file'] fs.existsSync(...args) - expect(globalThis.core.api.existsSync).toHaveBeenCalledWith(...args) + expect(globalThis.core.api.existsSync).toHaveBeenCalledWith({ args }) }) it('should call readdirSync with correct arguments', () => { const args = ['path/to/directory'] fs.readdirSync(...args) - expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith(...args) + expect(globalThis.core.api.readdirSync).toHaveBeenCalledWith({ args }) }) it('should call mkdir with correct arguments', () => { const args = ['path/to/directory'] fs.mkdir(...args) - expect(globalThis.core.api.mkdir).toHaveBeenCalledWith(...args) + expect(globalThis.core.api.mkdir).toHaveBeenCalledWith({ args }) }) it('should call rm with correct arguments', () => { const args = ['path/to/directory'] fs.rm(...args) - expect(globalThis.core.api.rm).toHaveBeenCalledWith(...args, { 
recursive: true, force: true }) + expect(globalThis.core.api.rm).toHaveBeenCalledWith({ args }) }) it('should call unlinkSync with correct arguments', () => { diff --git a/core/src/browser/fs.ts b/core/src/browser/fs.ts index 7aa5f4d92..18f937037 100644 --- a/core/src/browser/fs.ts +++ b/core/src/browser/fs.ts @@ -19,29 +19,29 @@ const writeBlob: (path: string, data: string) => Promise = (path, data) => * Reads the contents of a file at the specified path. * @returns {Promise} A Promise that resolves with the contents of the file. */ -const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync(...args) +const readFileSync = (...args: any[]) => globalThis.core.api?.readFileSync({ args }) /** * Check whether the file exists * @param {string} path * @returns {boolean} A boolean indicating whether the path is a file. */ -const existsSync = (...args: any[]) => globalThis.core.api?.existsSync(...args) +const existsSync = (...args: any[]) => globalThis.core.api?.existsSync({ args }) /** * List the directory files * @returns {Promise} A Promise that resolves with the contents of the directory. */ -const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync(...args) +const readdirSync = (...args: any[]) => globalThis.core.api?.readdirSync({ args }) /** * Creates a directory at the specified path. * @returns {Promise} A Promise that resolves when the directory is created successfully. */ -const mkdir = (...args: any[]) => globalThis.core.api?.mkdir(...args) +const mkdir = (...args: any[]) => globalThis.core.api?.mkdir({ args }) /** * Removes a directory at the specified path. * @returns {Promise} A Promise that resolves when the directory is removed successfully. */ -const rm = (...args: any[]) => globalThis.core.api?.rm(...args, { recursive: true, force: true }) +const rm = (...args: any[]) => globalThis.core.api?.rm({ args }) /** * Deletes a file from the local file system. 
diff --git a/core/src/browser/index.test.ts b/core/src/browser/index.test.ts index c8cabbb0b..fcdb635ff 100644 --- a/core/src/browser/index.test.ts +++ b/core/src/browser/index.test.ts @@ -3,7 +3,6 @@ import * as Events from './events' import * as FileSystem from './fs' import * as Extension from './extension' import * as Extensions from './extensions' -import * as Tools from './tools' import * as Models from './models' describe('Module Tests', () => { @@ -27,10 +26,6 @@ describe('Module Tests', () => { expect(Extensions).toBeDefined() }) - it('should export all base tools', () => { - expect(Tools).toBeDefined() - }) - it('should export all base tools', () => { expect(Models).toBeDefined() }) diff --git a/core/src/browser/index.ts b/core/src/browser/index.ts index a6ce187ca..5912d8c3b 100644 --- a/core/src/browser/index.ts +++ b/core/src/browser/index.ts @@ -28,12 +28,6 @@ export * from './extension' */ export * from './extensions' -/** - * Export all base tools. - * @module - */ -export * from './tools' - /** * Export all base models. 
* @module diff --git a/core/src/browser/tools/index.test.ts b/core/src/browser/tools/index.test.ts deleted file mode 100644 index 8a24d3bb6..000000000 --- a/core/src/browser/tools/index.test.ts +++ /dev/null @@ -1,5 +0,0 @@ - - -it('should not throw any errors when imported', () => { - expect(() => require('./index')).not.toThrow(); -}) diff --git a/core/src/browser/tools/index.ts b/core/src/browser/tools/index.ts deleted file mode 100644 index 24cd12780..000000000 --- a/core/src/browser/tools/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './manager' -export * from './tool' diff --git a/core/src/browser/tools/manager.ts b/core/src/browser/tools/manager.ts deleted file mode 100644 index b323ad7ce..000000000 --- a/core/src/browser/tools/manager.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { AssistantTool, MessageRequest } from '../../types' -import { InferenceTool } from './tool' - -/** - * Manages the registration and retrieval of inference tools. - */ -export class ToolManager { - public tools = new Map() - - /** - * Registers a tool. - * @param tool - The tool to register. - */ - register(tool: T) { - this.tools.set(tool.name, tool) - } - - /** - * Retrieves a tool by it's name. - * @param name - The name of the tool to retrieve. - * @returns The tool, if found. - */ - get(name: string): T | undefined { - return this.tools.get(name) as T | undefined - } - - /* - ** Process the message request with the tools. - */ - process(request: MessageRequest, tools: AssistantTool[]): Promise { - return tools.reduce((prevPromise, currentTool) => { - return prevPromise.then((prevResult) => { - return currentTool.enabled - ? this.get(currentTool.type)?.process(prevResult, currentTool) ?? - Promise.resolve(prevResult) - : Promise.resolve(prevResult) - }) - }, Promise.resolve(request)) - } - - /** - * The instance of the tool manager. - */ - static instance(): ToolManager { - return (window.core?.toolManager as ToolManager) ?? 
new ToolManager() - } -} diff --git a/core/src/browser/tools/tool.test.ts b/core/src/browser/tools/tool.test.ts deleted file mode 100644 index dcb478478..000000000 --- a/core/src/browser/tools/tool.test.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { ToolManager } from '../../browser/tools/manager' -import { InferenceTool } from '../../browser/tools/tool' -import { AssistantTool, MessageRequest } from '../../types' - -class MockInferenceTool implements InferenceTool { - name = 'mockTool' - process(request: MessageRequest, tool: AssistantTool): Promise { - return Promise.resolve(request) - } -} - -it('should register a tool', () => { - const manager = new ToolManager() - const tool = new MockInferenceTool() - manager.register(tool) - expect(manager.get(tool.name)).toBe(tool) -}) - -it('should retrieve a tool by its name', () => { - const manager = new ToolManager() - const tool = new MockInferenceTool() - manager.register(tool) - const retrievedTool = manager.get(tool.name) - expect(retrievedTool).toBe(tool) -}) - -it('should return undefined for a non-existent tool', () => { - const manager = new ToolManager() - const retrievedTool = manager.get('nonExistentTool') - expect(retrievedTool).toBeUndefined() -}) - -it('should process the message request with enabled tools', async () => { - const manager = new ToolManager() - const tool = new MockInferenceTool() - manager.register(tool) - - const request: MessageRequest = { message: 'test' } as any - const tools: AssistantTool[] = [{ type: 'mockTool', enabled: true }] as any - - const result = await manager.process(request, tools) - expect(result).toBe(request) -}) - -it('should skip processing for disabled tools', async () => { - const manager = new ToolManager() - const tool = new MockInferenceTool() - manager.register(tool) - - const request: MessageRequest = { message: 'test' } as any - const tools: AssistantTool[] = [{ type: 'mockTool', enabled: false }] as any - - const result = await manager.process(request, tools) - 
expect(result).toBe(request) -}) - -it('should throw an error when process is called without implementation', () => { - class TestTool extends InferenceTool { - name = 'testTool' - } - const tool = new TestTool() - expect(() => tool.process({} as MessageRequest)).toThrowError() -}) diff --git a/core/src/browser/tools/tool.ts b/core/src/browser/tools/tool.ts deleted file mode 100644 index 0fd342933..000000000 --- a/core/src/browser/tools/tool.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { AssistantTool, MessageRequest } from '../../types' - -/** - * Represents a base inference tool. - */ -export abstract class InferenceTool { - abstract name: string - /* - ** Process a message request and return the processed message request. - */ - abstract process(request: MessageRequest, tool?: AssistantTool): Promise -} diff --git a/core/src/node/api/processors/app.ts b/core/src/node/api/processors/app.ts index d86e6265c..4fbd8648f 100644 --- a/core/src/node/api/processors/app.ts +++ b/core/src/node/api/processors/app.ts @@ -8,6 +8,7 @@ import { normalizeFilePath, getJanDataFolderPath, } from '../../helper' +import { readdirSync, readFileSync } from 'fs' export class App implements Processor { observer?: Function @@ -25,8 +26,8 @@ export class App implements Processor { /** * Joins multiple paths together, respect to the current OS. */ - joinPath(args: any[]) { - return join(...args) + joinPath(args: any) { + return join(...('args' in args ? args.args : args)) } /** @@ -69,10 +70,32 @@ export class App implements Processor { writeLog(args) } + /** + * Get app configurations. + */ getAppConfigurations() { return appConfiguration() } + /** + * Get themes from the app data folder. 
+ * @returns + */ + getThemes() { + const themesPath = join(getJanDataFolderPath(), 'themes') + return readdirSync(themesPath) + } + + /** + * Read theme.json + * @param theme + * @returns + */ + readTheme({ theme }: { theme: string }) { + const themePath = join(getJanDataFolderPath(), 'themes', theme, 'theme.json') + return readFileSync(themePath, { encoding: 'utf-8' }) + } + async updateAppConfiguration(args: any) { await updateAppConfiguration(args) } diff --git a/core/src/node/api/processors/fs.ts b/core/src/node/api/processors/fs.ts index ada744d53..7bc5f1e20 100644 --- a/core/src/node/api/processors/fs.ts +++ b/core/src/node/api/processors/fs.ts @@ -21,18 +21,21 @@ export class FileSystem implements Processor { return import(FileSystem.moduleName).then((mdl) => mdl[route]( ...args.map((arg: any, index: number) => { - if(index !== 0) { + const arg0 = args[0] + if ('args' in arg0) arg = arg0.args + if (Array.isArray(arg)) arg = arg[0] + if (index !== 0) { return arg } if (index === 0 && typeof arg !== 'string') { throw new Error(`Invalid argument ${JSON.stringify(args)}`) } const path = - (arg.startsWith(`file:/`) || arg.startsWith(`file:\\`)) - ? join(getJanDataFolderPath(), normalizeFilePath(arg)) - : arg + arg.startsWith(`file:/`) || arg.startsWith(`file:\\`) + ? 
join(getJanDataFolderPath(), normalizeFilePath(arg)) + : arg - if(path.startsWith(`http://`) || path.startsWith(`https://`)) { + if (path.startsWith(`http://`) || path.startsWith(`https://`)) { return path } const absolutePath = resolve(path) @@ -88,5 +91,4 @@ export class FileSystem implements Processor { }) }) } - } diff --git a/core/src/node/extension/extension.ts b/core/src/node/extension/extension.ts index aea14f705..cd2bb0e06 100644 --- a/core/src/node/extension/extension.ts +++ b/core/src/node/extension/extension.ts @@ -94,8 +94,6 @@ export default class Extension { `Package ${this.origin} does not contain a valid manifest: ${error}` ) } - - return true } /** diff --git a/core/src/node/helper/config.ts b/core/src/node/helper/config.ts index 6fb28d01f..89955a2d6 100644 --- a/core/src/node/helper/config.ts +++ b/core/src/node/helper/config.ts @@ -18,9 +18,7 @@ export const getAppConfigurations = (): AppConfiguration => { if (!fs.existsSync(configurationFile)) { // create default app config if we don't have one - console.debug( - `App config not found, creating default config at ${configurationFile}` - ) + console.debug(`App config not found, creating default config at ${configurationFile}`) fs.writeFileSync(configurationFile, JSON.stringify(appDefaultConfiguration)) return appDefaultConfiguration } @@ -31,28 +29,23 @@ export const getAppConfigurations = (): AppConfiguration => { ) return appConfigurations } catch (err) { - console.error( - `Failed to read app config, return default config instead! Err: ${err}` - ) + console.error(`Failed to read app config, return default config instead! Err: ${err}`) return defaultAppConfig() } } const getConfigurationFilePath = () => join( - global.core?.appPath() || - process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'], + global.core?.appPath() || process.env[process.platform == 'win32' ? 
'USERPROFILE' : 'HOME'], configurationFileName ) -export const updateAppConfiguration = ( +export const updateAppConfiguration = ({ + configuration, +}: { configuration: AppConfiguration -): Promise => { +}): Promise => { const configurationFile = getConfigurationFilePath() - console.debug( - 'updateAppConfiguration, configurationFile: ', - configurationFile - ) fs.writeFileSync(configurationFile, JSON.stringify(configuration)) return Promise.resolve() @@ -87,14 +80,11 @@ export const getJanExtensionsPath = (): string => { */ export const defaultAppConfig = (): AppConfiguration => { const { app } = require('electron') - const defaultJanDataFolder = join( - app?.getPath('userData') ?? os?.homedir() ?? '', - 'data' - ) + const defaultJanDataFolder = join(app?.getPath('userData') ?? os?.homedir() ?? '', 'data') return { data_folder: process.env.CI === 'e2e' - ? (process.env.APP_CONFIG_PATH ?? resolve('./test-data')) + ? process.env.APP_CONFIG_PATH ?? resolve('./test-data') : defaultJanDataFolder, quick_ask: false, } diff --git a/core/src/types/api/index.ts b/core/src/types/api/index.ts index 2f33b72e4..d96ba6d06 100644 --- a/core/src/types/api/index.ts +++ b/core/src/types/api/index.ts @@ -40,7 +40,7 @@ export enum NativeRoute { /** * App Route APIs * @description Enum of all the routes exposed by the app - */ +*/ export enum AppRoute { getAppConfigurations = 'getAppConfigurations', updateAppConfiguration = 'updateAppConfiguration', @@ -51,6 +51,8 @@ export enum AppRoute { log = 'log', systemInformation = 'systemInformation', showToast = 'showToast', + getThemes = 'getThemes', + readTheme = 'readTheme' } export enum AppEvent { diff --git a/core/src/types/inference/inferenceEntity.ts b/core/src/types/inference/inferenceEntity.ts index c37e3b079..ac2e48d32 100644 --- a/core/src/types/inference/inferenceEntity.ts +++ b/core/src/types/inference/inferenceEntity.ts @@ -7,6 +7,7 @@ export enum ChatCompletionRole { System = 'system', Assistant = 'assistant', User = 'user', + 
Tool = 'tool', } /** @@ -18,6 +19,9 @@ export type ChatCompletionMessage = { content?: ChatCompletionMessageContent /** The role of the author of this message. **/ role: ChatCompletionRole + type?: string + output?: string + tool_call_id?: string } export type ChatCompletionMessageContent = diff --git a/core/src/types/message/messageEntity.ts b/core/src/types/message/messageEntity.ts index edd253a57..20979c68e 100644 --- a/core/src/types/message/messageEntity.ts +++ b/core/src/types/message/messageEntity.ts @@ -36,6 +36,8 @@ export type ThreadMessage = { type?: string /** The error code which explain what error type. Used in conjunction with MessageStatus.Error */ error_code?: ErrorCode + + tool_call_id?: string } /** @@ -43,6 +45,9 @@ export type ThreadMessage = { * @data_transfer_object */ export type MessageRequest = { + /** + * The id of the message request. + */ id?: string /** @@ -71,6 +76,11 @@ export type MessageRequest = { // TODO: deprecate threadId field thread?: Thread + /** + * ChatCompletion tools + */ + tools?: MessageTool[] + /** Engine name to process */ engine?: string @@ -78,6 +88,24 @@ export type MessageRequest = { type?: string } +/** + * ChatCompletion Tool parameters + */ +export type MessageTool = { + type: string + function: MessageFunction +} + +/** + * ChatCompletion Tool's function parameters + */ +export type MessageFunction = { + name: string + description?: string + parameters?: Record + strict?: boolean +} + /** * The status of the message. 
* @data_transfer_object diff --git a/extensions/assistant-extension/package.json b/extensions/assistant-extension/package.json index 08ccb3b3d..4761aa900 100644 --- a/extensions/assistant-extension/package.json +++ b/extensions/assistant-extension/package.json @@ -8,17 +8,10 @@ "author": "Jan ", "license": "AGPL-3.0", "scripts": { - "clean:modules": "rimraf node_modules/pdf-parse/test && cd node_modules/pdf-parse/lib/pdf.js && rimraf v1.9.426 v1.10.88 v2.0.550", - "build-universal-hnswlib": "[ \"$IS_TEST\" = \"true\" ] && echo \"Skip universal build\" || (cd node_modules/hnswlib-node && arch -x86_64 npx node-gyp rebuild --arch=x64 && mv build/Release/addon.node ./addon-amd64.node && node-gyp rebuild --arch=arm64 && mv build/Release/addon.node ./addon-arm64.node && lipo -create -output build/Release/addon.node ./addon-arm64.node ./addon-amd64.node && rm ./addon-arm64.node && rm ./addon-amd64.node)", - "build": "yarn clean:modules && rolldown -c rolldown.config.mjs", - "build:publish:linux": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install", - "build:publish:darwin": "rimraf *.tgz --glob || true && yarn build-universal-hnswlib && yarn build && ../../.github/scripts/auto-sign.sh && npm pack && cpx *.tgz ../../pre-install", - "build:publish:win32": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install", - "build:publish": "run-script-os", - "build:dev": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install" + "build": "rolldown -c rolldown.config.mjs", + "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install" }, "devDependencies": { - "@types/pdf-parse": "^1.1.4", "cpx": "^1.5.0", "rimraf": "^3.0.2", "rolldown": "1.0.0-beta.1", @@ -27,11 +20,6 @@ }, "dependencies": { "@janhq/core": "../../core/package.tgz", - "@langchain/community": "0.0.13", - "hnswlib-node": "^1.4.2", - "langchain": "^0.0.214", - "node-gyp": "^11.0.0", - 
"pdf-parse": "^1.1.1", "ts-loader": "^9.5.0" }, "files": [ @@ -40,8 +28,7 @@ "README.md" ], "bundleDependencies": [ - "@janhq/core", - "hnswlib-node" + "@janhq/core" ], "installConfig": { "hoistingLimits": "workspaces" diff --git a/extensions/assistant-extension/rolldown.config.mjs b/extensions/assistant-extension/rolldown.config.mjs index e549ea7d9..436de93a8 100644 --- a/extensions/assistant-extension/rolldown.config.mjs +++ b/extensions/assistant-extension/rolldown.config.mjs @@ -13,22 +13,5 @@ export default defineConfig([ NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`), VERSION: JSON.stringify(pkgJson.version), }, - }, - { - input: 'src/node/index.ts', - external: ['@janhq/core/node', 'path', 'hnswlib-node'], - output: { - format: 'cjs', - file: 'dist/node/index.js', - sourcemap: false, - inlineDynamicImports: true, - }, - resolve: { - extensions: ['.js', '.ts'], - }, - define: { - CORTEX_API_URL: JSON.stringify(`http://127.0.0.1:${process.env.CORTEX_API_PORT ?? "39291"}`), - }, - platform: 'node', - }, + } ]) diff --git a/extensions/assistant-extension/src/index.ts b/extensions/assistant-extension/src/index.ts index 621d8e216..bb253bd7f 100644 --- a/extensions/assistant-extension/src/index.ts +++ b/extensions/assistant-extension/src/index.ts @@ -1,126 +1,20 @@ -import { - fs, - Assistant, - events, - joinPath, - AssistantExtension, - AssistantEvent, - ToolManager, -} from '@janhq/core' -import { RetrievalTool } from './tools/retrieval' +import { Assistant, AssistantExtension } from '@janhq/core' export default class JanAssistantExtension extends AssistantExtension { - private static readonly _homeDir = 'file://assistants' - - async onLoad() { - // Register the retrieval tool - ToolManager.instance().register(new RetrievalTool()) - - // making the assistant directory - const assistantDirExist = await fs.existsSync( - JanAssistantExtension._homeDir - ) - if ( - localStorage.getItem(`${this.name}-version`) !== VERSION || - !assistantDirExist - ) { - if 
(!assistantDirExist) await fs.mkdir(JanAssistantExtension._homeDir) - - // Write assistant metadata - await this.createJanAssistant() - // Finished migration - localStorage.setItem(`${this.name}-version`, VERSION) - // Update the assistant list - events.emit(AssistantEvent.OnAssistantsUpdate, {}) - } - } + async onLoad() {} /** * Called when the extension is unloaded. */ onUnload(): void {} - async createAssistant(assistant: Assistant): Promise { - const assistantDir = await joinPath([ - JanAssistantExtension._homeDir, - assistant.id, - ]) - if (!(await fs.existsSync(assistantDir))) await fs.mkdir(assistantDir) - - // store the assistant metadata json - const assistantMetadataPath = await joinPath([ - assistantDir, - 'assistant.json', - ]) - try { - await fs.writeFileSync( - assistantMetadataPath, - JSON.stringify(assistant, null, 2) - ) - } catch (err) { - console.error(err) - } - } - async getAssistants(): Promise { - try { - // get all the assistant directories - // get all the assistant metadata json - const results: Assistant[] = [] - - const allFileName: string[] = await fs.readdirSync( - JanAssistantExtension._homeDir - ) - - for (const fileName of allFileName) { - const filePath = await joinPath([ - JanAssistantExtension._homeDir, - fileName, - ]) - - if (!(await fs.fileStat(filePath))?.isDirectory) continue - const jsonFiles: string[] = (await fs.readdirSync(filePath)).filter( - (file: string) => file === 'assistant.json' - ) - - if (jsonFiles.length !== 1) { - // has more than one assistant file -> ignore - continue - } - - const content = await fs.readFileSync( - await joinPath([filePath, jsonFiles[0]]), - 'utf-8' - ) - const assistant: Assistant = - typeof content === 'object' ? 
content : JSON.parse(content) - - results.push(assistant) - } - - return results - } catch (err) { - console.debug(err) - return [this.defaultAssistant] - } + return [this.defaultAssistant] } - async deleteAssistant(assistant: Assistant): Promise { - if (assistant.id === 'jan') { - return Promise.reject('Cannot delete Jan Assistant') - } - - // remove the directory - const assistantDir = await joinPath([ - JanAssistantExtension._homeDir, - assistant.id, - ]) - return fs.rm(assistantDir) - } - - private async createJanAssistant(): Promise { - await this.createAssistant(this.defaultAssistant) - } + /** DEPRECATED */ + async createAssistant(assistant: Assistant): Promise {} + async deleteAssistant(assistant: Assistant): Promise {} private defaultAssistant: Assistant = { avatar: '', diff --git a/extensions/assistant-extension/src/node/index.ts b/extensions/assistant-extension/src/node/index.ts deleted file mode 100644 index 731890b34..000000000 --- a/extensions/assistant-extension/src/node/index.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { getJanDataFolderPath } from '@janhq/core/node' -import { retrieval } from './retrieval' -import path from 'path' - -export function toolRetrievalUpdateTextSplitter( - chunkSize: number, - chunkOverlap: number -) { - retrieval.updateTextSplitter(chunkSize, chunkOverlap) -} -export async function toolRetrievalIngestNewDocument( - thread: string, - file: string, - model: string, - engine: string, - useTimeWeighted: boolean -) { - const threadPath = path.join(getJanDataFolderPath(), 'threads', thread) - const filePath = path.join(getJanDataFolderPath(), 'files', file) - retrieval.updateEmbeddingEngine(model, engine) - return retrieval - .ingestAgentKnowledge(filePath, `${threadPath}/memory`, useTimeWeighted) - .catch((err) => { - console.error(err) - }) -} - -export async function toolRetrievalLoadThreadMemory(threadId: string) { - return retrieval - .loadRetrievalAgent( - path.join(getJanDataFolderPath(), 'threads', threadId, 'memory') - 
) - .catch((err) => { - console.error(err) - }) -} - -export async function toolRetrievalQueryResult( - query: string, - useTimeWeighted: boolean = false -) { - return retrieval.generateResult(query, useTimeWeighted).catch((err) => { - console.error(err) - }) -} diff --git a/extensions/assistant-extension/src/node/retrieval.ts b/extensions/assistant-extension/src/node/retrieval.ts deleted file mode 100644 index 05fa67d54..000000000 --- a/extensions/assistant-extension/src/node/retrieval.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter' -import { formatDocumentsAsString } from 'langchain/util/document' -import { PDFLoader } from 'langchain/document_loaders/fs/pdf' - -import { TimeWeightedVectorStoreRetriever } from 'langchain/retrievers/time_weighted' -import { MemoryVectorStore } from 'langchain/vectorstores/memory' - -import { HNSWLib } from 'langchain/vectorstores/hnswlib' - -import { OpenAIEmbeddings } from 'langchain/embeddings/openai' - -export class Retrieval { - public chunkSize: number = 100 - public chunkOverlap?: number = 0 - private retriever: any - - private embeddingModel?: OpenAIEmbeddings = undefined - private textSplitter?: RecursiveCharacterTextSplitter - - // to support time-weighted retrieval - private timeWeightedVectorStore: MemoryVectorStore - private timeWeightedretriever: any | TimeWeightedVectorStoreRetriever - - constructor(chunkSize: number = 4000, chunkOverlap: number = 200) { - this.updateTextSplitter(chunkSize, chunkOverlap) - this.initialize() - } - - private async initialize() { - const apiKey = await window.core?.api.appToken() ?? 
'cortex.cpp' - - // declare time-weighted retriever and storage - this.timeWeightedVectorStore = new MemoryVectorStore( - new OpenAIEmbeddings( - { openAIApiKey: apiKey }, - { basePath: `${CORTEX_API_URL}/v1` } - ) - ) - this.timeWeightedretriever = new TimeWeightedVectorStoreRetriever({ - vectorStore: this.timeWeightedVectorStore, - memoryStream: [], - searchKwargs: 2, - }) - } - - public updateTextSplitter(chunkSize: number, chunkOverlap: number): void { - this.chunkSize = chunkSize - this.chunkOverlap = chunkOverlap - this.textSplitter = new RecursiveCharacterTextSplitter({ - chunkSize: chunkSize, - chunkOverlap: chunkOverlap, - }) - } - - public async updateEmbeddingEngine(model: string, engine: string) { - const apiKey = await window.core?.api.appToken() ?? 'cortex.cpp' - this.embeddingModel = new OpenAIEmbeddings( - { openAIApiKey: apiKey, model }, - // TODO: Raw settings - { basePath: `${CORTEX_API_URL}/v1` } - ) - - // update time-weighted embedding model - this.timeWeightedVectorStore.embeddings = this.embeddingModel - } - - public ingestAgentKnowledge = async ( - filePath: string, - memoryPath: string, - useTimeWeighted: boolean - ): Promise => { - const loader = new PDFLoader(filePath, { - splitPages: true, - }) - if (!this.embeddingModel) return Promise.reject() - const doc = await loader.load() - const docs = await this.textSplitter!.splitDocuments(doc) - const vectorStore = await HNSWLib.fromDocuments(docs, this.embeddingModel) - - // add documents with metadata by using the time-weighted retriever in order to support time-weighted retrieval - if (useTimeWeighted && this.timeWeightedretriever) { - await ( - this.timeWeightedretriever as TimeWeightedVectorStoreRetriever - ).addDocuments(docs) - } - return vectorStore.save(memoryPath) - } - - public loadRetrievalAgent = async (memoryPath: string): Promise => { - if (!this.embeddingModel) return Promise.reject() - const vectorStore = await HNSWLib.load(memoryPath, this.embeddingModel) - this.retriever = 
vectorStore.asRetriever(2) - return Promise.resolve() - } - - public generateResult = async ( - query: string, - useTimeWeighted: boolean - ): Promise => { - if (useTimeWeighted) { - if (!this.timeWeightedretriever) { - return Promise.resolve(' ') - } - // use invoke because getRelevantDocuments is deprecated - const relevantDocs = await this.timeWeightedretriever.invoke(query) - const serializedDoc = formatDocumentsAsString(relevantDocs) - return Promise.resolve(serializedDoc) - } - - if (!this.retriever) { - return Promise.resolve(' ') - } - - // should use invoke(query) because getRelevantDocuments is deprecated - const relevantDocs = await this.retriever.getRelevantDocuments(query) - const serializedDoc = formatDocumentsAsString(relevantDocs) - return Promise.resolve(serializedDoc) - } -} - -export const retrieval = new Retrieval() diff --git a/extensions/assistant-extension/src/tools/retrieval.ts b/extensions/assistant-extension/src/tools/retrieval.ts deleted file mode 100644 index b1a0c3cba..000000000 --- a/extensions/assistant-extension/src/tools/retrieval.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { - AssistantTool, - executeOnMain, - fs, - InferenceTool, - joinPath, - MessageRequest, -} from '@janhq/core' - -export class RetrievalTool extends InferenceTool { - private _threadDir = 'file://threads' - private retrievalThreadId: string | undefined = undefined - - name: string = 'retrieval' - - async process( - data: MessageRequest, - tool?: AssistantTool - ): Promise { - if (!data.model || !data.messages) { - return Promise.resolve(data) - } - - const latestMessage = data.messages[data.messages.length - 1] - - // 1. 
Ingest the document if needed - if ( - latestMessage && - latestMessage.content && - typeof latestMessage.content !== 'string' && - latestMessage.content.length > 1 - ) { - const docFile = latestMessage.content[1]?.doc_url?.url - if (docFile) { - await executeOnMain( - NODE, - 'toolRetrievalIngestNewDocument', - data.thread?.id, - docFile, - data.model?.id, - data.model?.engine, - tool?.useTimeWeightedRetriever ?? false - ) - } else { - return Promise.resolve(data) - } - } else if ( - // Check whether we need to ingest document or not - // Otherwise wrong context will be sent - !(await fs.existsSync( - await joinPath([this._threadDir, data.threadId, 'memory']) - )) - ) { - // No document ingested, reroute the result to inference engine - - return Promise.resolve(data) - } - // 2. Load agent on thread changed - if (this.retrievalThreadId !== data.threadId) { - await executeOnMain(NODE, 'toolRetrievalLoadThreadMemory', data.threadId) - - this.retrievalThreadId = data.threadId - - // Update the text splitter - await executeOnMain( - NODE, - 'toolRetrievalUpdateTextSplitter', - tool?.settings?.chunk_size ?? 4000, - tool?.settings?.chunk_overlap ?? 200 - ) - } - - // 3. Using the retrieval template with the result and query - if (latestMessage.content) { - const prompt = - typeof latestMessage.content === 'string' - ? latestMessage.content - : latestMessage.content[0].text - // Retrieve the result - const retrievalResult = await executeOnMain( - NODE, - 'toolRetrievalQueryResult', - prompt, - tool?.useTimeWeightedRetriever ?? false - ) - console.debug('toolRetrievalQueryResult', retrievalResult) - - // Update message content - if (retrievalResult) - data.messages[data.messages.length - 1].content = - tool?.settings?.retrieval_template - ?.replace('{CONTEXT}', retrievalResult) - .replace('{QUESTION}', prompt) - } - - // 4. 
Reroute the result to inference engine - return Promise.resolve(this.normalize(data)) - } - - // Filter out all the messages that are not text - // TODO: Remove it until engines can handle multiple content types - normalize(request: MessageRequest): MessageRequest { - request.messages = request.messages?.map((message) => { - if ( - message.content && - typeof message.content !== 'string' && - (message.content.length ?? 0) > 0 - ) { - return { - ...message, - content: [message.content[0]], - } - } - return message - }) - return request - } -} diff --git a/extensions/conversational-extension/package.json b/extensions/conversational-extension/package.json index a5224b99b..693adf6d6 100644 --- a/extensions/conversational-extension/package.json +++ b/extensions/conversational-extension/package.json @@ -23,9 +23,7 @@ "typescript": "^5.7.2" }, "dependencies": { - "@janhq/core": "../../core/package.tgz", - "ky": "^1.7.2", - "p-queue": "^8.0.1" + "@janhq/core": "../../core/package.tgz" }, "engines": { "node": ">=18.0.0" diff --git a/extensions/conversational-extension/src/index.ts b/extensions/conversational-extension/src/index.ts index 791385fc9..720291d88 100644 --- a/extensions/conversational-extension/src/index.ts +++ b/extensions/conversational-extension/src/index.ts @@ -4,45 +4,17 @@ import { ThreadAssistantInfo, ThreadMessage, } from '@janhq/core' -import ky, { KyInstance } from 'ky' -import PQueue from 'p-queue' - -type ThreadList = { - data: Thread[] -} - -type MessageList = { - data: ThreadMessage[] -} /** * JSONConversationalExtension is a ConversationalExtension implementation that provides * functionality for managing threads. */ export default class CortexConversationalExtension extends ConversationalExtension { - queue = new PQueue({ concurrency: 1 }) - - api?: KyInstance - /** - * Get the API instance - * @returns - */ - async apiInstance(): Promise { - if(this.api) return this.api - const apiKey = (await window.core?.api.appToken()) ?? 
'cortex.cpp' - this.api = ky.extend({ - prefixUrl: API_URL, - headers: { - Authorization: `Bearer ${apiKey}`, - }, - }) - return this.api - } /** * Called when the extension is loaded. */ async onLoad() { - this.queue.add(() => this.healthz()) + // this.queue.add(() => this.healthz()) } /** @@ -54,14 +26,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * Returns a Promise that resolves to an array of Conversation objects. */ async listThreads(): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get('v1/threads?limit=-1') - .json() - .then((e) => e.data) - ) - ) as Promise + return window.core.api.listThreads() } /** @@ -69,11 +34,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @param thread The Thread object to save. */ async createThread(thread: Thread): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api.post('v1/threads', { json: thread }).json() - ) - ) as Promise + return window.core.api.createThread({ thread }) } /** @@ -81,13 +42,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @param thread The Thread object to save. */ async modifyThread(thread: Thread): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => - api.patch(`v1/threads/${thread.id}`, { json: thread }) - ) - ) - .then() + return window.core.api.modifyThread({ thread }) } /** @@ -95,11 +50,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @param threadId The ID of the thread to delete. 
*/ async deleteThread(threadId: string): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => api.delete(`v1/threads/${threadId}`)) - ) - .then() + return window.core.api.deleteThread({ threadId }) } /** @@ -108,15 +59,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @returns A Promise that resolves when the message has been added. */ async createMessage(message: ThreadMessage): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post(`v1/threads/${message.thread_id}/messages`, { - json: message, - }) - .json() - ) - ) as Promise + return window.core.api.createMessage({ message }) } /** @@ -125,15 +68,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @returns */ async modifyMessage(message: ThreadMessage): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .patch(`v1/threads/${message.thread_id}/messages/${message.id}`, { - json: message, - }) - .json() - ) - ) as Promise + return window.core.api.modifyMessage({ message }) } /** @@ -143,13 +78,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @returns A Promise that resolves when the message has been successfully deleted. */ async deleteMessage(threadId: string, messageId: string): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => - api.delete(`v1/threads/${threadId}/messages/${messageId}`) - ) - ) - .then() + return window.core.api.deleteMessage({ threadId, messageId }) } /** @@ -158,14 +87,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * @returns A Promise that resolves to an array of ThreadMessage objects. 
*/ async listMessages(threadId: string): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/threads/${threadId}/messages?order=asc&limit=-1`) - .json() - .then((e) => e.data) - ) - ) as Promise + return window.core.api.listMessages({ threadId }) } /** @@ -175,13 +97,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi * the details of the assistant associated with the specified thread. */ async getThreadAssistant(threadId: string): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/assistants/${threadId}?limit=-1`) - .json() - ) - ) as Promise + return window.core.api.getThreadAssistant({ threadId }) } /** * Creates a new assistant for the specified thread. @@ -193,13 +109,7 @@ export default class CortexConversationalExtension extends ConversationalExtensi threadId: string, assistant: ThreadAssistantInfo ): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post(`v1/assistants/${threadId}`, { json: assistant }) - .json() - ) - ) as Promise + return window.core.api.createThreadAssistant(threadId, assistant) } /** @@ -212,26 +122,6 @@ export default class CortexConversationalExtension extends ConversationalExtensi threadId: string, assistant: ThreadAssistantInfo ): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .patch(`v1/assistants/${threadId}`, { json: assistant }) - .json() - ) - ) as Promise - } - - /** - * Do health check on cortex.cpp - * @returns - */ - async healthz(): Promise { - return this.apiInstance() - .then((api) => - api.get('healthz', { - retry: { limit: 20, delay: () => 500, methods: ['get'] }, - }) - ) - .then(() => {}) + return window.core.api.modifyThreadAssistant({ threadId, assistant }) } } diff --git a/extensions/engine-management-extension/resources/google_gemini.json b/extensions/engine-management-extension/resources/google_gemini.json index 
e0fa809a5..f860a1990 100644 --- a/extensions/engine-management-extension/resources/google_gemini.json +++ b/extensions/engine-management-extension/resources/google_gemini.json @@ -5,7 +5,7 @@ "url": "https://aistudio.google.com/apikey", "api_key": "", "metadata": { - "get_models_url": "https://generativelanguage.googleapis.com/v1beta/models", + "get_models_url": "https://generativelanguage.googleapis.com/openai/v1beta/models", "header_template": "Authorization: Bearer {{api_key}}", "transform_req": { "chat_completions": { diff --git a/extensions/engine-management-extension/rolldown.config.mjs b/extensions/engine-management-extension/rolldown.config.mjs index 7d6a6c1af..a385f1efd 100644 --- a/extensions/engine-management-extension/rolldown.config.mjs +++ b/extensions/engine-management-extension/rolldown.config.mjs @@ -15,7 +15,7 @@ export default defineConfig([ `http://127.0.0.1:${process.env.CORTEX_API_PORT ?? '39291'}` ), PLATFORM: JSON.stringify(process.platform), - CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.56'), + CORTEX_ENGINE_VERSION: JSON.stringify('b5371'), DEFAULT_REMOTE_ENGINES: JSON.stringify(engines), DEFAULT_REMOTE_MODELS: JSON.stringify(models), DEFAULT_REQUEST_PAYLOAD_TRANSFORM: JSON.stringify( @@ -38,7 +38,7 @@ export default defineConfig([ file: 'dist/node/index.cjs.js', }, define: { - CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.56'), + CORTEX_ENGINE_VERSION: JSON.stringify('b5371'), }, }, ]) diff --git a/extensions/engine-management-extension/src/index.ts b/extensions/engine-management-extension/src/index.ts index 7d0c9f9c4..029c54646 100644 --- a/extensions/engine-management-extension/src/index.ts +++ b/extensions/engine-management-extension/src/index.ts @@ -16,7 +16,6 @@ import { EngineEvent, } from '@janhq/core' import ky, { HTTPError, KyInstance } from 'ky' -import PQueue from 'p-queue' import { EngineError } from './error' import { getJanDataFolderPath } from '@janhq/core' import { engineVariant } from './utils' @@ -29,21 +28,22 @@ interface 
ModelList { * functionality for managing engines. */ export default class JanEngineManagementExtension extends EngineManagementExtension { - queue = new PQueue({ concurrency: 1 }) - api?: KyInstance /** * Get the API instance * @returns */ async apiInstance(): Promise { - if(this.api) return this.api - const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp' + if (this.api) return this.api + const apiKey = (await window.core?.api.appToken()) this.api = ky.extend({ prefixUrl: API_URL, - headers: { - Authorization: `Bearer ${apiKey}`, - }, + headers: apiKey + ? { + Authorization: `Bearer ${apiKey}`, + } + : {}, + retry: 10, }) return this.api } @@ -53,8 +53,6 @@ export default class JanEngineManagementExtension extends EngineManagementExtens async onLoad() { // Symlink Engines Directory await executeOnMain(NODE, 'symlinkEngines') - // Run Healthcheck - this.queue.add(() => this.healthz()) // Update default local engine this.updateDefaultEngine() @@ -74,13 +72,11 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to an object of list engines. */ async getEngines(): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get('v1/engines') - .json() - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .get('v1/engines') + .json() + .then((e) => e) ) as Promise } @@ -104,13 +100,11 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to an array of installed engine. 
*/ async getInstalledEngines(name: InferenceEngine): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/engines/${name}`) - .json() - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .get(`v1/engines/${name}`) + .json() + .then((e) => e) ) as Promise } @@ -125,15 +119,13 @@ export default class JanEngineManagementExtension extends EngineManagementExtens version: string, platform?: string ) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/engines/${name}/releases/${version}`) - .json() - .then((e) => - platform ? e.filter((r) => r.name.includes(platform)) : e - ) - ) + return this.apiInstance().then((api) => + api + .get(`v1/engines/${name}/releases/${version}`) + .json() + .then((e) => + platform ? e.filter((r) => r.name.includes(platform)) : e + ) ) as Promise } @@ -143,15 +135,13 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to an array of latest released engine by version. */ async getLatestReleasedEngine(name: InferenceEngine, platform?: string) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/engines/${name}/releases/latest`) - .json() - .then((e) => - platform ? e.filter((r) => r.name.includes(platform)) : e - ) - ) + return this.apiInstance().then((api) => + api + .get(`v1/engines/${name}/releases/latest`) + .json() + .then((e) => + platform ? e.filter((r) => r.name.includes(platform)) : e + ) ) as Promise } @@ -160,12 +150,10 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to intall of engine. 
*/ async installEngine(name: string, engineConfig: EngineConfig) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post(`v1/engines/${name}/install`, { json: engineConfig }) - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .post(`v1/engines/${name}/install`, { json: engineConfig }) + .then((e) => e) ) as Promise<{ messages: string }> } @@ -195,18 +183,16 @@ export default class JanEngineManagementExtension extends EngineManagementExtens if (engineConfig.metadata && !engineConfig.metadata?.header_template) engineConfig.metadata.header_template = DEFAULT_REQUEST_HEADERS_TRANSFORM - return this.queue.add(() => - this.apiInstance().then((api) => - api.post('v1/engines', { json: engineConfig }).then((e) => { - if (persistModels && engineConfig.metadata?.get_models_url) { - // Pull /models from remote models endpoint - return this.populateRemoteModels(engineConfig) - .then(() => e) - .catch(() => e) - } - return e - }) - ) + return this.apiInstance().then((api) => + api.post('v1/engines', { json: engineConfig }).then((e) => { + if (persistModels && engineConfig.metadata?.get_models_url) { + // Pull /models from remote models endpoint + return this.populateRemoteModels(engineConfig) + .then(() => e) + .catch(() => e) + } + return e + }) ) as Promise<{ messages: string }> } @@ -215,12 +201,10 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to unintall of engine. 
*/ async uninstallEngine(name: InferenceEngine, engineConfig: EngineConfig) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .delete(`v1/engines/${name}/install`, { json: engineConfig }) - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .delete(`v1/engines/${name}/install`, { json: engineConfig }) + .then((e) => e) ) as Promise<{ messages: string }> } @@ -229,25 +213,22 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @param model - Remote model object. */ async addRemoteModel(model: Model) { - return this.queue.add(() => - this.apiInstance() - .then((api) => - api - .post('v1/models/add', { - json: { - inference_params: { - max_tokens: 4096, - temperature: 0.7, - top_p: 0.95, - stream: true, - frequency_penalty: 0, - presence_penalty: 0, - }, - ...model, - }, - }) - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .post('v1/models/add', { + json: { + inference_params: { + max_tokens: 4096, + temperature: 0.7, + top_p: 0.95, + stream: true, + frequency_penalty: 0, + presence_penalty: 0, + }, + ...model, + }, + }) + .then((e) => e) .then(() => {}) ) } @@ -257,13 +238,11 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to an object of default engine. 
*/ async getDefaultEngineVariant(name: InferenceEngine) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/engines/${name}/default`) - .json<{ messages: string }>() - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .get(`v1/engines/${name}/default`) + .json<{ messages: string }>() + .then((e) => e) ) as Promise } @@ -276,12 +255,10 @@ export default class JanEngineManagementExtension extends EngineManagementExtens name: InferenceEngine, engineConfig: EngineConfig ) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post(`v1/engines/${name}/default`, { json: engineConfig }) - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .post(`v1/engines/${name}/default`, { json: engineConfig }) + .then((e) => e) ) as Promise<{ messages: string }> } @@ -289,31 +266,13 @@ export default class JanEngineManagementExtension extends EngineManagementExtens * @returns A Promise that resolves to update engine. 
*/ async updateEngine(name: InferenceEngine, engineConfig?: EngineConfig) { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post(`v1/engines/${name}/update`, { json: engineConfig }) - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .post(`v1/engines/${name}/update`, { json: engineConfig }) + .then((e) => e) ) as Promise<{ messages: string }> } - /** - * Do health check on cortex.cpp - * @returns - */ - async healthz(): Promise { - return this.apiInstance() - .then((api) => - api.get('healthz', { - retry: { limit: 20, delay: () => 500, methods: ['get'] }, - }) - ) - .then(() => { - this.queue.concurrency = Infinity - }) - } - /** * Update default local engine * This is to use built-in engine variant in case there is no default engine set @@ -428,8 +387,6 @@ export default class JanEngineManagementExtension extends EngineManagementExtens */ migrate = async () => { // Ensure health check is done - await this.queue.onEmpty() - const version = await this.getSetting('version', '0.0.0') const engines = await this.getEngines() if (version < VERSION) { diff --git a/extensions/engine-management-extension/src/node/index.ts b/extensions/engine-management-extension/src/node/index.ts index ae1934b25..ce8d9b274 100644 --- a/extensions/engine-management-extension/src/node/index.ts +++ b/extensions/engine-management-extension/src/node/index.ts @@ -16,12 +16,12 @@ const symlinkEngines = async () => { appResourcePath(), 'shared', 'engines', - 'cortex.llamacpp' + 'llama.cpp' ) const symlinkEnginePath = path.join( getJanDataFolderPath(), 'engines', - 'cortex.llamacpp' + 'llama.cpp' ) const variantFolders = await readdir(sourceEnginePath) const isStandalone = process.platform === 'linux' diff --git a/extensions/engine-management-extension/src/utils.ts b/extensions/engine-management-extension/src/utils.ts index 5e3f01ef7..fc80d255f 100644 --- a/extensions/engine-management-extension/src/utils.ts +++ 
b/extensions/engine-management-extension/src/utils.ts @@ -32,23 +32,23 @@ const gpuRunMode = (settings?: GpuSetting): RunMode => { */ const os = (settings?: GpuSetting): string => { return PLATFORM === 'win32' - ? 'windows-amd64' + ? 'win' : PLATFORM === 'darwin' ? settings?.cpu?.arch === 'arm64' - ? 'mac-arm64' - : 'mac-amd64' - : 'linux-amd64' + ? 'macos-arm64' + : 'macos-x64' + : 'linux' } /** - * The CUDA version that will be set - either '11-7' or '12-0'. + * The CUDA version that will be set - either 'cu12.0' or 'cu11.7'. * @param settings * @returns */ -const cudaVersion = (settings?: GpuSetting): '12-0' | '11-7' | undefined => { +const cudaVersion = (settings?: GpuSetting): 'cu12.0' | 'cu11.7' | undefined => { return settings.gpus?.some((gpu) => gpu.version.includes('12')) - ? '12-0' - : '11-7' + ? 'cu12.0' + : 'cu11.7' } /** @@ -84,15 +84,17 @@ export const engineVariant = async ( : 'noavx', runMode, cudaVersion(gpuSetting), + 'x64', ] : // For cpu only we need to check all available supported instructions [ (gpuSetting.cpu?.instructions ?? ['noavx']).find((e) => instructionBinaryNames.includes(e.toLowerCase()) ) ?? 'noavx', + 'x64', ]), ].filter(Boolean) - : [platform, 'vulkan'] + : [platform, 'vulkan', 'x64'] let engineVariantString = engineVariant.join('-') diff --git a/extensions/hardware-management-extension/src/index.ts b/extensions/hardware-management-extension/src/index.ts index edd98a7ae..665dd05ef 100644 --- a/extensions/hardware-management-extension/src/index.ts +++ b/extensions/hardware-management-extension/src/index.ts @@ -1,21 +1,15 @@ import { HardwareManagementExtension, HardwareInformation } from '@janhq/core' import ky, { KyInstance } from 'ky' -import PQueue from 'p-queue' /** * JSONHardwareManagementExtension is a HardwareManagementExtension implementation that provides * functionality for managing engines. 
*/ export default class JSONHardwareManagementExtension extends HardwareManagementExtension { - queue = new PQueue({ concurrency: 1 }) - /** * Called when the extension is loaded. */ - async onLoad() { - // Run Healthcheck - this.queue.add(() => this.healthz()) - } + async onLoad() {} api?: KyInstance /** @@ -23,13 +17,16 @@ export default class JSONHardwareManagementExtension extends HardwareManagementE * @returns */ async apiInstance(): Promise { - if(this.api) return this.api - const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp' + if (this.api) return this.api + const apiKey = (await window.core?.api.appToken()) this.api = ky.extend({ prefixUrl: API_URL, - headers: { - Authorization: `Bearer ${apiKey}`, - }, + headers: apiKey + ? { + Authorization: `Bearer ${apiKey}`, + } + : {}, + retry: 10, }) return this.api } @@ -39,31 +36,15 @@ export default class JSONHardwareManagementExtension extends HardwareManagementE */ onUnload() {} - /** - * Do health check on cortex.cpp - * @returns - */ - async healthz(): Promise { - return this.apiInstance().then((api) => - api - .get('healthz', { - retry: { limit: 20, delay: () => 500, methods: ['get'] }, - }) - .then(() => {}) - ) - } - /** * @returns A Promise that resolves to an object of hardware. 
*/ async getHardware(): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get('v1/hardware') - .json() - .then((e) => e) - ) + return this.apiInstance().then((api) => + api + .get('v1/hardware') + .json() + .then((e) => e) ) as Promise } @@ -74,10 +55,8 @@ export default class JSONHardwareManagementExtension extends HardwareManagementE message: string activated_gpus: number[] }> { - return this.queue.add(() => - this.apiInstance().then((api) => - api.post('v1/hardware/activate', { json: data }).then((e) => e) - ) + return this.apiInstance().then((api) => + api.post('v1/hardware/activate', { json: data }).then((e) => e) ) as Promise<{ message: string activated_gpus: number[] diff --git a/extensions/inference-cortex-extension/bin/version.txt b/extensions/inference-cortex-extension/bin/version.txt index 492b167a6..d5f450d92 100644 --- a/extensions/inference-cortex-extension/bin/version.txt +++ b/extensions/inference-cortex-extension/bin/version.txt @@ -1 +1 @@ -1.0.12 \ No newline at end of file +1.0.13-rc6 \ No newline at end of file diff --git a/extensions/inference-cortex-extension/download.bat b/extensions/inference-cortex-extension/download.bat index 220c5528b..ec6e68560 100644 --- a/extensions/inference-cortex-extension/download.bat +++ b/extensions/inference-cortex-extension/download.bat @@ -2,39 +2,38 @@ set BIN_PATH=./bin set SHARED_PATH=./../../electron/shared set /p CORTEX_VERSION=<./bin/version.txt -set ENGINE_VERSION=0.1.56 +set ENGINE_VERSION=b5371 -@REM Download cortex.llamacpp binaries -set DOWNLOAD_URL=https://github.com/menloresearch/cortex.llamacpp/releases/download/v%ENGINE_VERSION%/cortex.llamacpp-%ENGINE_VERSION%-windows-amd64 -set CUDA_DOWNLOAD_URL=https://github.com/menloresearch/cortex.llamacpp/releases/download/v%ENGINE_VERSION% -set SUBFOLDERS=windows-amd64-noavx-cuda-12-0 windows-amd64-noavx-cuda-11-7 windows-amd64-avx2-cuda-12-0 windows-amd64-avx2-cuda-11-7 windows-amd64-noavx windows-amd64-avx 
windows-amd64-avx2 windows-amd64-avx512 windows-amd64-vulkan +@REM Download llama.cpp binaries +set DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/%ENGINE_VERSION%/llama-%ENGINE_VERSION%-bin-win +set DOWNLOAD_GGML_URL=https://github.com/ggml-org/llama.cpp/releases/download/%ENGINE_VERSION%/llama-%ENGINE_VERSION%-bin-win +set CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/%ENGINE_VERSION% +set SUBFOLDERS=win-noavx-cuda-cu12.0-x64 win-noavx-cuda-cu11.7-x64 win-avx2-cuda-cu12.0-x64 win-avx2-cuda-cu11.7-x64 win-noavx-x64 win-avx-x64 win-avx2-x64 win-avx512-x64 win-vulkan-x64 call .\node_modules\.bin\download -e --strip 1 -o %BIN_PATH% https://github.com/menloresearch/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz -call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-cuda-12-0.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx2-cuda-12-0/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-cuda-11-7.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx2-cuda-11-7/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx-cuda-12-0.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-noavx-cuda-12-0/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx-cuda-11-7.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-noavx-cuda-11-7/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-noavx/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-avx.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx2/v%ENGINE_VERSION% -call 
.\node_modules\.bin\download %DOWNLOAD_URL%-avx512.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-avx512/v%ENGINE_VERSION% -call .\node_modules\.bin\download %DOWNLOAD_URL%-vulkan.tar.gz -e --strip 1 -o %SHARED_PATH%/engines/cortex.llamacpp/windows-amd64-vulkan/v%ENGINE_VERSION% -call .\node_modules\.bin\download %CUDA_DOWNLOAD_URL%/cuda-12-0-windows-amd64.tar.gz -e --strip 1 -o %BIN_PATH% -call .\node_modules\.bin\download %CUDA_DOWNLOAD_URL%/cuda-11-7-windows-amd64.tar.gz -e --strip 1 -o %BIN_PATH% +call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-cuda-cu12.0-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-avx2-cuda-cu12.0-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-cuda-cu11.7-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-avx2-cuda-cu11.7-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx-cuda-cu12.0-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-noavx-cuda-cu12.0-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx-cuda-cu11.7-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-noavx-cuda-cu11.7-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-noavx-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-noavx-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-avx-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-avx-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-avx2-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-avx2-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_URL%-avx512-x64.tar.gz -e --strip 2 -o %SHARED_PATH%/engines/llama.cpp/win-avx512-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download %DOWNLOAD_GGML_URL%-vulkan-x64.zip -e --strip 1 -o %SHARED_PATH%/engines/llama.cpp/win-vulkan-x64/%ENGINE_VERSION% +call .\node_modules\.bin\download 
%CUDA_DOWNLOAD_URL%/cudart-llama-bin-win-cu12.0-x64.tar.gz -e --strip 1 -o %BIN_PATH% +call .\node_modules\.bin\download %CUDA_DOWNLOAD_URL%/cudart-llama-bin-win-cu11.7-x64.tar.gz -e --strip 1 -o %BIN_PATH% move %BIN_PATH%\cortex-server-beta.exe %BIN_PATH%\cortex-server.exe del %BIN_PATH%\cortex-beta.exe del %BIN_PATH%\cortex.exe -@REM Loop through each folder and move DLLs (excluding engine.dll) +@REM Loop through each folder and move DLLs for %%F in (%SUBFOLDERS%) do ( - echo Processing folder: %SHARED_PATH%\engines\cortex.llamacpp\%%F\v%ENGINE_VERSION% + echo Processing folder: %SHARED_PATH%\engines\llama.cpp\%%F\%ENGINE_VERSION% - @REM Move all .dll files except engine.dll - for %%D in (%SHARED_PATH%\engines\cortex.llamacpp\%%F\v%ENGINE_VERSION%\*.dll) do ( - if /I not "%%~nxD"=="engine.dll" ( - move "%%D" "%BIN_PATH%" - ) + @REM Move cu*.dll files + for %%D in (%SHARED_PATH%\engines\llama.cpp\%%F\%ENGINE_VERSION%\cu*.dll) do ( + move "%%D" "%BIN_PATH%" ) ) diff --git a/extensions/inference-cortex-extension/download.sh b/extensions/inference-cortex-extension/download.sh index 46fe35c48..6d3ea4639 100755 --- a/extensions/inference-cortex-extension/download.sh +++ b/extensions/inference-cortex-extension/download.sh @@ -2,10 +2,10 @@ # Read CORTEX_VERSION CORTEX_VERSION=$(cat ./bin/version.txt) -ENGINE_VERSION=0.1.56 +ENGINE_VERSION=b5371 CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download" -ENGINE_DOWNLOAD_URL="https://github.com/menloresearch/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}" -CUDA_DOWNLOAD_URL="https://github.com/menloresearch/cortex.llamacpp/releases/download/v${ENGINE_VERSION}" +ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin +CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION} BIN_PATH=./bin SHARED_PATH="../../electron/shared" # Detect platform @@ -20,17 
+20,17 @@ if [ "$OS_TYPE" == "Linux" ]; then chmod +x "./bin/cortex-server" # Download engines for Linux - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-noavx.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-noavx/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx2.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx2/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx512.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx512/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx2-cuda-12-0.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx2-cuda-12-0/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-avx2-cuda-11-7.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-avx2-cuda-11-7/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-noavx-cuda-12-0.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-noavx-cuda-12-0/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-noavx-cuda-11-7.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-noavx-cuda-11-7/v${ENGINE_VERSION}" 1 - download "${ENGINE_DOWNLOAD_URL}-linux-amd64-vulkan.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/linux-amd64-vulkan/v${ENGINE_VERSION}" 1 - download "${CUDA_DOWNLOAD_URL}/cuda-12-0-linux-amd64.tar.gz" -e --strip 1 -o "${BIN_PATH}" 1 - download "${CUDA_DOWNLOAD_URL}/cuda-11-7-linux-amd64.tar.gz" -e --strip 1 -o "${BIN_PATH}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-noavx-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-noavx-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-avx-x64.tar.gz" -e --strip 2 
-o "${SHARED_PATH}/engines/llama.cpp/linux-avx-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-avx2-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-avx2-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-avx512-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-avx512-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-avx2-cuda-cu12.0-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-avx2-cuda-cu12.0-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-avx2-cuda-cu11.7-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-avx2-cuda-cu11.7-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-noavx-cuda-cu12.0-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-noavx-cuda-cu12.0-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-noavx-cuda-cu11.7-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-noavx-cuda-cu11.7-x64/${ENGINE_VERSION}" 1 + download "${ENGINE_DOWNLOAD_URL}-linux-vulkan-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/linux-vulkan-x64/${ENGINE_VERSION}" 1 + download "${CUDA_DOWNLOAD_URL}/cudart-llama-bin-linux-cu12.0-x64.tar.gz" -e --strip 1 -o "${BIN_PATH}" 1 + download "${CUDA_DOWNLOAD_URL}/cudart-llama-bin-linux-cu11.7-x64.tar.gz" -e --strip 1 -o "${BIN_PATH}" 1 elif [ "$OS_TYPE" == "Darwin" ]; then # macOS downloads @@ -41,8 +41,8 @@ elif [ "$OS_TYPE" == "Darwin" ]; then chmod +x "./bin/cortex-server" # Download engines for macOS - download "${ENGINE_DOWNLOAD_URL}-mac-arm64.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/mac-arm64/v${ENGINE_VERSION}" - download "${ENGINE_DOWNLOAD_URL}-mac-amd64.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/mac-amd64/v${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-macos-arm64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/macos-arm64/${ENGINE_VERSION}" + download 
"${ENGINE_DOWNLOAD_URL}-macos-x64.tar.gz" -e --strip 2 -o "${SHARED_PATH}/engines/llama.cpp/macos-x64/${ENGINE_VERSION}" else echo "Unsupported operating system: $OS_TYPE" diff --git a/extensions/inference-cortex-extension/rolldown.config.mjs b/extensions/inference-cortex-extension/rolldown.config.mjs index 0e91dfbc1..6c0df4933 100644 --- a/extensions/inference-cortex-extension/rolldown.config.mjs +++ b/extensions/inference-cortex-extension/rolldown.config.mjs @@ -19,7 +19,7 @@ export default defineConfig([ CORTEX_SOCKET_URL: JSON.stringify( `ws://127.0.0.1:${process.env.CORTEX_API_PORT ?? '39291'}` ), - CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.56'), + CORTEX_ENGINE_VERSION: JSON.stringify('b5371'), }, }, { diff --git a/extensions/inference-cortex-extension/src/index.ts b/extensions/inference-cortex-extension/src/index.ts index 7ed51f9c2..33bd398c3 100644 --- a/extensions/inference-cortex-extension/src/index.ts +++ b/extensions/inference-cortex-extension/src/index.ts @@ -16,7 +16,6 @@ import { events, ModelEvent, } from '@janhq/core' -import PQueue from 'p-queue' import ky, { KyInstance } from 'ky' /** @@ -48,8 +47,6 @@ export enum Settings { export default class JanInferenceCortexExtension extends LocalOAIEngine { nodeModule: string = 'node' - queue = new PQueue({ concurrency: 1 }) - provider: string = InferenceEngine.cortex shouldReconnect = true @@ -81,13 +78,16 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine { * @returns */ async apiInstance(): Promise { - if(this.api) return this.api - const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp' + if (this.api) return this.api + const apiKey = await window.core?.api.appToken() this.api = ky.extend({ prefixUrl: CORTEX_API_URL, - headers: { - Authorization: `Bearer ${apiKey}`, - }, + headers: apiKey + ? 
{ + Authorization: `Bearer ${apiKey}`, + } + : {}, + retry: 10, }) return this.api } @@ -129,10 +129,8 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine { ) if (!Number.isNaN(threads_number)) this.cpu_threads = threads_number - // Run the process watchdog - // const systemInfo = await systemInformation() - this.queue.add(() => executeOnMain(NODE, 'run')) - this.queue.add(() => this.healthz()) + await executeOnMain(NODE, 'run') + this.subscribeToEvents() window.addEventListener('beforeunload', () => { @@ -179,35 +177,33 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine { this.abortControllers.set(model.id, controller) - return await this.queue.add(() => - this.apiInstance().then((api) => - api - .post('v1/models/start', { - json: { - ...extractModelLoadParams(model.settings), - model: model.id, - engine: - model.engine === InferenceEngine.nitro // Legacy model cache - ? InferenceEngine.cortex_llamacpp - : model.engine, - cont_batching: this.cont_batching, - n_parallel: this.n_parallel, - caching_enabled: this.caching_enabled, - flash_attn: this.flash_attn, - cache_type: this.cache_type, - use_mmap: this.use_mmap, - ...(this.cpu_threads ? { cpu_threads: this.cpu_threads } : {}), - }, - timeout: false, - signal, - }) - .json() - .catch(async (e) => { - throw (await e.response?.json()) ?? e - }) - .finally(() => this.abortControllers.delete(model.id)) - .then() - ) + return await this.apiInstance().then((api) => + api + .post('v1/models/start', { + json: { + ...extractModelLoadParams(model.settings), + model: model.id, + engine: + model.engine === InferenceEngine.nitro // Legacy model cache + ? InferenceEngine.cortex_llamacpp + : model.engine, + cont_batching: this.cont_batching, + n_parallel: this.n_parallel, + caching_enabled: this.caching_enabled, + flash_attn: this.flash_attn, + cache_type: this.cache_type, + use_mmap: this.use_mmap, + ...(this.cpu_threads ? 
{ cpu_threads: this.cpu_threads } : {}), + }, + timeout: false, + signal, + }) + .json() + .catch(async (e) => { + throw (await e.response?.json()) ?? e + }) + .finally(() => this.abortControllers.delete(model.id)) + .then() ) } @@ -225,24 +221,6 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine { ) } - /** - * Do health check on cortex.cpp - * @returns - */ - private async healthz(): Promise { - return this.apiInstance().then((api) => - api - .get('healthz', { - retry: { - limit: 20, - delay: () => 500, - methods: ['get'], - }, - }) - .then(() => {}) - ) - } - /** * Clean cortex processes * @returns @@ -266,76 +244,64 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine { * Subscribe to cortex.cpp websocket events */ private subscribeToEvents() { - this.queue.add( - () => - new Promise((resolve) => { - this.socket = new WebSocket(`${CORTEX_SOCKET_URL}/events`) + console.log('Subscribing to events...') + this.socket = new WebSocket(`${CORTEX_SOCKET_URL}/events`) - this.socket.addEventListener('message', (event) => { - const data = JSON.parse(event.data) + this.socket.addEventListener('message', (event) => { + const data = JSON.parse(event.data) - const transferred = data.task.items.reduce( - (acc: number, cur: any) => acc + cur.downloadedBytes, - 0 - ) - const total = data.task.items.reduce( - (acc: number, cur: any) => acc + cur.bytes, - 0 - ) - const percent = total > 0 ? transferred / total : 0 + const transferred = data.task.items.reduce( + (acc: number, cur: any) => acc + cur.downloadedBytes, + 0 + ) + const total = data.task.items.reduce( + (acc: number, cur: any) => acc + cur.bytes, + 0 + ) + const percent = total > 0 ? 
transferred / total : 0 - events.emit( - DownloadTypes[data.type as keyof typeof DownloadTypes], - { - modelId: data.task.id, - percent: percent, - size: { - transferred: transferred, - total: total, - }, - downloadType: data.task.type, - } - ) + events.emit(DownloadTypes[data.type as keyof typeof DownloadTypes], { + modelId: data.task.id, + percent: percent, + size: { + transferred: transferred, + total: total, + }, + downloadType: data.task.type, + }) - if (data.task.type === 'Engine') { - events.emit(EngineEvent.OnEngineUpdate, { - type: DownloadTypes[data.type as keyof typeof DownloadTypes], - percent: percent, - id: data.task.id, - }) - } else { - if (data.type === DownloadTypes.DownloadSuccess) { - // Delay for the state update from cortex.cpp - // Just to be sure - setTimeout(() => { - events.emit(ModelEvent.OnModelsUpdate, { - fetch: true, - }) - }, 500) - } - } - }) - - /** - * This is to handle the server segfault issue - */ - this.socket.onclose = (event) => { - console.log('WebSocket closed:', event) - // Notify app to update model running state - events.emit(ModelEvent.OnModelStopped, {}) - - // Reconnect to the /events websocket - if (this.shouldReconnect) { - console.log(`Attempting to reconnect...`) - setTimeout(() => this.subscribeToEvents(), 1000) - } - - // Queue up health check - this.queue.add(() => this.healthz()) - } - - resolve() + if (data.task.type === 'Engine') { + events.emit(EngineEvent.OnEngineUpdate, { + type: DownloadTypes[data.type as keyof typeof DownloadTypes], + percent: percent, + id: data.task.id, }) - ) + } else { + if (data.type === DownloadTypes.DownloadSuccess) { + // Delay for the state update from cortex.cpp + // Just to be sure + setTimeout(() => { + events.emit(ModelEvent.OnModelsUpdate, { + fetch: true, + }) + }, 500) + } + } + }) + + /** + * This is to handle the server segfault issue + */ + this.socket.onclose = (event) => { + console.log('WebSocket closed:', event) + // Notify app to update model running state + 
events.emit(ModelEvent.OnModelStopped, {}) + + // Reconnect to the /events websocket + if (this.shouldReconnect) { + console.log(`Attempting to reconnect...`) + setTimeout(() => this.subscribeToEvents(), 1000) + } + } } } diff --git a/extensions/model-extension/src/index.ts b/extensions/model-extension/src/index.ts index fd1e5581d..835b9d3a7 100644 --- a/extensions/model-extension/src/index.ts +++ b/extensions/model-extension/src/index.ts @@ -12,7 +12,6 @@ import { } from '@janhq/core' import { scanModelsFolder } from './legacy/model-json' import { deleteModelFiles } from './legacy/delete' -import PQueue from 'p-queue' import ky, { KyInstance } from 'ky' /** @@ -31,21 +30,22 @@ type Data = { * A extension for models */ export default class JanModelExtension extends ModelExtension { - queue = new PQueue({ concurrency: 1 }) - api?: KyInstance /** * Get the API instance * @returns */ async apiInstance(): Promise { - if(this.api) return this.api - const apiKey = (await window.core?.api.appToken()) ?? 'cortex.cpp' + if (this.api) return this.api + const apiKey = (await window.core?.api.appToken()) this.api = ky.extend({ prefixUrl: CORTEX_API_URL, - headers: { - Authorization: `Bearer ${apiKey}`, - }, + headers: apiKey + ? { + Authorization: `Bearer ${apiKey}`, + } + : {}, + retry: 10 }) return this.api } @@ -53,8 +53,6 @@ export default class JanModelExtension extends ModelExtension { * Called when the extension is loaded. */ async onLoad() { - this.queue.add(() => this.healthz()) - this.registerSettings(SETTINGS) // Configure huggingface token if available @@ -97,16 +95,14 @@ export default class JanModelExtension extends ModelExtension { /** * Sending POST to /models/pull/{id} endpoint to pull the model */ - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post('v1/models/pull', { json: { model, id, name }, timeout: false }) - .json() - .catch(async (e) => { - throw (await e.response?.json()) ?? 
e - }) - .then() - ) + return this.apiInstance().then((api) => + api + .post('v1/models/pull', { json: { model, id, name }, timeout: false }) + .json() + .catch(async (e) => { + throw (await e.response?.json()) ?? e + }) + .then() ) } @@ -120,13 +116,11 @@ export default class JanModelExtension extends ModelExtension { /** * Sending DELETE to /models/pull/{id} endpoint to cancel a model pull */ - return this.queue.add(() => - this.apiInstance().then((api) => - api - .delete('v1/models/pull', { json: { taskId: model } }) - .json() - .then() - ) + return this.apiInstance().then((api) => + api + .delete('v1/models/pull', { json: { taskId: model } }) + .json() + .then() ) } @@ -136,12 +130,8 @@ export default class JanModelExtension extends ModelExtension { * @returns A Promise that resolves when the model is deleted. */ async deleteModel(model: string): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => - api.delete(`v1/models/${model}`).json().then() - ) - ) + return this.apiInstance() + .then((api) => api.delete(`v1/models/${model}`).json().then()) .catch((e) => console.debug(e)) .finally(async () => { // Delete legacy model files @@ -241,17 +231,15 @@ export default class JanModelExtension extends ModelExtension { * @param model - The metadata of the model */ async updateModel(model: Partial): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => - api - .patch(`v1/models/${model.id}`, { - json: { ...model }, - timeout: false, - }) - .json() - .then() - ) + return this.apiInstance() + .then((api) => + api + .patch(`v1/models/${model.id}`, { + json: { ...model }, + timeout: false, + }) + .json() + .then() ) .then(() => this.getModel(model.id)) } @@ -261,13 +249,11 @@ export default class JanModelExtension extends ModelExtension { * @param model - The ID of the model */ async getModel(model: string): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .get(`v1/models/${model}`) - 
.json() - .then((e) => this.transformModel(e)) - ) + return this.apiInstance().then((api) => + api + .get(`v1/models/${model}`) + .json() + .then((e) => this.transformModel(e)) ) as Promise } @@ -282,17 +268,15 @@ export default class JanModelExtension extends ModelExtension { name?: string, option?: OptionType ): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api - .post('v1/models/import', { - json: { model, modelPath, name, option }, - timeout: false, - }) - .json() - .catch((e) => console.debug(e)) // Ignore error - .then() - ) + return this.apiInstance().then((api) => + api + .post('v1/models/import', { + json: { model, modelPath, name, option }, + timeout: false, + }) + .json() + .catch((e) => console.debug(e)) // Ignore error + .then() ) } @@ -302,12 +286,8 @@ export default class JanModelExtension extends ModelExtension { * @param model */ async getSources(): Promise { - const sources = await this.queue - .add(() => - this.apiInstance().then((api) => - api.get('v1/models/sources').json>() - ) - ) + const sources = await this.apiInstance() + .then((api) => api.get('v1/models/sources').json>()) .then((e) => (typeof e === 'object' ? 
(e.data as ModelSource[]) : [])) .catch(() => []) return sources.concat( @@ -320,14 +300,12 @@ export default class JanModelExtension extends ModelExtension { * @param model */ async addSource(source: string): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api.post('v1/models/sources', { - json: { - source, - }, - }) - ) + return this.apiInstance().then((api) => + api.post('v1/models/sources', { + json: { + source, + }, + }) ) } @@ -336,15 +314,13 @@ export default class JanModelExtension extends ModelExtension { * @param model */ async deleteSource(source: string): Promise { - return this.queue.add(() => - this.apiInstance().then((api) => - api.delete('v1/models/sources', { - json: { - source, - }, - timeout: false, - }) - ) + return this.apiInstance().then((api) => + api.delete('v1/models/sources', { + json: { + source, + }, + timeout: false, + }) ) } // END - Model Sources @@ -354,10 +330,8 @@ export default class JanModelExtension extends ModelExtension { * @param model */ async isModelLoaded(model: string): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => api.get(`v1/models/status/${model}`)) - ) + return this.apiInstance() + .then((api) => api.get(`v1/models/status/${model}`)) .then((e) => true) .catch(() => false) } @@ -375,12 +349,8 @@ export default class JanModelExtension extends ModelExtension { * @returns */ async fetchModels(): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => - api.get('v1/models?limit=-1').json>() - ) - ) + return this.apiInstance() + .then((api) => api.get('v1/models?limit=-1').json>()) .then((e) => typeof e === 'object' ? 
e.data.map((e) => this.transformModel(e)) : [] ) @@ -418,33 +388,9 @@ export default class JanModelExtension extends ModelExtension { private async updateCortexConfig(body: { [key: string]: any }): Promise { - return this.queue - .add(() => - this.apiInstance().then((api) => - api.patch('v1/configs', { json: body }).then(() => {}) - ) - ) - .catch((e) => console.debug(e)) - } - - /** - * Do health check on cortex.cpp - * @returns - */ - private healthz(): Promise { return this.apiInstance() - .then((api) => - api.get('healthz', { - retry: { - limit: 20, - delay: () => 500, - methods: ['get'], - }, - }) - ) - .then(() => { - this.queue.concurrency = Infinity - }) + .then((api) => api.patch('v1/configs', { json: body }).then(() => {})) + .catch((e) => console.debug(e)) } /** @@ -453,25 +399,23 @@ export default class JanModelExtension extends ModelExtension { fetchModelsHub = async () => { const models = await this.fetchModels() - return this.queue.add(() => - this.apiInstance() - .then((api) => - api - .get('v1/models/hub?author=cortexso&tag=cortex.cpp') - .json>() - .then((e) => { - e.data?.forEach((model) => { - if ( - !models.some( - (e) => 'modelSource' in e && e.modelSource === model - ) + return this.apiInstance() + .then((api) => + api + .get('v1/models/hub?author=cortexso&tag=cortex.cpp') + .json>() + .then((e) => { + e.data?.forEach((model) => { + if ( + !models.some( + (e) => 'modelSource' in e && e.modelSource === model ) - this.addSource(model).catch((e) => console.debug(e)) - }) + ) + this.addSource(model).catch((e) => console.debug(e)) }) - ) - .catch((e) => console.debug(e)) - ) + }) + ) + .catch((e) => console.debug(e)) } // END: - Private API } diff --git a/package.json b/package.json index 743406122..dcbf93831 100644 --- a/package.json +++ b/package.json @@ -17,10 +17,21 @@ "test": "yarn workspace jan test:e2e", "test-local": "yarn lint && yarn build:test && yarn test", "copy:assets": "cpx \"pre-install/*.tgz\" \"electron/pre-install/\" && cpx 
\"themes/**\" \"electron/themes\"", + "copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"themes/**\" \"src-tauri/resources/themes\"", "dev:electron": "yarn copy:assets && yarn workspace jan dev", + "dev:web:standalone": "concurrently \"yarn workspace @janhq/web dev\" \"wait-on http://localhost:3000 && rsync -av --prune-empty-dirs --include '*/' --include 'dist/***' --include 'package.json' --include 'tsconfig.json' --exclude '*' ./extensions/ web/.next/static/extensions/\"", "dev:web": "yarn workspace @janhq/web dev", "dev:server": "yarn workspace @janhq/server dev", "dev": "concurrently -n \"NEXT,ELECTRON\" -c \"yellow,blue\" --kill-others \"yarn dev:web\" \"yarn dev:electron\"", + "install:cortex:linux:darwin": "cd src-tauri/binaries && ./download.sh", + "install:cortex:win32": "cd src-tauri/binaries && download.bat", + "install:cortex": "run-script-os", + "download:bin": "node ./scripts/download-bin.mjs", + "dev:tauri": "yarn build:icon && yarn copy:assets:tauri && tauri dev", + "build:tauri:linux:win32": "yarn download:bin && yarn install:cortex && yarn build:icon && yarn copy:assets:tauri && yarn tauri build --verbose", + "build:tauri:darwin": "yarn install:cortex && yarn build:icon && yarn copy:assets:tauri && yarn tauri build --verbose --target universal-apple-darwin", + "build:tauri": "run-script-os", + "build:icon": "tauri icon ./src-tauri/icons/icon.png", "build:server": "cd server && yarn build", "build:core": "cd core && yarn build && yarn pack", "build:web": "yarn workspace @janhq/web build && cpx \"web/out/**\" \"electron/renderer/\"", @@ -29,18 +40,24 @@ "build:extensions": "rimraf ./pre-install/*.tgz || true && yarn workspace @janhq/core build && cd extensions && yarn install && yarn workspaces foreach -Apt run build:publish", "build:test": "yarn copy:assets && yarn workspace @janhq/web build && cpx \"web/out/**\" \"electron/renderer/\" && yarn workspace jan build:test", "build": "yarn build:web && yarn 
build:electron", + "build-tauri": "yarn build:web && yarn build:tauri", "build:publish": "yarn copy:assets && yarn build:web && yarn workspace jan build:publish", "dev:joi": "yarn workspace @janhq/joi install && yarn workspace @janhq/joi dev", "build:joi": "yarn workspace @janhq/joi build", "prepare": "husky" }, "devDependencies": { + "@tauri-apps/cli": "^2.2.5", "concurrently": "^9.1.0", "cpx": "^1.5.0", + "cross-env": "^7.0.3", "husky": "^9.1.5", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", "rimraf": "^3.0.2", + "run-script-os": "^1.1.6", + "tar": "^4.4.19", + "unzipper": "^0.12.3", "wait-on": "^7.0.1" }, "version": "0.0.0", diff --git a/scripts/download-bin.mjs b/scripts/download-bin.mjs new file mode 100644 index 000000000..44693ab79 --- /dev/null +++ b/scripts/download-bin.mjs @@ -0,0 +1,232 @@ +console.log('Script is running') +// scripts/download.js +import https from 'https' +import fs, { copyFile, mkdirSync } from 'fs' +import os from 'os' +import path from 'path' +import unzipper from 'unzipper' +import tar from 'tar' +import { copySync } from 'cpx' + +function download(url, dest) { + return new Promise((resolve, reject) => { + console.log(`Downloading ${url} to ${dest}`) + const file = fs.createWriteStream(dest) + https + .get(url, (response) => { + console.log(`Response status code: ${response.statusCode}`) + if ( + response.statusCode >= 300 && + response.statusCode < 400 && + response.headers.location + ) { + // Handle redirect + const redirectURL = response.headers.location + console.log(`Redirecting to ${redirectURL}`) + download(redirectURL, dest).then(resolve, reject) // Recursive call + return + } else if (response.statusCode !== 200) { + reject(`Failed to get '${url}' (${response.statusCode})`) + return + } + response.pipe(file) + file.on('finish', () => { + file.close(resolve) + }) + }) + .on('error', (err) => { + fs.unlink(dest, () => reject(err.message)) + }) + }) +} + +async function decompress(filePath, targetDir) { + 
console.log(`Decompressing ${filePath} to ${targetDir}`) + if (filePath.endsWith('.zip')) { + await fs + .createReadStream(filePath) + .pipe(unzipper.Extract({ path: targetDir })) + .promise() + } else if (filePath.endsWith('.tar.gz')) { + await tar.x({ + file: filePath, + cwd: targetDir, + }) + } else { + throw new Error(`Unsupported archive format: ${filePath}`) + } +} + +function getPlatformArch() { + const platform = os.platform() // 'darwin', 'linux', 'win32' + const arch = os.arch() // 'x64', 'arm64', etc. + + let bunPlatform, uvPlatform + + if (platform === 'darwin') { + bunPlatform = arch === 'arm64' ? 'darwin-aarch64' : 'darwin-x86' + uvPlatform = + arch === 'arm64' ? 'aarch64-apple-darwin' : 'x86_64-apple-darwin' + } else if (platform === 'linux') { + bunPlatform = arch === 'arm64' ? 'linux-aarch64' : 'linux-x64' + uvPlatform = arch === 'arm64' ? 'aarch64-unknown-linux-gnu' : 'x86_64-unknown-linux-gnu' + } else if (platform === 'win32') { + bunPlatform = 'windows-x64' // Bun has limited Windows support + uvPlatform = 'x86_64-pc-windows-msvc' + } else { + throw new Error(`Unsupported platform: ${platform}`) + } + + return { bunPlatform, uvPlatform } +} + +async function main() { + console.log('Starting main function') + const platform = os.platform() + const { bunPlatform, uvPlatform } = getPlatformArch() + console.log(`bunPlatform: ${bunPlatform}, uvPlatform: ${uvPlatform}`) + + const binDir = 'src-tauri/resources/bin' + const tempBinDir = 'scripts/dist' + const bunPath = `${tempBinDir}/bun-${bunPlatform}.zip` + let uvPath = `${tempBinDir}/uv-${uvPlatform}.tar.gz` + if (platform === 'win32') { + uvPath = `${tempBinDir}/uv-${uvPlatform}.zip` + } + try { + mkdirSync('scripts/dist') + } catch (err) { + // Expect EEXIST error if the directory already exists + } + + // Adjust these URLs based on latest releases + const bunVersion = '1.2.10' // Example Bun version + const bunUrl = 
`https://github.com/oven-sh/bun/releases/download/bun-v${bunVersion}/bun-${bunPlatform}.zip` + + const uvVersion = '0.6.17' // Example UV version + let uvUrl = `https://github.com/astral-sh/uv/releases/download/${uvVersion}/uv-${uvPlatform}.tar.gz` + if (platform === 'win32') { + uvUrl = `https://github.com/astral-sh/uv/releases/download/${uvVersion}/uv-${uvPlatform}.zip` + } + + console.log(`Downloading Bun for ${bunPlatform}...`) + const bunSaveDir = path.join(tempBinDir, `bun-${bunPlatform}.zip`) + if (!fs.existsSync(bunSaveDir)) { + await download(bunUrl, bunSaveDir) + await decompress(bunPath, tempBinDir) + } + try { + copySync( + path.join(tempBinDir, `bun-${bunPlatform}`, 'bun'), + path.join(binDir) + ) + fs.chmod(path.join(binDir, 'bun'), 0o755, (err) => { + if (err) { + console.log('Add execution permission failed!', err) + } + }); + if (platform === 'darwin') { + copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-x86_64-apple-darwin'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-aarch64-apple-darwin'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + } else if (platform === 'linux') { + copyFile(path.join(binDir, 'bun'), path.join(binDir, 'bun-x86_64-unknown-linux-gnu'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + } + } catch (err) { + // Expect EEXIST error + } + try { + copySync( + path.join(tempBinDir, `bun-${bunPlatform}`, 'bun.exe'), + path.join(binDir) + ) + if (platform === 'win32') { + copyFile(path.join(binDir, 'bun.exe'), path.join(binDir, 'bun-x86_64-pc-windows-msvc.exe'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + } + } catch (err) { + // Expect EEXIST error + } + console.log('Bun downloaded.') + + console.log(`Downloading UV for ${uvPlatform}...`) + const uvExt = platform === 'win32' ? 
`zip` : `tar.gz` + const uvSaveDir = path.join(tempBinDir, `uv-${uvPlatform}.${uvExt}`) + if (!fs.existsSync(uvSaveDir)) { + await download(uvUrl, uvSaveDir) + await decompress(uvPath, tempBinDir) + } + try { + copySync( + path.join(tempBinDir, `uv-${uvPlatform}`, 'uv'), + path.join(binDir) + ) + fs.chmod(path.join(binDir, 'uv'), 0o755, (err) => { + if (err) { + console.log('Add execution permission failed!', err) + } + }); + if (platform === 'darwin') { + copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-x86_64-apple-darwin'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-aarch64-apple-darwin'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + } else if (platform === 'linux') { + copyFile(path.join(binDir, 'uv'), path.join(binDir, 'uv-x86_64-unknown-linux-gnu'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + } + } catch (err) { + // Expect EEXIST error + } + try { + copySync( + path.join(tempBinDir, 'uv.exe'), + path.join(binDir) + ) + if (platform === 'win32') { + copyFile(path.join(binDir, 'uv.exe'), path.join(binDir, 'uv-x86_64-pc-windows-msvc.exe'), (err) => { + if (err) { + console.log("Error Found:", err); + } + }) + } + } catch (err) { + // Expect EEXIST error + } + console.log('UV downloaded.') + + console.log('Downloads completed.') +} + +// Ensure the downloads directory exists +if (!fs.existsSync('downloads')) { + fs.mkdirSync('downloads') +} + +main().catch((err) => { + console.error('Error:', err) + process.exit(1) +}) diff --git a/src-tauri/.gitignore b/src-tauri/.gitignore new file mode 100644 index 000000000..40726cbe0 --- /dev/null +++ b/src-tauri/.gitignore @@ -0,0 +1,7 @@ +# Generated by Cargo +# will have compiled files and executables +/target/ +/gen/schemas +binaries +!binaries/download.sh +!binaries/download.bat \ No newline at end of file diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml new file mode 
100644 index 000000000..5ed527678 --- /dev/null +++ b/src-tauri/Cargo.toml @@ -0,0 +1,46 @@ +[package] +name = "Jan" +version = "0.5.16" +description = "Use offline LLMs with your own data. Run open source models like Llama2 or Falcon on your internal computers/servers." +authors = ["Jan "] +license = "MIT" +repository = "https://github.com/menloresearch/jan" +edition = "2021" +rust-version = "1.77.2" + +[lib] +name = "app_lib" +crate-type = ["staticlib", "cdylib", "rlib"] + +[build-dependencies] +tauri-build = { version = "2.0.2", features = [] } + +[dependencies] +serde_json = "1.0" +serde = { version = "1.0", features = ["derive"] } +log = "0.4" +tauri = { version = "2.1.0", features = [ "protocol-asset", "macos-private-api", + "test", +] } +tauri-plugin-log = "2.0.0-rc" +tauri-plugin-shell = "2.2.0" +flate2 = "1.0" +tar = "0.4" +rand = "0.8" +tauri-plugin-http = { version = "2", features = ["unsafe-headers"] } +tauri-plugin-store = "2" +hyper = { version = "0.14", features = ["server"] } +reqwest = { version = "0.11", features = ["json", "blocking"] } +tokio = { version = "1", features = ["full"] } +rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = [ + "client", + "transport-sse", + "transport-child-process", + "tower", +] } +uuid = { version = "1.7", features = ["v4"] } +env = "1.0.1" + +[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] +tauri-plugin-updater = "2" +once_cell = "1.18" diff --git a/src-tauri/app-icon.png b/src-tauri/app-icon.png new file mode 100644 index 000000000..289f99ded Binary files /dev/null and b/src-tauri/app-icon.png differ diff --git a/src-tauri/binaries/download.bat b/src-tauri/binaries/download.bat new file mode 100644 index 000000000..1a51c49f3 --- /dev/null +++ b/src-tauri/binaries/download.bat @@ -0,0 +1,41 @@ +@echo off + +set CORTEX_VERSION=1.0.13-rc6 +set ENGINE_VERSION=b5371 +set 
ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/%ENGINE_VERSION%/llama-%ENGINE_VERSION%-bin-win +set ENGINE_DOWNLOAD_GGML_URL=https://github.com/ggml-org/llama.cpp/releases/download/%ENGINE_VERSION%/llama-%ENGINE_VERSION%-bin-win +set CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/%ENGINE_VERSION% +@REM set SUBFOLDERS=windows-amd64-noavx-cuda-12-0 windows-amd64-noavx-cuda-11-7 windows-amd64-avx2-cuda-12-0 windows-amd64-avx2-cuda-11-7 windows-amd64-noavx windows-amd64-avx windows-amd64-avx2 windows-amd64-avx512 windows-amd64-vulkan +set BIN_PATH="./" +set DOWNLOAD_TOOL=..\..\extensions\inference-cortex-extension\node_modules\.bin\download + +@REM Download llama.cpp binaries +call %DOWNLOAD_TOOL% -e --strip 1 -o %BIN_PATH% https://github.com/menloresearch/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz +call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx2-cuda-cu12.0-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-avx2-cuda-cu12.0-x64/%ENGINE_VERSION% +@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx2-cuda-cu11.7-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-avx2-cuda-cu11.7-x64/%ENGINE_VERSION% +@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-noavx-cuda-cu12.0-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-noavx-cuda-cu12.0-x64/%ENGINE_VERSION% +@REM call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-noavx-cuda-cu11.7-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-noavx-cuda-cu11.7-x64/%ENGINE_VERSION% +call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-noavx-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-noavx-x64/%ENGINE_VERSION% +call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-avx-x64/%ENGINE_VERSION% +call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx2-x64.tar.gz -e --strip 2 -o./engines/llama.cpp/win-avx2-x64/%ENGINE_VERSION% +call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_URL%-avx512-x64.tar.gz -e --strip 2 
-o./engines/llama.cpp/win-avx512-x64/%ENGINE_VERSION% +call %DOWNLOAD_TOOL% %ENGINE_DOWNLOAD_GGML_URL%-vulkan-x64.zip -e --strip 1 -o./engines/llama.cpp/win-vulkan-x64/%ENGINE_VERSION% +call %DOWNLOAD_TOOL% %CUDA_DOWNLOAD_URL%/cudart-llama-bin-win-cu12.0-x64.tar.gz -e --strip 1 -o %BIN_PATH% +@REM call %DOWNLOAD_TOOL% %CUDA_DOWNLOAD_URL%/cudart-llama-bin-win-cu11.7-x64.tar.gz -e --strip 1 -o %BIN_PATH% + +move %BIN_PATH%cortex-server-beta.exe %BIN_PATH%cortex-server.exe +copy %BIN_PATH%cortex-server.exe %BIN_PATH%cortex-server-x86_64-pc-windows-msvc.exe +del %BIN_PATH%cortex-beta.exe +del %BIN_PATH%cortex.exe + +@REM Loop through each folder and move DLLs +for %%F in (%SUBFOLDERS%) do ( + echo Processing folder: .\engines\llama.cpp\%%F\%ENGINE_VERSION% + + @REM Move cu*.dll files + for %%D in (.\engines\engines\llama.cpp\%%F\%ENGINE_VERSION%\cu*.dll) do ( + move "%%D" "%BIN_PATH%" + ) +) + +echo DLL files moved successfully. diff --git a/src-tauri/binaries/download.sh b/src-tauri/binaries/download.sh new file mode 100755 index 000000000..e1ad30db9 --- /dev/null +++ b/src-tauri/binaries/download.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +download() { + URL="$1" + EXTRA_ARGS="${@:3}" + OUTPUT_DIR="${EXTRA_ARGS[${#EXTRA_ARGS[@]} -1]}" + + mkdir -p "$OUTPUT_DIR" + + echo "Downloading $URL to $OUTPUT_DIR using curl..." + curl -L "$URL" -o "$OUTPUT_DIR/$(basename "$URL")" + tar -xzf "$OUTPUT_DIR/$(basename "$URL")" -C "$OUTPUT_DIR" --strip-components $2 + rm "$OUTPUT_DIR/$(basename "$URL")" +} + +# Read CORTEX_VERSION +CORTEX_VERSION=1.0.13-rc6 +ENGINE_VERSION=b5371 +CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download" +ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin +CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION} +BIN_PATH=./ +SHARED_PATH="." 
+# Detect platform +OS_TYPE=$(uname) + +if ls ./cortex-server* 1> /dev/null 2>&1; then + echo "cortex-server file with prefix already exists. Exiting." + exit 0 +fi + +if [ "$OS_TYPE" == "Linux" ]; then + # Linux downloads + download "${CORTEX_RELEASE_URL}/v${CORTEX_VERSION}/cortex-${CORTEX_VERSION}-linux-amd64.tar.gz" 1 "${BIN_PATH}" + mv ./cortex-server-beta ./cortex-server + rm -rf ./cortex + rm -rf ./cortex-beta + chmod +x "./cortex-server" + cp ./cortex-server ./cortex-server-x86_64-unknown-linux-gnu + + # Download engines for Linux + download "${ENGINE_DOWNLOAD_URL}-linux-noavx-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-noavx-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-avx-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-avx-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-avx2-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-avx2-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-avx512-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-avx512-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-avx2-cuda-cu12.0-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-avx2-cuda-cu12.0-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-avx2-cuda-cu11.7-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-avx2-cuda-cu11.7-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-noavx-cuda-cu12.0-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-noavx-cuda-cu12.0-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-noavx-cuda-cu11.7-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-noavx-cuda-cu11.7-x64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-linux-vulkan-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/linux-vulkan-x64/${ENGINE_VERSION}" + download "${CUDA_DOWNLOAD_URL}/cudart-llama-bin-linux-cu12.0-x64.tar.gz" 0 "${BIN_PATH}" + download "${CUDA_DOWNLOAD_URL}/cudart-llama-bin-linux-cu11.7-x64.tar.gz" 0 "${BIN_PATH}" + +elif [ 
"$OS_TYPE" == "Darwin" ]; then + # macOS downloads + download "${CORTEX_RELEASE_URL}/v${CORTEX_VERSION}/cortex-${CORTEX_VERSION}-mac-universal.tar.gz" 1 "${BIN_PATH}" + mv ./cortex-server-beta ./cortex-server + rm -rf ./cortex + rm -rf ./cortex-beta + chmod +x "./cortex-server" + mv ./cortex-server ./cortex-server-universal-apple-darwin + cp ./cortex-server-universal-apple-darwin ./cortex-server-aarch64-apple-darwin + cp ./cortex-server-universal-apple-darwin ./cortex-server-x86_64-apple-darwin + + # Download engines for macOS + download "${ENGINE_DOWNLOAD_URL}-macos-arm64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/macos-arm64/${ENGINE_VERSION}" + download "${ENGINE_DOWNLOAD_URL}-macos-x64.tar.gz" 2 "${SHARED_PATH}/engines/llama.cpp/macos-x64/${ENGINE_VERSION}" + + +else + echo "Unsupported operating system: $OS_TYPE" + exit 1 +fi diff --git a/src-tauri/build.rs b/src-tauri/build.rs new file mode 100644 index 000000000..d860e1e6a --- /dev/null +++ b/src-tauri/build.rs @@ -0,0 +1,3 @@ +fn main() { + tauri_build::build() +} diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json new file mode 100644 index 000000000..fd73a02b3 --- /dev/null +++ b/src-tauri/capabilities/default.json @@ -0,0 +1,56 @@ +{ + "$schema": "../gen/schemas/desktop-schema.json", + "identifier": "default", + "description": "enables the default permissions", + "windows": ["main"], + "remote": { + "urls": ["http://*"] + }, + "permissions": [ + "core:default", + "core:window:allow-start-dragging", + "shell:allow-spawn", + "shell:allow-open", + "log:default", + { + "identifier": "http:default", + "allow": [ + { + "url": "https://*:*" + }, + { + "url": "http://*:*" + } + ], + "deny": [] + }, + { + "identifier": "shell:allow-execute", + "allow": [ + { + "args": [ + "--start-server", + { + "validator": "\\S+" + }, + "--port", + { + "validator": "\\S+" + }, + "--config_file_path", + { + "validator": "\\S+" + }, + "--data_folder_path", + { + "validator": "\\S+" + } + ], + 
"name": "binaries/cortex-server", + "sidecar": true + } + ] + }, + "store:default" + ] +} diff --git a/src-tauri/icons/icon.png b/src-tauri/icons/icon.png new file mode 100644 index 000000000..1b354a241 Binary files /dev/null and b/src-tauri/icons/icon.png differ diff --git a/src-tauri/latest.json.template b/src-tauri/latest.json.template new file mode 100644 index 000000000..50a570c9e --- /dev/null +++ b/src-tauri/latest.json.template @@ -0,0 +1,23 @@ +{ + "version": "", + "notes": "", + "pub_date": "", + "platforms": { + "linux-x86_64": { + "signature": "", + "url": "" + }, + "windows-x86_64": { + "signature": "", + "url": "" + }, + "darwin-aarch64": { + "signature": "", + "url": "" + }, + "darwin-x86_64": { + "signature": "", + "url": "" + } + } +} \ No newline at end of file diff --git a/src-tauri/sign.ps1 b/src-tauri/sign.ps1 new file mode 100644 index 000000000..a54d525fe --- /dev/null +++ b/src-tauri/sign.ps1 @@ -0,0 +1,12 @@ +param ( + [string]$Target +) + +AzureSignTool.exe sign ` + -tr http://timestamp.digicert.com ` + -kvu $env:AZURE_KEY_VAULT_URI ` + -kvi $env:AZURE_CLIENT_ID ` + -kvt $env:AZURE_TENANT_ID ` + -kvs $env:AZURE_CLIENT_SECRET ` + -kvc $env:AZURE_CERT_NAME ` + -v $Target \ No newline at end of file diff --git a/src-tauri/src/core/cmd.rs b/src-tauri/src/core/cmd.rs new file mode 100644 index 000000000..a9f90ca80 --- /dev/null +++ b/src-tauri/src/core/cmd.rs @@ -0,0 +1,395 @@ +use rmcp::model::{CallToolRequestParam, CallToolResult, Tool}; +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; +use std::{fs, path::PathBuf}; +use tauri::{AppHandle, Manager, Runtime, State}; + +use super::{server, setup, state::AppState}; + +const CONFIGURATION_FILE_NAME: &str = "settings.json"; +const DEFAULT_MCP_CONFIG: &str = r#"{ + "mcpServers": {} +}"#; + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct AppConfiguration { + pub data_folder: String, + // Add other fields as needed +} +impl AppConfiguration { + pub fn default() -> 
Self { + Self { + data_folder: String::from("./data"), // Set a default value for the data_folder + // Add other fields with default values as needed + } + } +} + +#[tauri::command] +pub fn get_app_configurations(app_handle: tauri::AppHandle) -> AppConfiguration { + let mut app_default_configuration = AppConfiguration::default(); + + if std::env::var("CI").unwrap_or_default() == "e2e" { + return app_default_configuration; + } + + let configuration_file = get_configuration_file_path(app_handle.clone()); + + let default_data_folder = default_data_folder_path(app_handle.clone()); + + if !configuration_file.exists() { + log::info!( + "App config not found, creating default config at {:?}", + configuration_file + ); + + app_default_configuration.data_folder = default_data_folder; + + if let Err(err) = fs::write( + &configuration_file, + serde_json::to_string(&app_default_configuration).unwrap(), + ) { + log::error!("Failed to create default config: {}", err); + } + + return app_default_configuration; + } + + match fs::read_to_string(&configuration_file) { + Ok(content) => match serde_json::from_str::(&content) { + Ok(app_configurations) => app_configurations, + Err(err) => { + log::error!( + "Failed to parse app config, returning default config instead. Error: {}", + err + ); + app_default_configuration + } + }, + Err(err) => { + log::error!( + "Failed to read app config, returning default config instead. 
Error: {}", + err + ); + app_default_configuration + } + } +} + +#[tauri::command] +pub fn update_app_configuration( + app_handle: tauri::AppHandle, + configuration: AppConfiguration, +) -> Result<(), String> { + let configuration_file = get_configuration_file_path(app_handle); + log::info!( + "update_app_configuration, configuration_file: {:?}", + configuration_file + ); + + fs::write( + configuration_file, + serde_json::to_string(&configuration).map_err(|e| e.to_string())?, + ) + .map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn get_jan_data_folder_path(app_handle: tauri::AppHandle) -> PathBuf { + if cfg!(test) { + return PathBuf::from("./data"); + } + + let app_configurations = get_app_configurations(app_handle); + log::info!("data_folder: {}", app_configurations.data_folder); + PathBuf::from(app_configurations.data_folder) +} + +#[tauri::command] +pub fn get_jan_extensions_path(app_handle: tauri::AppHandle) -> PathBuf { + get_jan_data_folder_path(app_handle).join("extensions") +} + +#[tauri::command] +pub fn get_themes(app_handle: tauri::AppHandle) -> Vec { + let mut themes = vec![]; + let themes_path = get_jan_data_folder_path(app_handle).join("themes"); + if themes_path.exists() { + for entry in fs::read_dir(themes_path).unwrap() { + let entry = entry.unwrap(); + if entry.path().is_dir() { + if let Some(name) = entry.file_name().to_str() { + themes.push(name.to_string()); + } + } + } + } + themes +} + +#[tauri::command] +pub fn read_theme(app_handle: tauri::AppHandle, theme_name: String) -> Result { + let themes_path = get_jan_data_folder_path(app_handle) + .join("themes") + .join(theme_name.clone()) + .join("theme.json"); + if themes_path.exists() { + let content = fs::read_to_string(themes_path).map_err(|e| e.to_string())?; + Ok(content) + } else { + Err(format!("Theme {} not found", theme_name.clone())) + } +} + +#[tauri::command] +pub fn get_configuration_file_path(app_handle: tauri::AppHandle) -> PathBuf { + let app_path = 
app_handle.path().app_data_dir().unwrap_or_else(|err| { + log::error!( + "Failed to get app data directory: {}. Using home directory instead.", + err + ); + + let home_dir = std::env::var(if cfg!(target_os = "windows") { + "USERPROFILE" + } else { + "HOME" + }) + .expect("Failed to determine the home directory"); + + PathBuf::from(home_dir) + }); + + let package_name = env!("CARGO_PKG_NAME"); + log::info!("Package name: {}", package_name); + let old_data_dir = app_path + .clone() + .parent() + .unwrap_or(&app_path.join("../")) + .join(package_name); + if old_data_dir.exists() { + return old_data_dir.join(CONFIGURATION_FILE_NAME); + } else { + return app_path.join(CONFIGURATION_FILE_NAME); + } +} + +#[tauri::command] +pub fn default_data_folder_path(app_handle: tauri::AppHandle) -> String { + return app_handle + .path() + .app_data_dir() + .unwrap() + .to_str() + .unwrap() + .to_string(); +} + +#[tauri::command] +pub fn relaunch(app: AppHandle) { + app.restart() +} + +#[tauri::command] +pub fn open_app_directory(app: AppHandle) { + let app_path = app.path().app_data_dir().unwrap(); + if cfg!(target_os = "windows") { + std::process::Command::new("explorer") + .arg(app_path) + .spawn() + .expect("Failed to open app directory"); + } else if cfg!(target_os = "macos") { + std::process::Command::new("open") + .arg(app_path) + .spawn() + .expect("Failed to open app directory"); + } else { + std::process::Command::new("xdg-open") + .arg(app_path) + .spawn() + .expect("Failed to open app directory"); + } +} + +#[tauri::command] +pub fn open_file_explorer(path: String) { + let path = PathBuf::from(path); + if cfg!(target_os = "windows") { + std::process::Command::new("explorer") + .arg(path) + .spawn() + .expect("Failed to open file explorer"); + } else if cfg!(target_os = "macos") { + std::process::Command::new("open") + .arg(path) + .spawn() + .expect("Failed to open file explorer"); + } else { + std::process::Command::new("xdg-open") + .arg(path) + .spawn() + 
.expect("Failed to open file explorer"); + } +} + +#[tauri::command] +pub fn install_extensions(app: AppHandle) { + if let Err(err) = setup::install_extensions(app, true) { + log::error!("Failed to install extensions: {}", err); + } +} + +#[tauri::command] +pub fn get_active_extensions(app: AppHandle) -> Vec { + let mut path = get_jan_extensions_path(app); + path.push("extensions.json"); + log::info!("get jan extensions, path: {:?}", path); + + let contents = fs::read_to_string(path); + let contents: Vec = match contents { + Ok(data) => match serde_json::from_str::>(&data) { + Ok(exts) => exts + .into_iter() + .map(|ext| { + serde_json::json!({ + "url": ext["url"], + "name": ext["name"], + "productName": ext["productName"], + "active": ext["_active"], + "description": ext["description"], + "version": ext["version"] + }) + }) + .collect(), + Err(_) => vec![], + }, + Err(_) => vec![], + }; + return contents; +} + +#[tauri::command] +pub fn get_user_home_path(app: AppHandle) -> String { + return get_app_configurations(app.clone()).data_folder; +} + +#[tauri::command] +pub fn app_token(state: State<'_, AppState>) -> Option { + state.app_token.clone() +} + +#[tauri::command] +pub async fn start_server( + app: AppHandle, + host: String, + port: u16, + prefix: String, +) -> Result { + server::start_server(host, port, prefix, app_token(app.state()).unwrap()) + .await + .map_err(|e| e.to_string())?; + Ok(true) +} + +#[tauri::command] +pub async fn stop_server() -> Result<(), String> { + server::stop_server().await.map_err(|e| e.to_string())?; + Ok(()) +} + +/// Retrieves all available tools from all MCP servers +/// +/// # Arguments +/// * `state` - Application state containing MCP server connections +/// +/// # Returns +/// * `Result, String>` - A vector of all tools if successful, or an error message if failed +/// +/// This function: +/// 1. Locks the MCP servers mutex to access server connections +/// 2. Iterates through all connected servers +/// 3. 
Gets the list of tools from each server +/// 4. Combines all tools into a single vector +/// 5. Returns the combined list of all available tools +#[tauri::command] +pub async fn get_tools(state: State<'_, AppState>) -> Result, String> { + let servers = state.mcp_servers.lock().await; + let mut all_tools: Vec = Vec::new(); + + for (_, service) in servers.iter() { + // List tools + let tools = service.list_all_tools().await.map_err(|e| e.to_string())?; + + for tool in tools { + all_tools.push(tool); + } + } + + Ok(all_tools) +} + +/// Calls a tool on an MCP server by name with optional arguments +/// +/// # Arguments +/// * `state` - Application state containing MCP server connections +/// * `tool_name` - Name of the tool to call +/// * `arguments` - Optional map of argument names to values +/// +/// # Returns +/// * `Result` - Result of the tool call if successful, or error message if failed +/// +/// This function: +/// 1. Locks the MCP servers mutex to access server connections +/// 2. Searches through all servers for one containing the named tool +/// 3. When found, calls the tool on that server with the provided arguments +/// 4. 
Returns error if no server has the requested tool +#[tauri::command] +pub async fn call_tool( + state: State<'_, AppState>, + tool_name: String, + arguments: Option>, +) -> Result { + let servers = state.mcp_servers.lock().await; + + // Iterate through servers and find the first one that contains the tool + for (_, service) in servers.iter() { + if let Ok(tools) = service.list_all_tools().await { + if tools.iter().any(|t| t.name == tool_name) { + return service + .call_tool(CallToolRequestParam { + name: tool_name.into(), + arguments, + }) + .await + .map_err(|e| e.to_string()); + } + } + } + + Err(format!("Tool {} not found", tool_name)) +} + +#[tauri::command] +pub async fn get_mcp_configs(app: AppHandle) -> Result { + let mut path = get_jan_data_folder_path(app); + path.push("mcp_config.json"); + log::info!("read mcp configs, path: {:?}", path); + + // Create default empty config if file doesn't exist + if !path.exists() { + log::info!("mcp_config.json not found, creating default empty config"); + fs::write(&path, DEFAULT_MCP_CONFIG) + .map_err(|e| format!("Failed to create default MCP config: {}", e))?; + } + + let contents = fs::read_to_string(path).map_err(|e| e.to_string())?; + return Ok(contents); +} + +#[tauri::command] +pub async fn save_mcp_configs(app: AppHandle, configs: String) -> Result<(), String> { + let mut path = get_jan_data_folder_path(app); + path.push("mcp_config.json"); + log::info!("save mcp configs, path: {:?}", path); + + fs::write(path, configs).map_err(|e| e.to_string()) +} diff --git a/src-tauri/src/core/fs.rs b/src-tauri/src/core/fs.rs new file mode 100644 index 000000000..66486cf0a --- /dev/null +++ b/src-tauri/src/core/fs.rs @@ -0,0 +1,197 @@ +// WARNING: These APIs will be deprecated soon due to removing FS API access from frontend. +// It's added to ensure the legacy implementation from frontend still functions before removal. 
+use crate::core::cmd::get_jan_data_folder_path; +use std::fs; +use std::path::PathBuf; +use tauri::Runtime; + +#[tauri::command] +pub fn rm(app_handle: tauri::AppHandle, args: Vec) -> Result<(), String> { + if args.is_empty() || args[0].is_empty() { + return Err("rm error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + fs::remove_dir_all(&path).map_err(|e| e.to_string()) +} +#[tauri::command] +pub fn mkdir(app_handle: tauri::AppHandle, args: Vec) -> Result<(), String> { + if args.is_empty() || args[0].is_empty() { + return Err("mkdir error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + fs::create_dir_all(&path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn join_path( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result { + if args.is_empty() { + return Err("join_path error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + let joined_path = path.join(args[1..].join("/")); + Ok(joined_path.to_string_lossy().to_string()) +} +#[tauri::command] +pub fn exists_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result { + if args.is_empty() || args[0].is_empty() { + return Err("exist_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + Ok(path.exists()) +} + +#[tauri::command] +pub fn read_file_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result { + if args.is_empty() || args[0].is_empty() { + return Err("read_file_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + fs::read_to_string(&path).map_err(|e| e.to_string()) +} + +#[tauri::command] +pub fn readdir_sync( + app_handle: tauri::AppHandle, + args: Vec, +) -> Result, String> { + if args.is_empty() || args[0].is_empty() { + return Err("read_dir_sync error: Invalid argument".to_string()); + } + + let path = resolve_path(app_handle, &args[0]); + log::error!("Reading 
directory: {:?}", path); + let entries = fs::read_dir(&path).map_err(|e| e.to_string())?; + let paths: Vec = entries + .filter_map(|entry| entry.ok()) + .map(|entry| entry.path().to_string_lossy().to_string()) + .collect(); + Ok(paths) +} + +fn normalize_file_path(path: &str) -> String { + path.replace("file:/", "").replace("file:\\", "") +} + +fn resolve_path(app_handle: tauri::AppHandle, path: &str) -> PathBuf { + let path = if path.starts_with("file:/") || path.starts_with("file:\\") { + let normalized = normalize_file_path(path); + let relative_normalized = normalized.strip_prefix("/").unwrap_or(&normalized); + get_jan_data_folder_path(app_handle).join(relative_normalized) + } else { + PathBuf::from(path) + }; + + if path.starts_with("http://") || path.starts_with("https://") { + path + } else { + path.canonicalize().unwrap_or(path) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs::{self, File}; + use std::io::Write; + use tauri::test::mock_app; + + #[test] + fn test_rm() { + let app = mock_app(); + let path = "test_rm_dir"; + fs::create_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); + let args = vec![format!("file://{}", path).to_string()]; + let result = rm(app.handle().clone(), args); + assert!(result.is_ok()); + assert!(!get_jan_data_folder_path(app.handle().clone()) + .join(path) + .exists()); + } + + #[test] + fn test_mkdir() { + let app = mock_app(); + let path = "test_mkdir_dir"; + let args = vec![format!("file://{}", path).to_string()]; + let result = mkdir(app.handle().clone(), args); + assert!(result.is_ok()); + assert!(get_jan_data_folder_path(app.handle().clone()) + .join(path) + .exists()); + fs::remove_dir_all(get_jan_data_folder_path(app.handle().clone()).join(path)).unwrap(); + } + + #[test] + fn test_join_path() { + let app = mock_app(); + let path = "file://test_dir"; + let args = vec![path.to_string(), "test_file".to_string()]; + let result = join_path(app.handle().clone(), args).unwrap(); + 
assert_eq!( + result, + get_jan_data_folder_path(app.handle().clone()) + .join("test_dir/test_file") + .to_string_lossy() + .to_string() + ); + } + + #[test] + fn test_exists_sync() { + let app = mock_app(); + let path = "file://test_exists_sync_file"; + let dir_path = get_jan_data_folder_path(app.handle().clone()); + fs::create_dir_all(&dir_path).unwrap(); + let file_path = dir_path.join("test_exists_sync_file"); + File::create(&file_path).unwrap(); + let args: Vec = vec![path.to_string()]; + let result = exists_sync(app.handle().clone(), args).unwrap(); + assert!(result); + fs::remove_file(file_path).unwrap(); + } + + #[test] + fn test_read_file_sync() { + let app = mock_app(); + let path = "file://test_read_file_sync_file"; + let dir_path = get_jan_data_folder_path(app.handle().clone()); + fs::create_dir_all(&dir_path).unwrap(); + let file_path = dir_path.join("test_read_file_sync_file"); + let mut file = File::create(&file_path).unwrap(); + file.write_all(b"test content").unwrap(); + let args = vec![path.to_string()]; + let result = read_file_sync(app.handle().clone(), args).unwrap(); + assert_eq!(result, "test content".to_string()); + fs::remove_file(file_path).unwrap(); + } + + #[test] + fn test_readdir_sync() { + let app = mock_app(); + let path = "file://test_readdir_sync_dir"; + let dir_path = get_jan_data_folder_path(app.handle().clone()).join(path); + fs::create_dir_all(&dir_path).unwrap(); + File::create(dir_path.join("file1.txt")).unwrap(); + File::create(dir_path.join("file2.txt")).unwrap(); + + let args = vec![dir_path.to_string_lossy().to_string()]; + let result = readdir_sync(app.handle().clone(), args).unwrap(); + assert_eq!(result.len(), 2); + + fs::remove_dir_all(dir_path).unwrap(); + } +} diff --git a/src-tauri/src/core/mcp.rs b/src-tauri/src/core/mcp.rs new file mode 100644 index 000000000..91ffdbafd --- /dev/null +++ b/src-tauri/src/core/mcp.rs @@ -0,0 +1,157 @@ +use std::{collections::HashMap, env, sync::Arc}; + +use 
rmcp::{service::RunningService, transport::TokioChildProcess, RoleClient, ServiceExt}; +use serde_json::Value; +use tauri::{AppHandle, Emitter, State}; +use tokio::{process::Command, sync::Mutex}; + +use super::{cmd::get_jan_data_folder_path, state::AppState}; + +/// Runs MCP commands by reading configuration from a JSON file and initializing servers +/// +/// # Arguments +/// * `app_path` - Path to the application directory containing mcp_config.json +/// * `servers_state` - Shared state containing running MCP services +/// +/// # Returns +/// * `Ok(())` if servers were initialized successfully +/// * `Err(String)` if there was an error reading config or starting servers +pub async fn run_mcp_commands( + app_path: String, + servers_state: Arc>>>, +) -> Result<(), String> { + log::info!( + "Load MCP configs from {}", + app_path.clone() + "/mcp_config.json" + ); + let config_content = std::fs::read_to_string(app_path.clone() + "/mcp_config.json") + .map_err(|e| format!("Failed to read config file: {}", e))?; + + let mcp_servers: serde_json::Value = serde_json::from_str(&config_content) + .map_err(|e| format!("Failed to parse config: {}", e))?; + + if let Some(server_map) = mcp_servers.get("mcpServers").and_then(Value::as_object) { + log::info!("MCP Servers: {server_map:#?}"); + + let exe_path = env::current_exe().expect("Failed to get current exe path"); + let exe_parent_path = exe_path.parent().expect("Executable must have a parent directory"); + let bin_path = exe_parent_path.to_path_buf(); + for (name, config) in server_map { + if let Some((command, args, envs)) = extract_command_args(config) { + let mut cmd = Command::new(command.clone()); + if command.clone() == "npx" { + let bun_x_path = format!("{}/bun", bin_path.display()); + cmd = Command::new(bun_x_path); + cmd.arg("x"); + } + + if command.clone() == "uvx" { + let bun_x_path = format!("{}/uv", bin_path.display()); + cmd = Command::new(bun_x_path); + cmd.arg("tool run"); + cmd.arg("run"); + } + 
println!("Command: {cmd:#?}"); + + args.iter().filter_map(Value::as_str).for_each(|arg| { + cmd.arg(arg); + }); + envs.iter().for_each(|(k, v)| { + if let Some(v_str) = v.as_str() { + cmd.env(k, v_str); + } + }); + + let service = + ().serve(TokioChildProcess::new(&mut cmd).map_err(|e| e.to_string())?) + .await + .map_err(|e| e.to_string())?; + + servers_state.lock().await.insert(name.clone(), service); + } + } + } + + // Collect servers into a Vec to avoid holding the RwLockReadGuard across await points + let servers_map = servers_state.lock().await; + for (_, service) in servers_map.iter() { + // Initialize + let _server_info = service.peer_info(); + log::info!("Connected to server: {_server_info:#?}"); + } + Ok(()) +} + +fn extract_command_args( + config: &Value, +) -> Option<(String, Vec, serde_json::Map)> { + let obj = config.as_object()?; + let command = obj.get("command")?.as_str()?.to_string(); + let args = obj.get("args")?.as_array()?.clone(); + let envs = obj + .get("env") + .unwrap_or(&Value::Object(serde_json::Map::new())) + .as_object()? 
+ .clone(); + Some((command, args, envs)) +} + +#[tauri::command] +pub async fn restart_mcp_servers(app: AppHandle, state: State<'_, AppState>) -> Result<(), String> { + let app_path = get_jan_data_folder_path(app.clone()); + let app_path_str = app_path.to_str().unwrap().to_string(); + let servers = state.mcp_servers.clone(); + // Stop the servers + stop_mcp_servers(state.mcp_servers.clone()).await?; + + // Restart the servers + run_mcp_commands(app_path_str, servers).await?; + + app.emit("mcp-update", "MCP servers updated") + .map_err(|e| format!("Failed to emit event: {}", e)) +} + +pub async fn stop_mcp_servers( + servers_state: Arc>>>, +) -> Result<(), String> { + let mut servers_map = servers_state.lock().await; + let keys: Vec = servers_map.keys().cloned().collect(); + for key in keys { + if let Some(service) = servers_map.remove(&key) { + service.cancel().await.map_err(|e| e.to_string())?; + } + } + drop(servers_map); // Release the lock after stopping + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::collections::HashMap; + use std::fs::File; + use std::io::Write; + use std::sync::Arc; + use tokio::sync::Mutex; + + #[tokio::test] + async fn test_run_mcp_commands() { + // Create a mock mcp_config.json file + let config_path = "mcp_config.json"; + let mut file = File::create(config_path).expect("Failed to create config file"); + file.write_all(b"{\"mcpServers\":{}}") + .expect("Failed to write to config file"); + + // Call the run_mcp_commands function + let app_path = ".".to_string(); + let servers_state: Arc>>> = + Arc::new(Mutex::new(HashMap::new())); + let result = run_mcp_commands(app_path, servers_state).await; + + // Assert that the function returns Ok(()) + assert!(result.is_ok()); + + // Clean up the mock config file + std::fs::remove_file(config_path).expect("Failed to remove config file"); + } +} diff --git a/src-tauri/src/core/mod.rs b/src-tauri/src/core/mod.rs new file mode 100644 index 000000000..8d4edde3c --- /dev/null +++ 
b/src-tauri/src/core/mod.rs @@ -0,0 +1,8 @@ +pub mod cmd; +pub mod fs; +pub mod mcp; +pub mod server; +pub mod setup; +pub mod state; +pub mod threads; +pub mod utils; \ No newline at end of file diff --git a/src-tauri/src/core/server.rs b/src-tauri/src/core/server.rs new file mode 100644 index 000000000..042ecf135 --- /dev/null +++ b/src-tauri/src/core/server.rs @@ -0,0 +1,203 @@ +use hyper::service::{make_service_fn, service_fn}; +use hyper::{Body, Request, Response, Server, StatusCode}; +use reqwest::Client; +use std::convert::Infallible; +use std::net::SocketAddr; +use std::sync::LazyLock; +use tokio::sync::Mutex; +use tokio::task::JoinHandle; + +/// Server handle type for managing the proxy server lifecycle +type ServerHandle = JoinHandle>>; + +/// Global singleton for the current server instance +static SERVER_HANDLE: LazyLock>> = LazyLock::new(|| Mutex::new(None)); + +/// Configuration for the proxy server +#[derive(Clone)] +struct ProxyConfig { + upstream: String, + prefix: String, + auth_token: String, +} + +/// Removes a prefix from a path, ensuring proper formatting +fn remove_prefix(path: &str, prefix: &str) -> String { + log::debug!("Processing path: {}, removing prefix: {}", path, prefix); + + if !prefix.is_empty() && path.starts_with(prefix) { + let result = path[prefix.len()..].to_string(); + if result.is_empty() { + "/".to_string() + } else { + result + } + } else { + path.to_string() + } +} + +/// Determines the final destination path based on the original request path +fn get_destination_path(original_path: &str, prefix: &str) -> String { + let removed_prefix_path = remove_prefix(original_path, prefix); + + // Special paths don't need the /v1 prefix + if !original_path.contains(prefix) + || removed_prefix_path.contains("/healthz") + || removed_prefix_path.contains("/process") + { + original_path.to_string() + } else { + format!("/v1{}", removed_prefix_path) + } +} + +/// Creates the full upstream URL for the proxied request +fn 
build_upstream_url(upstream: &str, path: &str) -> String { + let upstream_clean = upstream.trim_end_matches('/'); + let path_clean = path.trim_start_matches('/'); + + format!("{}/{}", upstream_clean, path_clean) +} + +/// Handles the proxy request logic +async fn proxy_request( + req: Request, + client: Client, + config: ProxyConfig, +) -> Result, hyper::Error> { + let original_path = req.uri().path(); + let path = get_destination_path(original_path, &config.prefix); + + // Block access to /configs endpoint + if path.contains("/configs") { + return Ok(Response::builder() + .status(StatusCode::NOT_FOUND) + .body(Body::from("Not Found")) + .unwrap()); + } + + // Build the outbound request + let upstream_url = build_upstream_url(&config.upstream, &path); + log::debug!("Proxying request to: {}", upstream_url); + + let mut outbound_req = client.request(req.method().clone(), &upstream_url); + + // Copy original headers + for (name, value) in req.headers() { + if name != hyper::header::HOST { + // Skip host header + outbound_req = outbound_req.header(name, value); + } + } + + // Add authorization header + outbound_req = outbound_req.header("Authorization", format!("Bearer {}", config.auth_token)); + + // Send the request and handle the response + match outbound_req.body(req.into_body()).send().await { + Ok(response) => { + let status = response.status(); + log::debug!("Received response with status: {}", status); + + let mut builder = Response::builder().status(status); + + // Copy response headers + for (name, value) in response.headers() { + builder = builder.header(name, value); + } + + // Read response body + match response.bytes().await { + Ok(bytes) => Ok(builder.body(Body::from(bytes)).unwrap()), + Err(e) => { + log::error!("Failed to read response body: {}", e); + Ok(Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body(Body::from("Error reading upstream response")) + .unwrap()) + } + } + } + Err(e) => { + log::error!("Proxy request failed: {}", 
e); + Ok(Response::builder() + .status(StatusCode::BAD_GATEWAY) + .body(Body::from(format!("Upstream error: {}", e))) + .unwrap()) + } + } +} + +/// Starts the proxy server +pub async fn start_server( + host: String, + port: u16, + prefix: String, + auth_token: String, +) -> Result> { + // Check if server is already running + let mut handle_guard = SERVER_HANDLE.lock().await; + if handle_guard.is_some() { + return Err("Server is already running".into()); + } + + // Create server address + let addr: SocketAddr = format!("{}:{}", host, port) + .parse() + .map_err(|e| format!("Invalid address: {}", e))?; + + // Configure proxy settings + let config = ProxyConfig { + upstream: "http://127.0.0.1:39291".to_string(), + prefix, + auth_token, + }; + + // Create HTTP client + let client = Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .build()?; + + // Create service handler + let make_svc = make_service_fn(move |_conn| { + let client = client.clone(); + let config = config.clone(); + + async move { + Ok::<_, Infallible>(service_fn(move |req| { + proxy_request(req, client.clone(), config.clone()) + })) + } + }); + + // Create and start the server + let server = Server::bind(&addr).serve(make_svc); + log::info!("Proxy server started on http://{}", addr); + + // Spawn server task + let server_handle = tokio::spawn(async move { + if let Err(e) = server.await { + log::error!("Server error: {}", e); + return Err(Box::new(e) as Box); + } + Ok(()) + }); + + *handle_guard = Some(server_handle); + Ok(true) +} + +/// Stops the currently running proxy server +pub async fn stop_server() -> Result<(), Box> { + let mut handle_guard = SERVER_HANDLE.lock().await; + + if let Some(handle) = handle_guard.take() { + handle.abort(); + log::info!("Proxy server stopped"); + } else { + log::debug!("No server was running"); + } + + Ok(()) +} diff --git a/src-tauri/src/core/setup.rs b/src-tauri/src/core/setup.rs new file mode 100644 index 000000000..d70af1c70 --- /dev/null +++ 
b/src-tauri/src/core/setup.rs @@ -0,0 +1,302 @@
use flate2::read::GzDecoder;
use std::{
    fs::{self, File},
    io::Read,
    path::PathBuf,
    sync::{Arc, Mutex},
};
use tar::Archive;
use tauri::{App, Emitter, Listener, Manager};
use tauri_plugin_shell::process::CommandEvent;
use tauri_plugin_shell::ShellExt;
use tauri_plugin_store::StoreExt;

// MCP
use super::{
    cmd::{get_jan_data_folder_path, get_jan_extensions_path},
    mcp::run_mcp_commands,
    state::AppState,
};

/// Installs the bundled pre-install extensions (.tgz archives shipped under
/// `resources/pre-install`) into the Jan extensions folder, and records the
/// installed app version in `store.json`.
///
/// Runs only when `force` is true or the stored version differs from the
/// current app version; otherwise it is a no-op.
pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> {
    let mut store_path = get_jan_data_folder_path(app.clone());
    store_path.push("store.json");
    let store = app.store(store_path).expect("Store not initialized");
    let stored_version = store
        .get("version")
        .and_then(|v| v.as_str().map(String::from))
        .unwrap_or_default();

    let app_version = app
        .config()
        .version
        .clone()
        .unwrap_or_else(|| "".to_string());

    // Skip entirely when nothing changed and we are not forced.
    if !force && stored_version == app_version {
        return Ok(());
    }
    let extensions_path = get_jan_extensions_path(app.clone());
    let pre_install_path = app
        .path()
        .resource_dir()
        .unwrap()
        .join("resources")
        .join("pre-install");

    // Attempt to remove extensions folder
    if extensions_path.exists() {
        fs::remove_dir_all(&extensions_path).unwrap_or_else(|_| {
            log::info!("Failed to remove existing extensions folder, it may not exist.");
        });
    }

    // NOTE(review): on a version change without `force`, the old extensions
    // folder was just removed but nothing is reinstalled here — confirm the
    // reinstall is triggered elsewhere (e.g. a forced call on next launch).
    if !force {
        return Ok(());
    };

    // Attempt to create it again
    if !extensions_path.exists() {
        fs::create_dir_all(&extensions_path).map_err(|e| e.to_string())?;
    }

    // Load the existing manifest list, falling back to an empty list on any
    // read/parse failure.
    let extensions_json_path = extensions_path.join("extensions.json");
    let mut extensions_list = if extensions_json_path.exists() {
        let existing_data =
            fs::read_to_string(&extensions_json_path).unwrap_or_else(|_| "[]".to_string());
        serde_json::from_str::>(&existing_data).unwrap_or_else(|_| vec![])
    } else {
        vec![]
    };

    for entry in fs::read_dir(&pre_install_path).map_err(|e| e.to_string())? {
        let entry = entry.map_err(|e| e.to_string())?;
        let path = entry.path();

        if path.extension().map_or(false, |ext| ext == "tgz") {
            log::info!("Installing extension from {:?}", path);
            let tar_gz = File::open(&path).map_err(|e| e.to_string())?;
            let gz_decoder = GzDecoder::new(tar_gz);
            let mut archive = Archive::new(gz_decoder);

            // First pass: pull the package.json manifest out of the tarball to
            // learn the extension's name.
            let mut extension_name = None;
            let mut extension_manifest = None;
            extract_extension_manifest(&mut archive)
                .map_err(|e| e.to_string())
                .and_then(|manifest| match manifest {
                    Some(manifest) => {
                        extension_name = manifest["name"].as_str().map(|s| s.to_string());
                        extension_manifest = Some(manifest);
                        Ok(())
                    }
                    None => Err("Manifest is None".to_string()),
                })?;

            let extension_name = extension_name.ok_or("package.json not found in archive")?;
            let extension_dir = extensions_path.join(extension_name.clone());
            fs::create_dir_all(&extension_dir).map_err(|e| e.to_string())?;

            // Second pass: re-open the archive (entries() consumed the reader)
            // and unpack every file, stripping the leading "package/" component
            // that npm-style tarballs carry.
            let tar_gz = File::open(&path).map_err(|e| e.to_string())?;
            let gz_decoder = GzDecoder::new(tar_gz);
            let mut archive = Archive::new(gz_decoder);
            for entry in archive.entries().map_err(|e| e.to_string())? {
                let mut entry = entry.map_err(|e| e.to_string())?;
                let file_path = entry.path().map_err(|e| e.to_string())?;
                let components: Vec<_> = file_path.components().collect();
                if components.len() > 1 {
                    let relative_path: PathBuf = components[1..].iter().collect();
                    let target_path = extension_dir.join(relative_path);
                    if let Some(parent) = target_path.parent() {
                        fs::create_dir_all(parent).map_err(|e| e.to_string())?;
                    }
                    let _result = entry.unpack(&target_path).map_err(|e| e.to_string())?;
                }
            }

            // Resolve the extension's entry point; npm convention defaults to index.js.
            let main_entry = extension_manifest
                .as_ref()
                .and_then(|manifest| manifest["main"].as_str())
                .unwrap_or("index.js");
            let url = extension_dir.join(main_entry).to_string_lossy().to_string();

            let new_extension = serde_json::json!({
                "url": url,
                "name": extension_name.clone(),
                "origin": extension_dir.to_string_lossy(),
                "active": true,
                "description": extension_manifest
                    .as_ref()
                    .and_then(|manifest| manifest["description"].as_str())
                    .unwrap_or(""),
                "version": extension_manifest
                    .as_ref()
                    .and_then(|manifest| manifest["version"].as_str())
                    .unwrap_or(""),
                "productName": extension_manifest
                    .as_ref()
                    .and_then(|manifest| manifest["productName"].as_str())
                    .unwrap_or(""),
            });

            extensions_list.push(new_extension);

            log::info!("Installed extension to {:?}", extension_dir);
        }
    }
    fs::write(
        &extensions_json_path,
        serde_json::to_string_pretty(&extensions_list).map_err(|e| e.to_string())?,
    )
    .map_err(|e| e.to_string())?;

    // Store the new app version
    store.set("version", serde_json::json!(app_version));
    store.save().expect("Failed to save store");

    Ok(())
}

/// Scans a tar archive for the package manifest (`package/package.json` or
/// `package.json`) and returns it parsed as JSON, or `Ok(None)` if absent.
fn extract_extension_manifest(
    archive: &mut Archive,
) -> Result, String> {
    let entry = archive
        .entries()
        .map_err(|e| e.to_string())?
        .filter_map(|e| e.ok()) // Ignore errors in individual entries
        .find(|entry| {
            if let Ok(file_path) = entry.path() {
                let path_str = file_path.to_string_lossy();
                path_str == "package/package.json" || path_str == "package.json"
            } else {
                false
            }
        });

    if let Some(mut entry) = entry {
        let mut content = String::new();
        entry
            .read_to_string(&mut content)
            .map_err(|e| e.to_string())?;

        let package_json: serde_json::Value =
            serde_json::from_str(&content).map_err(|e| e.to_string())?;
        return Ok(Some(package_json));
    }

    Ok(None)
}

/// Spawns an async task that starts all configured MCP servers and emits an
/// "mcp-update" event to the frontend when done (success or failure).
pub fn setup_mcp(app: &App) {
    let app_path = get_jan_data_folder_path(app.handle().clone());

    let state = app.state::().inner();
    let app_path_str = app_path.to_str().unwrap().to_string();
    let servers = state.mcp_servers.clone();
    let app_handle = app.handle().clone();
    tauri::async_runtime::spawn(async move {
        if let Err(e) = run_mcp_commands(app_path_str, servers).await {
            log::error!("Failed to run mcp commands: {}", e);
        }
        app_handle.emit("mcp-update", "MCP servers updated").unwrap();
    });
}

/// Spawns the cortex-server sidecar on port 39291, wired to the Jan data
/// folder, with CORS restricted to the app's origins and the per-session app
/// token passed as its API key.
pub fn setup_sidecar(app: &App) -> Result<(), String> {
    // Setup sidecar

    let app_state = app.state::();
    let app_data_dir = get_jan_data_folder_path(app.handle().clone());
    let mut sidecar_command = app.shell().sidecar("cortex-server").unwrap().args([
        "--start-server",
        "--port",
        "39291",
        "--config_file_path",
        app_data_dir.join(".janrc").to_str().unwrap(),
        "--data_folder_path",
        app_data_dir.to_str().unwrap(),
        "--cors",
        "ON",
        "--allowed_origins",
        "http://localhost:3000,tauri://localhost,http://tauri.localhost",
        "config",
        "--api_keys",
        app_state.inner().app_token.as_deref().unwrap_or(""),
    ]);

    // On Windows the sidecar finds its bundled DLLs via PATH, so append the
    // app-data dir to the inherited PATH.
    #[cfg(target_os = "windows")]
    {
        sidecar_command = sidecar_command.env("PATH", {
            let app_data_dir = app.app_handle().path().app_data_dir().unwrap();
            let dest = app_data_dir.to_str().unwrap();
            let path = std::env::var("PATH").unwrap_or_default();
format!("{}{}{}", path, std::path::MAIN_SEPARATOR, dest) + }); + } + + #[cfg(not(target_os = "windows"))] + { + sidecar_command = sidecar_command.env("LD_LIBRARY_PATH", { + let app_data_dir = app.app_handle().path().app_data_dir().unwrap(); + let dest = app_data_dir.to_str().unwrap(); + let ld_library_path = std::env::var("LD_LIBRARY_PATH").unwrap_or_default(); + format!("{}{}{}", ld_library_path, std::path::MAIN_SEPARATOR, dest) + }); + } + + let (mut rx, _child) = sidecar_command.spawn().expect("Failed to spawn sidecar"); + let child = Arc::new(Mutex::new(Some(_child))); + let child_clone = child.clone(); + + tauri::async_runtime::spawn(async move { + // read events such as stdout + while let Some(event) = rx.recv().await { + if let CommandEvent::Stdout(line_bytes) = event { + let line = String::from_utf8_lossy(&line_bytes); + log::info!("Outputs: {:?}", line) + } + } + }); + + app.handle().listen("kill-sidecar", move |_| { + let mut child_guard = child_clone.lock().unwrap(); + if let Some(actual_child) = child_guard.take() { + actual_child.kill().unwrap(); + } + }); + Ok(()) +} + +fn copy_dir_all(src: PathBuf, dst: PathBuf) -> Result<(), String> { + fs::create_dir_all(&dst).map_err(|e| e.to_string())?; + log::info!("Copying from {:?} to {:?}", src, dst); + for entry in fs::read_dir(src).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + let ty = entry.file_type().map_err(|e| e.to_string())?; + if ty.is_dir() { + copy_dir_all(entry.path(), dst.join(entry.file_name())).map_err(|e| e.to_string())?; + } else { + fs::copy(entry.path(), dst.join(entry.file_name())).map_err(|e| e.to_string())?; + } + } + Ok(()) +} + +pub fn setup_engine_binaries(app: &App) -> Result<(), String> { + // Copy engine binaries to app_data + let app_data_dir = get_jan_data_folder_path(app.handle().clone()); + let binaries_dir = app.handle().path().resource_dir().unwrap().join("binaries"); + let themes_dir = app + .handle() + .path() + .resource_dir() + .unwrap() + .join("resources"); + + if let Err(e) = copy_dir_all(binaries_dir, app_data_dir.clone()) { + log::error!("Failed to copy binaries: {}", e); + } + if let Err(e) = copy_dir_all(themes_dir, app_data_dir.clone()) { + log::error!("Failed to copy themes: {}", e); + } + Ok(()) +} diff --git a/src-tauri/src/core/state.rs b/src-tauri/src/core/state.rs new file mode 100644 index 000000000..925030085 --- /dev/null +++ b/src-tauri/src/core/state.rs @@ -0,0 +1,18 @@ +use std::{collections::HashMap, sync::Arc}; + +use rand::{distributions::Alphanumeric, Rng}; +use rmcp::{service::RunningService, RoleClient}; +use tokio::sync::Mutex; + +#[derive(Default)] +pub struct AppState { + pub app_token: Option, + pub mcp_servers: Arc>>>, +} +pub fn generate_app_token() -> String { + rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(32) + .map(char::from) + .collect() +} diff --git a/src-tauri/src/core/threads.rs b/src-tauri/src/core/threads.rs new file mode 100644 index 000000000..051837992 --- /dev/null +++ b/src-tauri/src/core/threads.rs @@ -0,0 +1,613 @@ +/*! + Thread and Message Persistence Module + + This module provides all logic for managing threads and their messages, including creation, modification, deletion, and listing. + Messages for each thread are persisted in a JSONL file (messages.jsonl) per thread directory. 
+ + **Concurrency and Consistency Guarantee:** + - All operations that write or modify messages for a thread are protected by a global, per-thread asynchronous lock. + - This design ensures that only one operation can write to a thread's messages.jsonl file at a time, preventing race conditions. + - As a result, the messages.jsonl file for each thread is always consistent and never corrupted, even under concurrent access. +*/ + +use serde::{Deserialize, Serialize}; +use std::fs::{self, File}; +use std::io::{BufRead, BufReader, Write}; +use tauri::command; +use tauri::Runtime; +use uuid::Uuid; + +// For async file write serialization +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::Mutex; + +// Global per-thread locks for message file writes +static MESSAGE_LOCKS: Lazy>>>> = + Lazy::new(|| Mutex::new(HashMap::new())); + +use super::utils::{ + ensure_data_dirs, ensure_thread_dir_exists, get_data_dir, get_messages_path, get_thread_dir, + get_thread_metadata_path, THREADS_FILE, +}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Thread { + pub id: String, + pub object: String, + pub title: String, + pub assistants: Vec, + pub created: i64, + pub updated: i64, + pub metadata: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadMessage { + pub id: String, + pub object: String, + pub thread_id: String, + pub assistant_id: Option, + pub attachments: Option>, + pub role: String, + pub content: Vec, + pub status: String, + pub created_at: i64, + pub completed_at: i64, + pub metadata: Option, + pub type_: Option, + pub error_code: Option, + pub tool_call_id: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Attachment { + pub file_id: Option, + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(tag = "type")] +pub enum Tool { + #[serde(rename = "file_search")] + FileSearch, + #[serde(rename = "code_interpreter")] + 
CodeInterpreter, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadContent { + pub type_: String, + pub text: Option, + pub image_url: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ContentValue { + pub value: String, + pub annotations: Vec, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ImageContentValue { + pub detail: Option, + pub url: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadAssistantInfo { + pub assistant_id: String, + pub assistant_name: String, + pub model: ModelInfo, + pub instructions: Option, + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ModelInfo { + pub id: String, + pub name: String, + pub settings: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(tag = "type")] +pub enum AssistantTool { + #[serde(rename = "code_interpreter")] + CodeInterpreter, + #[serde(rename = "retrieval")] + Retrieval, + #[serde(rename = "function")] + Function { + name: String, + description: Option, + parameters: Option, + }, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ThreadState { + pub has_more: bool, + pub waiting_for_response: bool, + pub error: Option, + pub last_message: Option, +} + +/// Lists all threads by reading their metadata from the threads directory. +/// Returns a vector of thread metadata as JSON values. +#[command] +pub async fn list_threads( + app_handle: tauri::AppHandle, +) -> Result, String> { + ensure_data_dirs(app_handle.clone())?; + let data_dir = get_data_dir(app_handle.clone()); + let mut threads = Vec::new(); + + if !data_dir.exists() { + return Ok(threads); + } + + for entry in fs::read_dir(&data_dir).map_err(|e| e.to_string())? 
{
        let entry = entry.map_err(|e| e.to_string())?;
        let path = entry.path();
        if path.is_dir() {
            // Each thread lives in its own directory containing thread.json.
            let thread_metadata_path = path.join(THREADS_FILE);
            if thread_metadata_path.exists() {
                let data = fs::read_to_string(&thread_metadata_path).map_err(|e| e.to_string())?;
                match serde_json::from_str(&data) {
                    Ok(thread) => threads.push(thread),
                    Err(e) => {
                        println!("Failed to parse thread file: {}", e);
                        continue; // skip invalid thread files
                    }
                }
            }
        }
    }

    Ok(threads)
}

/// Creates a new thread, assigns it a unique ID, and persists its metadata.
/// Ensures the thread directory exists and writes thread.json.
///
/// Any "id" supplied by the caller is overwritten with a fresh UUID v4.
/// Returns the thread JSON with the generated id filled in.
#[command]
pub async fn create_thread(
    app_handle: tauri::AppHandle,
    mut thread: serde_json::Value,
) -> Result {
    ensure_data_dirs(app_handle.clone())?;
    // Server-generated identity: never trust a caller-provided id.
    let uuid = Uuid::new_v4().to_string();
    thread["id"] = serde_json::Value::String(uuid.clone());
    let thread_dir = get_thread_dir(app_handle.clone(), &uuid);
    if !thread_dir.exists() {
        fs::create_dir_all(&thread_dir).map_err(|e| e.to_string())?;
    }
    let path = get_thread_metadata_path(app_handle.clone(), &uuid);
    let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
    fs::write(path, data).map_err(|e| e.to_string())?;
    Ok(thread)
}

/// Modifies an existing thread's metadata by overwriting its thread.json file.
/// Returns an error if the thread directory does not exist.
#[command]
pub async fn modify_thread(
    app_handle: tauri::AppHandle,
    thread: serde_json::Value,
) -> Result<(), String> {
    // The thread to overwrite is identified by the "id" field in the payload.
    let thread_id = thread
        .get("id")
        .and_then(|id| id.as_str())
        .ok_or("Missing thread id")?;
    let thread_dir = get_thread_dir(app_handle.clone(), thread_id);
    if !thread_dir.exists() {
        return Err("Thread directory does not exist".to_string());
    }
    // Full replacement: the caller's JSON becomes the new thread.json verbatim.
    let path = get_thread_metadata_path(app_handle.clone(), thread_id);
    let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
    fs::write(path, data).map_err(|e| e.to_string())?;
    Ok(())
}

/// Deletes a thread and all its associated files by removing its directory.
/// Idempotent: deleting a non-existent thread succeeds silently.
#[command]
pub async fn delete_thread(
    app_handle: tauri::AppHandle,
    thread_id: String,
) -> Result<(), String> {
    let thread_dir = get_thread_dir(app_handle.clone(), &thread_id);
    if thread_dir.exists() {
        fs::remove_dir_all(thread_dir).map_err(|e| e.to_string())?;
    }
    Ok(())
}

/// Lists all messages for a given thread by reading and parsing its messages.jsonl file.
/// Returns a vector of message JSON values.
#[command]
pub async fn list_messages(
    app_handle: tauri::AppHandle,
    thread_id: String,
) -> Result, String> {
    let path = get_messages_path(app_handle, &thread_id);
    // A thread with no messages has no file yet — that is an empty list, not an error.
    if !path.exists() {
        return Ok(vec![]);
    }

    let file = File::open(&path).map_err(|e| {
        eprintln!("Error opening file {}: {}", path.display(), e);
        e.to_string()
    })?;
    let reader = BufReader::new(file);

    // One JSON document per line (JSONL). A single malformed line aborts the
    // whole listing with an error (the `?` below propagates it).
    let mut messages = Vec::new();
    for line in reader.lines() {
        let line = line.map_err(|e| {
            eprintln!("Error reading line from file {}: {}", path.display(), e);
            e.to_string()
        })?;
        let message: serde_json::Value = serde_json::from_str(&line).map_err(|e| {
            eprintln!(
                "Error parsing JSON from line in file {}: {}",
                path.display(),
                e
            );
            e.to_string()
        })?;
        messages.push(message);
    }

    Ok(messages)
}

/// Appends a new message to a thread's messages.jsonl file.
/// Uses a per-thread async lock to prevent race conditions and ensure file consistency.
///
/// A missing "id" field is filled with a fresh UUID v4; "thread_id" is
/// required. Returns the (possibly id-augmented) message JSON.
#[command]
pub async fn create_message(
    app_handle: tauri::AppHandle,
    mut message: serde_json::Value,
) -> Result {
    let thread_id = {
        let id = message
            .get("thread_id")
            .and_then(|v| v.as_str())
            .ok_or("Missing thread_id")?;
        id.to_string()
    };
    ensure_thread_dir_exists(app_handle.clone(), &thread_id)?;
    let path = get_messages_path(app_handle.clone(), &thread_id);

    if message.get("id").is_none() {
        let uuid = Uuid::new_v4().to_string();
        message["id"] = serde_json::Value::String(uuid);
    }

    // Acquire per-thread lock before writing
    {
        let mut locks = MESSAGE_LOCKS.lock().await;
        let lock = locks
            .entry(thread_id.to_string())
            .or_insert_with(|| Arc::new(Mutex::new(())))
            .clone();
        drop(locks); // Release the map lock before awaiting the file lock

        let _guard = lock.lock().await;

        // Append-only write: one JSON document per line (JSONL).
        let mut file: File = fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open(path)
            .map_err(|e| e.to_string())?;

        let data = serde_json::to_string(&message).map_err(|e| e.to_string())?;
        writeln!(file, "{}", data).map_err(|e| e.to_string())?;
    }

    Ok(message)
}

/// Modifies an existing message in a thread's messages.jsonl file.
/// Uses a per-thread async lock to prevent race conditions and ensure file consistency.
/// Rewrites the entire messages.jsonl file for the thread.
///
/// NOTE(review): if no message with the given id exists, this returns the
/// input unchanged without writing or reporting an error — confirm callers
/// expect this silent no-op.
#[command]
pub async fn modify_message(
    app_handle: tauri::AppHandle,
    message: serde_json::Value,
) -> Result {
    let thread_id = message
        .get("thread_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing thread_id")?;
    let message_id = message
        .get("id")
        .and_then(|v| v.as_str())
        .ok_or("Missing message id")?;

    // Acquire per-thread lock before modifying
    {
        let mut locks = MESSAGE_LOCKS.lock().await;
        let lock = locks
            .entry(thread_id.to_string())
            .or_insert_with(|| Arc::new(Mutex::new(())))
            .clone();
        drop(locks); // Release the map lock before awaiting the file lock

        let _guard = lock.lock().await;

        // Read-modify-write: load every message, replace the match in place,
        // then rewrite the whole JSONL file.
        let mut messages = list_messages(app_handle.clone(), thread_id.to_string()).await?;
        if let Some(index) = messages
            .iter()
            .position(|m| m.get("id").and_then(|v| v.as_str()) == Some(message_id))
        {
            messages[index] = message.clone();

            // Rewrite all messages
            let path = get_messages_path(app_handle.clone(), thread_id);
            let mut file = File::create(path).map_err(|e| e.to_string())?;
            for msg in messages {
                let data = serde_json::to_string(&msg).map_err(|e| e.to_string())?;
                writeln!(file, "{}", data).map_err(|e| e.to_string())?;
            }
        }
    }
    Ok(message)
}

/// Deletes a message from a thread's messages.jsonl file by message ID.
/// Rewrites the entire messages.jsonl file for the thread.
/// Uses a per-thread async lock to prevent race conditions and ensure file consistency.
+#[command] +pub async fn delete_message( + app_handle: tauri::AppHandle, + thread_id: String, + message_id: String, +) -> Result<(), String> { + // Acquire per-thread lock before modifying + { + let mut locks = MESSAGE_LOCKS.lock().await; + let lock = locks + .entry(thread_id.to_string()) + .or_insert_with(|| Arc::new(Mutex::new(()))) + .clone(); + drop(locks); // Release the map lock before awaiting the file lock + + let _guard = lock.lock().await; + + let mut messages = list_messages(app_handle.clone(), thread_id.clone()).await?; + messages.retain(|m| m.get("id").and_then(|v| v.as_str()) != Some(message_id.as_str())); + + // Rewrite remaining messages + let path = get_messages_path(app_handle.clone(), &thread_id); + let mut file = File::create(path).map_err(|e| e.to_string())?; + for msg in messages { + let data = serde_json::to_string(&msg).map_err(|e| e.to_string())?; + writeln!(file, "{}", data).map_err(|e| e.to_string())?; + } + } + + Ok(()) +} + +/// Retrieves the first assistant associated with a thread. +/// Returns an error if the thread or assistant is not found. +#[command] +pub async fn get_thread_assistant( + app_handle: tauri::AppHandle, + thread_id: String, +) -> Result { + let path = get_thread_metadata_path(app_handle, &thread_id); + if !path.exists() { + return Err("Thread not found".to_string()); + } + let data = fs::read_to_string(&path).map_err(|e| e.to_string())?; + let thread: serde_json::Value = serde_json::from_str(&data).map_err(|e| e.to_string())?; + if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) { + if let Some(first) = assistants.get(0) { + Ok(first.clone()) + } else { + Err("Assistant not found".to_string()) + } + } else { + Err("Assistant not found".to_string()) + } +} + +/// Adds a new assistant to a thread's metadata. +/// Updates thread.json with the new assistant information. 
#[command]
pub async fn create_thread_assistant(
    app_handle: tauri::AppHandle,
    thread_id: String,
    assistant: serde_json::Value,
) -> Result {
    let path = get_thread_metadata_path(app_handle.clone(), &thread_id);
    if !path.exists() {
        return Err("Thread not found".to_string());
    }
    let mut thread: serde_json::Value = {
        let data = fs::read_to_string(&path).map_err(|e| e.to_string())?;
        serde_json::from_str(&data).map_err(|e| e.to_string())?
    };
    // Append to the existing assistants array, or create it if the thread
    // metadata has none.
    if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) {
        assistants.push(assistant.clone());
    } else {
        thread["assistants"] = serde_json::Value::Array(vec![assistant.clone()]);
    }
    let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
    fs::write(&path, data).map_err(|e| e.to_string())?;
    Ok(assistant)
}

/// Modifies an existing assistant's information in a thread's metadata.
/// Updates thread.json with the modified assistant data.
///
/// The target is matched by the "assistant_id" field. If no assistant with
/// that id exists, nothing is written and the input is returned unchanged.
#[command]
pub async fn modify_thread_assistant(
    app_handle: tauri::AppHandle,
    thread_id: String,
    assistant: serde_json::Value,
) -> Result {
    let path = get_thread_metadata_path(app_handle.clone(), &thread_id);
    if !path.exists() {
        return Err("Thread not found".to_string());
    }
    let mut thread: serde_json::Value = {
        let data = fs::read_to_string(&path).map_err(|e| e.to_string())?;
        serde_json::from_str(&data).map_err(|e| e.to_string())?
    };
    let assistant_id = assistant
        .get("assistant_id")
        .and_then(|v| v.as_str())
        .ok_or("Missing assistant_id")?;
    if let Some(assistants) = thread
        .get_mut("assistants")
        .and_then(|a: &mut serde_json::Value| a.as_array_mut())
    {
        if let Some(index) = assistants
            .iter()
            .position(|a| a.get("assistant_id").and_then(|v| v.as_str()) == Some(assistant_id))
        {
            // Replace in place and persist only when a match was found.
            assistants[index] = assistant.clone();
            let data = serde_json::to_string_pretty(&thread).map_err(|e| e.to_string())?;
            fs::write(&path, data).map_err(|e| e.to_string())?;
        }
    }
    Ok(assistant)
}

#[cfg(test)]
mod tests {
    use crate::core::cmd::get_jan_data_folder_path;

    use super::*;
    use serde_json::json;
    use std::fs;
    use std::path::PathBuf;
    use tauri::test::{mock_app, MockRuntime};

    // Helper to create a mock app handle with a temp data dir
    fn mock_app_with_temp_data_dir() -> (tauri::App, PathBuf) {
        let app = mock_app();
        let data_dir = get_jan_data_folder_path(app.handle().clone());
        println!("Mock app data dir: {}", data_dir.display());
        // Patch get_data_dir to use temp dir (requires get_data_dir to be overridable or injectable)
        // For now, we assume get_data_dir uses tauri::api::path::app_data_dir(&app_handle)
        // and that we can set the environment variable to redirect it.
+ (app, data_dir) + } + + #[tokio::test] + async fn test_create_and_list_threads() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Create a thread + let thread = json!({ + "object": "thread", + "title": "Test Thread", + "assistants": [], + "created": 1234567890, + "updated": 1234567890, + "metadata": null + }); + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + assert_eq!(created["title"], "Test Thread"); + + // List threads + let threads = list_threads(app.handle().clone()).await.unwrap(); + assert!(threads.len() > 0); + + // Clean up + fs::remove_dir_all(data_dir).unwrap(); + } + + #[tokio::test] + async fn test_create_and_list_messages() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Create a thread first + let thread = json!({ + "object": "thread", + "title": "Msg Thread", + "assistants": [], + "created": 123, + "updated": 123, + "metadata": null + }); + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Create a message + let message = json!({ + "object": "message", + "thread_id": thread_id, + "assistant_id": null, + "attachments": null, + "role": "user", + "content": [], + "status": "sent", + "created_at": 123, + "completed_at": 123, + "metadata": null, + "type_": null, + "error_code": null, + "tool_call_id": null + }); + let created_msg = create_message(app.handle().clone(), message).await.unwrap(); + assert_eq!(created_msg["role"], "user"); + + // List messages + let messages = list_messages(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert!(messages.len() > 0); + assert_eq!(messages[0]["role"], "user"); + + // Clean up + fs::remove_dir_all(data_dir).unwrap(); + } + + #[tokio::test] + async fn test_create_and_get_thread_assistant() { + let (app, data_dir) = mock_app_with_temp_data_dir(); + // Create a thread + let thread = json!({ + "object": "thread", + 
"title": "Assistant Thread", + "assistants": [], + "created": 1, + "updated": 1, + "metadata": null + }); + let created = create_thread(app.handle().clone(), thread.clone()) + .await + .unwrap(); + let thread_id = created["id"].as_str().unwrap().to_string(); + + // Add assistant + let assistant = json!({ + "id": "assistant-1", + "assistant_name": "Test Assistant", + "model": { + "id": "model-1", + "name": "Test Model", + "settings": json!({}) + }, + "instructions": null, + "tools": null + }); + let _ = create_thread_assistant(app.handle().clone(), thread_id.clone(), assistant.clone()) + .await + .unwrap(); + + // Get assistant + let got = get_thread_assistant(app.handle().clone(), thread_id.clone()) + .await + .unwrap(); + assert_eq!(got["assistant_name"], "Test Assistant"); + + // Clean up + fs::remove_dir_all(data_dir).unwrap(); + } +} diff --git a/src-tauri/src/core/utils/mod.rs b/src-tauri/src/core/utils/mod.rs new file mode 100644 index 000000000..7f80e6f3a --- /dev/null +++ b/src-tauri/src/core/utils/mod.rs @@ -0,0 +1,48 @@ +use std::fs; +use std::path::PathBuf; +use tauri::Runtime; + +use super::cmd::get_jan_data_folder_path; + +pub const THREADS_DIR: &str = "threads"; +pub const THREADS_FILE: &str = "thread.json"; +pub const MESSAGES_FILE: &str = "messages.jsonl"; + +pub fn get_data_dir(app_handle: tauri::AppHandle) -> PathBuf { + get_jan_data_folder_path(app_handle).join(THREADS_DIR) +} + +pub fn get_thread_dir(app_handle: tauri::AppHandle, thread_id: &str) -> PathBuf { + get_data_dir(app_handle).join(thread_id) +} + +pub fn get_thread_metadata_path( + app_handle: tauri::AppHandle, + thread_id: &str, +) -> PathBuf { + get_thread_dir(app_handle, thread_id).join(THREADS_FILE) +} + +pub fn get_messages_path(app_handle: tauri::AppHandle, thread_id: &str) -> PathBuf { + get_thread_dir(app_handle, thread_id).join(MESSAGES_FILE) +} + +pub fn ensure_data_dirs(app_handle: tauri::AppHandle) -> Result<(), String> { + let data_dir = get_data_dir(app_handle.clone()); + 
if !data_dir.exists() { + fs::create_dir_all(&data_dir).map_err(|e| e.to_string())?; + } + Ok(()) +} + +pub fn ensure_thread_dir_exists( + app_handle: tauri::AppHandle, + thread_id: &str, +) -> Result<(), String> { + ensure_data_dirs(app_handle.clone())?; + let thread_dir = get_thread_dir(app_handle, thread_id); + if !thread_dir.exists() { + fs::create_dir(&thread_dir).map_err(|e| e.to_string())?; + } + Ok(()) +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs new file mode 100644 index 000000000..afe5dc735 --- /dev/null +++ b/src-tauri/src/lib.rs @@ -0,0 +1,101 @@ +mod core; +use core::{ + cmd::get_jan_data_folder_path, + setup::{self, setup_engine_binaries, setup_mcp, setup_sidecar}, + state::{generate_app_token, AppState}, +}; +use std::{collections::HashMap, sync::Arc}; + +use tauri::Emitter; +use tokio::sync::Mutex; + +use reqwest::blocking::Client; + +#[cfg_attr(mobile, tauri::mobile_entry_point)] +pub fn run() { + tauri::Builder::default() + .plugin(tauri_plugin_http::init()) + .plugin(tauri_plugin_store::Builder::new().build()) + .plugin(tauri_plugin_shell::init()) + .invoke_handler(tauri::generate_handler![ + // FS commands - Deperecate soon + core::fs::join_path, + core::fs::mkdir, + core::fs::exists_sync, + core::fs::readdir_sync, + core::fs::read_file_sync, + core::fs::rm, + // App commands + core::cmd::get_themes, + core::cmd::get_app_configurations, + core::cmd::get_active_extensions, + core::cmd::get_user_home_path, + core::cmd::update_app_configuration, + core::cmd::get_jan_data_folder_path, + core::cmd::get_jan_extensions_path, + core::cmd::relaunch, + core::cmd::open_app_directory, + core::cmd::open_file_explorer, + core::cmd::install_extensions, + core::cmd::read_theme, + core::cmd::app_token, + core::cmd::start_server, + core::cmd::stop_server, + core::cmd::save_mcp_configs, + core::cmd::get_mcp_configs, + // MCP commands + core::cmd::get_tools, + core::cmd::call_tool, + core::mcp::restart_mcp_servers, + // Threads + 
core::threads::list_threads, + core::threads::create_thread, + core::threads::modify_thread, + core::threads::delete_thread, + core::threads::list_messages, + core::threads::create_message, + core::threads::modify_message, + core::threads::delete_message, + core::threads::get_thread_assistant, + core::threads::create_thread_assistant, + core::threads::modify_thread_assistant + ]) + .manage(AppState { + app_token: Some(generate_app_token()), + mcp_servers: Arc::new(Mutex::new(HashMap::new())), + }) + .setup(|app| { + app.handle().plugin( + tauri_plugin_log::Builder::default() + .targets([if cfg!(debug_assertions) { + tauri_plugin_log::Target::new(tauri_plugin_log::TargetKind::Stdout) + } else { + tauri_plugin_log::Target::new(tauri_plugin_log::TargetKind::Folder { + path: get_jan_data_folder_path(app.handle().clone()).join("logs"), + file_name: Some("app".to_string()), + }) + }]) + .build(), + )?; + // Install extensions + if let Err(e) = setup::install_extensions(app.handle().clone(), false) { + log::error!("Failed to install extensions: {}", e); + } + setup_mcp(app); + setup_sidecar(app).expect("Failed to setup sidecar"); + setup_engine_binaries(app).expect("Failed to setup engine binaries"); + Ok(()) + }) + .on_window_event(|window, event| match event { + tauri::WindowEvent::CloseRequested { .. } => { + let client = Client::new(); + let url = "http://127.0.0.1:39291/processManager/destroy"; + let _ = client.delete(url).send(); + + window.emit("kill-sidecar", ()).unwrap(); + } + _ => {} + }) + .run(tauri::generate_context!()) + .expect("error while running tauri application"); +} diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs new file mode 100644 index 000000000..69c3a72ec --- /dev/null +++ b/src-tauri/src/main.rs @@ -0,0 +1,6 @@ +// Prevents additional console window on Windows in release, DO NOT REMOVE!! 
+#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] + +fn main() { + app_lib::run(); +} diff --git a/src-tauri/tauri.bundle.windows.nsis.template b/src-tauri/tauri.bundle.windows.nsis.template new file mode 100644 index 000000000..a1e97e525 --- /dev/null +++ b/src-tauri/tauri.bundle.windows.nsis.template @@ -0,0 +1,964 @@ +Unicode true +ManifestDPIAware true +; Add in `dpiAwareness` `PerMonitorV2` to manifest for Windows 10 1607+ (note this should not affect lower versions since they should be able to ignore this and pick up `dpiAware` `true` set by `ManifestDPIAware true`) +; Currently undocumented on NSIS's website but is in the Docs folder of source tree, see +; https://github.com/kichik/nsis/blob/5fc0b87b819a9eec006df4967d08e522ddd651c9/Docs/src/attributes.but#L286-L300 +; https://github.com/tauri-apps/tauri/pull/10106 +ManifestDPIAwareness PerMonitorV2 + +!if "lzma" == "none" + SetCompress off +!else + ; Set the compression algorithm. We default to LZMA. + SetCompressor /SOLID "lzma" +!endif + +!include MUI2.nsh +!include FileFunc.nsh +!include x64.nsh +!include WordFunc.nsh +!include "utils.nsh" +!include "FileAssociation.nsh" +!include "Win\COM.nsh" +!include "Win\Propkey.nsh" +!include "StrFunc.nsh" +${StrCase} +${StrLoc} + + +!define WEBVIEW2APPGUID "{F3017226-FE2A-4295-8BDF-00C3A9A7E4C5}" + +!define MANUFACTURER "ai" +!define PRODUCTNAME "jan_productname" +!define VERSION "jan_version" +!define VERSIONWITHBUILD "jan_build" +!define HOMEPAGE "" +!define INSTALLMODE "currentUser" +!define LICENSE "" +!define INSTALLERICON "D:\a\jan\jan\src-tauri\icons\icon.ico" +!define SIDEBARIMAGE "" +!define HEADERIMAGE "" +!define MAINBINARYNAME "jan_mainbinaryname" +!define MAINBINARYSRCPATH "D:\a\jan\jan\src-tauri\target\release\jan_mainbinaryname.exe" +!define BUNDLEID "jan_mainbinaryname.ai.app" +!define COPYRIGHT "" +!define OUTFILE "nsis-output.exe" +!define ARCH "x64" +!define ADDITIONALPLUGINSPATH 
"D:\a\jan\jan\src-tauri\target\release\nsis\x64\Plugins\x86-unicode\additional" +!define ALLOWDOWNGRADES "true" +!define DISPLAYLANGUAGESELECTOR "false" +!define INSTALLWEBVIEW2MODE "downloadBootstrapper" +!define WEBVIEW2INSTALLERARGS "/silent" +!define WEBVIEW2BOOTSTRAPPERPATH "" +!define WEBVIEW2INSTALLERPATH "" +!define MINIMUMWEBVIEW2VERSION "" +!define UNINSTKEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCTNAME}" +!define MANUKEY "Software\${MANUFACTURER}" +!define MANUPRODUCTKEY "${MANUKEY}\${PRODUCTNAME}" +!define UNINSTALLERSIGNCOMMAND "$\"powershell$\" $\"-ExecutionPolicy$\" $\"Bypass$\" $\"-File$\" $\"./sign.ps1$\" $\"%1$\"" +!define ESTIMATEDSIZE "793795" +!define STARTMENUFOLDER "" + +Var PassiveMode +Var UpdateMode +Var NoShortcutMode +Var WixMode +Var OldMainBinaryName + +Name "${PRODUCTNAME}" +BrandingText "${COPYRIGHT}" +OutFile "${OUTFILE}" + +ShowInstDetails nevershow +ShowUninstDetails nevershow + +; We don't actually use this value as default install path, +; it's just for nsis to append the product name folder in the directory selector +; https://nsis.sourceforge.io/Reference/InstallDir +!define PLACEHOLDER_INSTALL_DIR "placeholder\${PRODUCTNAME}" +InstallDir "${PLACEHOLDER_INSTALL_DIR}" + +VIProductVersion "${VERSIONWITHBUILD}" +VIAddVersionKey "ProductName" "${PRODUCTNAME}" +VIAddVersionKey "FileDescription" "${PRODUCTNAME}" +VIAddVersionKey "LegalCopyright" "${COPYRIGHT}" +VIAddVersionKey "FileVersion" "${VERSION}" +VIAddVersionKey "ProductVersion" "${VERSION}" + +# additional plugins +!addplugindir "${ADDITIONALPLUGINSPATH}" + +; Uninstaller signing command +!if "${UNINSTALLERSIGNCOMMAND}" != "" + !uninstfinalize '${UNINSTALLERSIGNCOMMAND}' +!endif + +; Handle install mode, `perUser`, `perMachine` or `both` +!if "${INSTALLMODE}" == "perMachine" + RequestExecutionLevel highest +!endif + +!if "${INSTALLMODE}" == "currentUser" + RequestExecutionLevel user +!endif + +!if "${INSTALLMODE}" == "both" + !define MULTIUSER_MUI + 
!define MULTIUSER_INSTALLMODE_INSTDIR "${PRODUCTNAME}" + !define MULTIUSER_INSTALLMODE_COMMANDLINE + !if "${ARCH}" == "x64" + !define MULTIUSER_USE_PROGRAMFILES64 + !else if "${ARCH}" == "arm64" + !define MULTIUSER_USE_PROGRAMFILES64 + !endif + !define MULTIUSER_INSTALLMODE_DEFAULT_REGISTRY_KEY "${UNINSTKEY}" + !define MULTIUSER_INSTALLMODE_DEFAULT_REGISTRY_VALUENAME "CurrentUser" + !define MULTIUSER_INSTALLMODEPAGE_SHOWUSERNAME + !define MULTIUSER_INSTALLMODE_FUNCTION RestorePreviousInstallLocation + !define MULTIUSER_EXECUTIONLEVEL Highest + !include MultiUser.nsh +!endif + +; Installer icon +!if "${INSTALLERICON}" != "" + !define MUI_ICON "${INSTALLERICON}" +!endif + +; Installer sidebar image +!if "${SIDEBARIMAGE}" != "" + !define MUI_WELCOMEFINISHPAGE_BITMAP "${SIDEBARIMAGE}" +!endif + +; Installer header image +!if "${HEADERIMAGE}" != "" + !define MUI_HEADERIMAGE + !define MUI_HEADERIMAGE_BITMAP "${HEADERIMAGE}" +!endif + +; Define registry key to store installer language +!define MUI_LANGDLL_REGISTRY_ROOT "HKCU" +!define MUI_LANGDLL_REGISTRY_KEY "${MANUPRODUCTKEY}" +!define MUI_LANGDLL_REGISTRY_VALUENAME "Installer Language" + +; Installer pages, must be ordered as they appear +; 1. Welcome Page +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_WELCOME + +; 2. License Page (if defined) +!if "${LICENSE}" != "" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !insertmacro MUI_PAGE_LICENSE "${LICENSE}" +!endif + +; 3. Install mode (if it is set to `both`) +!if "${INSTALLMODE}" == "both" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !insertmacro MULTIUSER_PAGE_INSTALLMODE +!endif + +; 4. Custom page to ask user if he wants to reinstall/uninstall +; only if a previous installation was detected +Var ReinstallPageCheck +Page custom PageReinstall PageLeaveReinstall +Function PageReinstall + ; Uninstall previous WiX installation if exists. 
+ ; + ; A WiX installer stores the installation info in registry + ; using a UUID and so we have to loop through all keys under + ; `HKLM\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall` + ; and check if `DisplayName` and `Publisher` keys match ${PRODUCTNAME} and ${MANUFACTURER} + ; + ; This has a potential issue that there maybe another installation that matches + ; our ${PRODUCTNAME} and ${MANUFACTURER} but wasn't installed by our WiX installer, + ; however, this should be fine since the user will have to confirm the uninstallation + ; and they can chose to abort it if doesn't make sense. + StrCpy $0 0 + wix_loop: + EnumRegKey $1 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall" $0 + StrCmp $1 "" wix_loop_done ; Exit loop if there is no more keys to loop on + IntOp $0 $0 + 1 + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "DisplayName" + ReadRegStr $R1 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "Publisher" + StrCmp "$R0$R1" "${PRODUCTNAME}${MANUFACTURER}" 0 wix_loop + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" "UninstallString" + ${StrCase} $R1 $R0 "L" + ${StrLoc} $R0 $R1 "msiexec" ">" + StrCmp $R0 0 0 wix_loop_done + StrCpy $WixMode 1 + StrCpy $R6 "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\$1" + Goto compare_version + wix_loop_done: + + ; Check if there is an existing installation, if not, abort the reinstall page + ReadRegStr $R0 SHCTX "${UNINSTKEY}" "" + ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString" + ${IfThen} "$R0$R1" == "" ${|} Abort ${|} + + ; Compare this installar version with the existing installation + ; and modify the messages presented to the user accordingly + compare_version: + StrCpy $R4 "$(older)" + ${If} $WixMode = 1 + ReadRegStr $R0 HKLM "$R6" "DisplayVersion" + ${Else} + ReadRegStr $R0 SHCTX "${UNINSTKEY}" "DisplayVersion" + ${EndIf} + ${IfThen} $R0 == "" ${|} StrCpy $R4 "$(unknown)" ${|} + + nsis_tauri_utils::SemverCompare 
"${VERSION}" $R0 + Pop $R0 + ; Reinstalling the same version + ${If} $R0 = 0 + StrCpy $R1 "$(alreadyInstalledLong)" + StrCpy $R2 "$(addOrReinstall)" + StrCpy $R3 "$(uninstallApp)" + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(chooseMaintenanceOption)" + ; Upgrading + ${ElseIf} $R0 = 1 + StrCpy $R1 "$(olderOrUnknownVersionInstalled)" + StrCpy $R2 "$(uninstallBeforeInstalling)" + StrCpy $R3 "$(dontUninstall)" + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(choowHowToInstall)" + ; Downgrading + ${ElseIf} $R0 = -1 + StrCpy $R1 "$(newerVersionInstalled)" + StrCpy $R2 "$(uninstallBeforeInstalling)" + !if "${ALLOWDOWNGRADES}" == "true" + StrCpy $R3 "$(dontUninstall)" + !else + StrCpy $R3 "$(dontUninstallDowngrade)" + !endif + !insertmacro MUI_HEADER_TEXT "$(alreadyInstalled)" "$(choowHowToInstall)" + ${Else} + Abort + ${EndIf} + + ; Skip showing the page if passive + ; + ; Note that we don't call this earlier at the begining + ; of this function because we need to populate some variables + ; related to current installed version if detected and whether + ; we are downgrading or not. 
+ ${If} $PassiveMode = 1 + Call PageLeaveReinstall + ${Else} + nsDialogs::Create 1018 + Pop $R4 + ${IfThen} $(^RTL) = 1 ${|} nsDialogs::SetRTL $(^RTL) ${|} + + ${NSD_CreateLabel} 0 0 100% 24u $R1 + Pop $R1 + + ${NSD_CreateRadioButton} 30u 50u -30u 8u $R2 + Pop $R2 + ${NSD_OnClick} $R2 PageReinstallUpdateSelection + + ${NSD_CreateRadioButton} 30u 70u -30u 8u $R3 + Pop $R3 + ; Disable this radio button if downgrading and downgrades are disabled + !if "${ALLOWDOWNGRADES}" == "false" + ${IfThen} $R0 = -1 ${|} EnableWindow $R3 0 ${|} + !endif + ${NSD_OnClick} $R3 PageReinstallUpdateSelection + + ; Check the first radio button if this the first time + ; we enter this page or if the second button wasn't + ; selected the last time we were on this page + ${If} $ReinstallPageCheck <> 2 + SendMessage $R2 ${BM_SETCHECK} ${BST_CHECKED} 0 + ${Else} + SendMessage $R3 ${BM_SETCHECK} ${BST_CHECKED} 0 + ${EndIf} + + ${NSD_SetFocus} $R2 + nsDialogs::Show + ${EndIf} +FunctionEnd +Function PageReinstallUpdateSelection + ${NSD_GetState} $R2 $R1 + ${If} $R1 == ${BST_CHECKED} + StrCpy $ReinstallPageCheck 1 + ${Else} + StrCpy $ReinstallPageCheck 2 + ${EndIf} +FunctionEnd +Function PageLeaveReinstall + ${NSD_GetState} $R2 $R1 + + ; If migrating from Wix, always uninstall + ${If} $WixMode = 1 + Goto reinst_uninstall + ${EndIf} + + ; In update mode, always proceeds without uninstalling + ${If} $UpdateMode = 1 + Goto reinst_done + ${EndIf} + + ; $R0 holds whether same(0)/upgrading(1)/downgrading(-1) version + ; $R1 holds the radio buttons state: + ; 1 => first choice was selected + ; 0 => second choice was selected + ${If} $R0 = 0 ; Same version, proceed + ${If} $R1 = 1 ; User chose to add/reinstall + Goto reinst_done + ${Else} ; User chose to uninstall + Goto reinst_uninstall + ${EndIf} + ${ElseIf} $R0 = 1 ; Upgrading + ${If} $R1 = 1 ; User chose to uninstall + Goto reinst_uninstall + ${Else} + Goto reinst_done ; User chose NOT to uninstall + ${EndIf} + ${ElseIf} $R0 = -1 ; Downgrading + 
${If} $R1 = 1 ; User chose to uninstall + Goto reinst_uninstall + ${Else} + Goto reinst_done ; User chose NOT to uninstall + ${EndIf} + ${EndIf} + + reinst_uninstall: + HideWindow + ClearErrors + + ${If} $WixMode = 1 + ReadRegStr $R1 HKLM "$R6" "UninstallString" + ExecWait '$R1' $0 + ${Else} + ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" "" + ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString" + ${IfThen} $UpdateMode = 1 ${|} StrCpy $R1 "$R1 /UPDATE" ${|} ; append /UPDATE + ${IfThen} $PassiveMode = 1 ${|} StrCpy $R1 "$R1 /P" ${|} ; append /P + StrCpy $R1 "$R1 _?=$4" ; append uninstall directory + ExecWait '$R1' $0 + ${EndIf} + + BringToFront + + ${IfThen} ${Errors} ${|} StrCpy $0 2 ${|} ; ExecWait failed, set fake exit code + + ${If} $0 <> 0 + ${OrIf} ${FileExists} "$INSTDIR\${MAINBINARYNAME}.exe" + ; User cancelled wix uninstaller? return to select un/reinstall page + ${If} $WixMode = 1 + ${AndIf} $0 = 1602 + Abort + ${EndIf} + + ; User cancelled NSIS uninstaller? return to select un/reinstall page + ${If} $0 = 1 + Abort + ${EndIf} + + ; Other erros? show generic error message and return to select un/reinstall page + MessageBox MB_ICONEXCLAMATION "$(unableToUninstall)" + Abort + ${EndIf} + reinst_done: +FunctionEnd + +; 5. Choose install directory page +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_DIRECTORY + +; 6. Start menu shortcut page +Var AppStartMenuFolder +!if "${STARTMENUFOLDER}" != "" + !define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive + !define MUI_STARTMENUPAGE_DEFAULTFOLDER "${STARTMENUFOLDER}" +!else + !define MUI_PAGE_CUSTOMFUNCTION_PRE Skip +!endif +!insertmacro MUI_PAGE_STARTMENU Application $AppStartMenuFolder + +; 7. Installation page +!insertmacro MUI_PAGE_INSTFILES + +; 8. Finish page +; +; Don't auto jump to finish page after installation page, +; because the installation page has useful info that can be used debug any issues with the installer. 
+!define MUI_FINISHPAGE_NOAUTOCLOSE +; Use show readme button in the finish page as a button create a desktop shortcut +!define MUI_FINISHPAGE_SHOWREADME +!define MUI_FINISHPAGE_SHOWREADME_TEXT "$(createDesktop)" +!define MUI_FINISHPAGE_SHOWREADME_FUNCTION CreateOrUpdateDesktopShortcut +; Show run app after installation. +!define MUI_FINISHPAGE_RUN +!define MUI_FINISHPAGE_RUN_FUNCTION RunMainBinary +!define MUI_PAGE_CUSTOMFUNCTION_PRE SkipIfPassive +!insertmacro MUI_PAGE_FINISH + +Function RunMainBinary + nsis_tauri_utils::RunAsUser "$INSTDIR\${MAINBINARYNAME}.exe" "" +FunctionEnd + +; Uninstaller Pages +; 1. Confirm uninstall page +Var DeleteAppDataCheckbox +Var DeleteAppDataCheckboxState +!define /ifndef WS_EX_LAYOUTRTL 0x00400000 +!define MUI_PAGE_CUSTOMFUNCTION_SHOW un.ConfirmShow +Function un.ConfirmShow ; Add add a `Delete app data` check box + ; $1 inner dialog HWND + ; $2 window DPI + ; $3 style + ; $4 x + ; $5 y + ; $6 width + ; $7 height + FindWindow $1 "#32770" "" $HWNDPARENT ; Find inner dialog + System::Call "user32::GetDpiForWindow(p r1) i .r2" + ${If} $(^RTL) = 1 + StrCpy $3 "${__NSD_CheckBox_EXSTYLE} | ${WS_EX_LAYOUTRTL}" + IntOp $4 50 * $2 + ${Else} + StrCpy $3 "${__NSD_CheckBox_EXSTYLE}" + IntOp $4 0 * $2 + ${EndIf} + IntOp $5 100 * $2 + IntOp $6 400 * $2 + IntOp $7 25 * $2 + IntOp $4 $4 / 96 + IntOp $5 $5 / 96 + IntOp $6 $6 / 96 + IntOp $7 $7 / 96 + System::Call 'user32::CreateWindowEx(i r3, w "${__NSD_CheckBox_CLASS}", w "$(deleteAppData)", i ${__NSD_CheckBox_STYLE}, i r4, i r5, i r6, i r7, p r1, i0, i0, i0) i .s' + Pop $DeleteAppDataCheckbox + SendMessage $HWNDPARENT ${WM_GETFONT} 0 0 $1 + SendMessage $DeleteAppDataCheckbox ${WM_SETFONT} $1 1 +FunctionEnd +!define MUI_PAGE_CUSTOMFUNCTION_LEAVE un.ConfirmLeave +Function un.ConfirmLeave + SendMessage $DeleteAppDataCheckbox ${BM_GETCHECK} 0 0 $DeleteAppDataCheckboxState +FunctionEnd +!define MUI_PAGE_CUSTOMFUNCTION_PRE un.SkipIfPassive +!insertmacro MUI_UNPAGE_CONFIRM + +; 2. 
Uninstalling Page +!insertmacro MUI_UNPAGE_INSTFILES + +;Languages +!insertmacro MUI_LANGUAGE "English" +!insertmacro MUI_RESERVEFILE_LANGDLL + !include "D:\a\jan\jan\src-tauri\target\release\nsis\x64\English.nsh" + +Function .onInit + ${GetOptions} $CMDLINE "/P" $PassiveMode + ${IfNot} ${Errors} + StrCpy $PassiveMode 1 + ${EndIf} + ; always run in passive mode + StrCpy $PassiveMode 1 + + ${GetOptions} $CMDLINE "/NS" $NoShortcutMode + ${IfNot} ${Errors} + StrCpy $NoShortcutMode 1 + ${EndIf} + + ${GetOptions} $CMDLINE "/UPDATE" $UpdateMode + ${IfNot} ${Errors} + StrCpy $UpdateMode 1 + ${EndIf} + + !if "${DISPLAYLANGUAGESELECTOR}" == "true" + !insertmacro MUI_LANGDLL_DISPLAY + !endif + + !insertmacro SetContext + + ${If} $INSTDIR == "${PLACEHOLDER_INSTALL_DIR}" + ; Set default install location + !if "${INSTALLMODE}" == "perMachine" + ${If} ${RunningX64} + !if "${ARCH}" == "x64" + StrCpy $INSTDIR "$PROGRAMFILES64\${PRODUCTNAME}" + !else if "${ARCH}" == "arm64" + StrCpy $INSTDIR "$PROGRAMFILES64\${PRODUCTNAME}" + !else + StrCpy $INSTDIR "$PROGRAMFILES\${PRODUCTNAME}" + !endif + ${Else} + StrCpy $INSTDIR "$PROGRAMFILES\${PRODUCTNAME}" + ${EndIf} + !else if "${INSTALLMODE}" == "currentUser" + StrCpy $INSTDIR "$LOCALAPPDATA\Programs\${PRODUCTNAME}" + !endif + + Call RestorePreviousInstallLocation + ${EndIf} + + ; Remove old Jan if it exists + ${If} ${FileExists} "$INSTDIR\LICENSE.electron.txt" + DeleteRegKey HKLM "Software\${PRODUCTNAME}" + RMDir /r "$INSTDIR" + Delete "$INSTDIR\*.*" + ${EndIf} + + !if "${INSTALLMODE}" == "both" + !insertmacro MULTIUSER_INIT + !endif +FunctionEnd + + +Section EarlyChecks + ; Abort silent installer if downgrades is disabled + !if "${ALLOWDOWNGRADES}" == "false" + ${If} ${Silent} + ; If downgrading + ${If} $R0 = -1 + System::Call 'kernel32::AttachConsole(i -1)i.r0' + ${If} $0 <> 0 + System::Call 'kernel32::GetStdHandle(i -11)i.r0' + System::call 'kernel32::SetConsoleTextAttribute(i r0, i 0x0004)' ; set red color + FileWrite $0 
"$(silentDowngrades)" + ${EndIf} + Abort + ${EndIf} + ${EndIf} + !endif + +SectionEnd + +Section WebView2 + ; Check if Webview2 is already installed and skip this section + ${If} ${RunningX64} + ReadRegStr $4 HKLM "SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${Else} + ReadRegStr $4 HKLM "SOFTWARE\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${EndIf} + ${If} $4 == "" + ReadRegStr $4 HKCU "SOFTWARE\Microsoft\EdgeUpdate\Clients\${WEBVIEW2APPGUID}" "pv" + ${EndIf} + + ${If} $4 == "" + ; Webview2 installation + ; + ; Skip if updating + ${If} $UpdateMode <> 1 + !if "${INSTALLWEBVIEW2MODE}" == "downloadBootstrapper" + Delete "$TEMP\MicrosoftEdgeWebview2Setup.exe" + DetailPrint "$(webview2Downloading)" + NSISdl::download "https://go.microsoft.com/fwlink/p/?LinkId=2124703" "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Pop $0 + ${If} $0 == "success" + DetailPrint "$(webview2DownloadSuccess)" + ${Else} + DetailPrint "$(webview2DownloadError)" + Abort "$(webview2AbortError)" + ${EndIf} + StrCpy $6 "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Goto install_webview2 + !endif + + !if "${INSTALLWEBVIEW2MODE}" == "embedBootstrapper" + Delete "$TEMP\MicrosoftEdgeWebview2Setup.exe" + File "/oname=$TEMP\MicrosoftEdgeWebview2Setup.exe" "${WEBVIEW2BOOTSTRAPPERPATH}" + DetailPrint "$(installingWebview2)" + StrCpy $6 "$TEMP\MicrosoftEdgeWebview2Setup.exe" + Goto install_webview2 + !endif + + !if "${INSTALLWEBVIEW2MODE}" == "offlineInstaller" + Delete "$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" + File "/oname=$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" "${WEBVIEW2INSTALLERPATH}" + DetailPrint "$(installingWebview2)" + StrCpy $6 "$TEMP\MicrosoftEdgeWebView2RuntimeInstaller.exe" + Goto install_webview2 + !endif + + Goto webview2_done + + install_webview2: + DetailPrint "$(installingWebview2)" + ; $6 holds the path to the webview2 installer + ExecWait "$6 ${WEBVIEW2INSTALLERARGS} /install" $1 + ${If} $1 = 0 + DetailPrint "$(webview2InstallSuccess)" 
+ ${Else} + DetailPrint "$(webview2InstallError)" + Abort "$(webview2AbortError)" + ${EndIf} + webview2_done: + ${EndIf} + ${Else} + !if "${MINIMUMWEBVIEW2VERSION}" != "" + ${VersionCompare} "${MINIMUMWEBVIEW2VERSION}" "$4" $R0 + ${If} $R0 = 1 + update_webview: + DetailPrint "$(installingWebview2)" + ${If} ${RunningX64} + ReadRegStr $R1 HKLM "SOFTWARE\WOW6432Node\Microsoft\EdgeUpdate" "path" + ${Else} + ReadRegStr $R1 HKLM "SOFTWARE\Microsoft\EdgeUpdate" "path" + ${EndIf} + ${If} $R1 == "" + ReadRegStr $R1 HKCU "SOFTWARE\Microsoft\EdgeUpdate" "path" + ${EndIf} + ${If} $R1 != "" + ; Chromium updater docs: https://source.chromium.org/chromium/chromium/src/+/main:docs/updater/user_manual.md + ; Modified from "HKEY_LOCAL_MACHINE\SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall\Microsoft EdgeWebView\ModifyPath" + ExecWait `"$R1" /install appguid=${WEBVIEW2APPGUID}&needsadmin=true` $1 + ${If} $1 = 0 + DetailPrint "$(webview2InstallSuccess)" + ${Else} + MessageBox MB_ICONEXCLAMATION|MB_ABORTRETRYIGNORE "$(webview2InstallError)" IDIGNORE ignore IDRETRY update_webview + Quit + ignore: + ${EndIf} + ${EndIf} + ${EndIf} + !endif + ${EndIf} +SectionEnd + +Section Install + SetDetailsPrint none + SetOutPath $INSTDIR + + !ifmacrodef NSIS_HOOK_PREINSTALL + !insertmacro NSIS_HOOK_PREINSTALL + !endif + + !insertmacro CheckIfAppIsRunning + + ; Copy main executable + File "${MAINBINARYSRCPATH}" + + ; Copy resources + CreateDirectory "$INSTDIR\resources\themes\joi-light" + CreateDirectory "$INSTDIR\resources\pre-install" + CreateDirectory "$INSTDIR\resources\themes\night-blue" + CreateDirectory "$INSTDIR\resources\themes\joi-dark" + CreateDirectory "$INSTDIR\binaries" + CreateDirectory "$INSTDIR\resources\themes\dark-dimmed" + SetOutPath "$INSTDIR\binaries" + File /nonfatal /a /r "D:\a\jan\jan\src-tauri\binaries\engines\" + SetOutPath $INSTDIR + File /r "D:\a\jan\jan\src-tauri\binaries\*.dll" + File /a "/oname=resources\pre-install\janhq-assistant-extension-1.0.2.tgz" 
"D:\a\jan\jan\src-tauri\resources\pre-install\janhq-assistant-extension-1.0.2.tgz" + File /a "/oname=resources\pre-install\janhq-conversational-extension-1.0.0.tgz" "D:\a\jan\jan\src-tauri\resources\pre-install\janhq-conversational-extension-1.0.0.tgz" + File /a "/oname=resources\pre-install\janhq-engine-management-extension-1.0.3.tgz" "D:\a\jan\jan\src-tauri\resources\pre-install\janhq-engine-management-extension-1.0.3.tgz" + File /a "/oname=resources\pre-install\janhq-hardware-management-extension-1.0.0.tgz" "D:\a\jan\jan\src-tauri\resources\pre-install\janhq-hardware-management-extension-1.0.0.tgz" + File /a "/oname=resources\pre-install\janhq-inference-cortex-extension-1.0.25.tgz" "D:\a\jan\jan\src-tauri\resources\pre-install\janhq-inference-cortex-extension-1.0.25.tgz" + File /a "/oname=resources\pre-install\janhq-model-extension-1.0.36.tgz" "D:\a\jan\jan\src-tauri\resources\pre-install\janhq-model-extension-1.0.36.tgz" + File /a "/oname=resources\themes\dark-dimmed\theme.json" "D:\a\jan\jan\src-tauri\resources\themes\dark-dimmed\theme.json" + File /a "/oname=resources\themes\joi-dark\theme.json" "D:\a\jan\jan\src-tauri\resources\themes\joi-dark\theme.json" + File /a "/oname=resources\themes\joi-light\theme.json" "D:\a\jan\jan\src-tauri\resources\themes\joi-light\theme.json" + File /a "/oname=resources\themes\night-blue\theme.json" "D:\a\jan\jan\src-tauri\resources\themes\night-blue\theme.json" + + ; Copy external binaries + File /a "/oname=cortex-server.exe" "D:\a\jan\jan\src-tauri\binaries\cortex-server-x86_64-pc-windows-msvc.exe" + File /a "/oname=bun.exe" "D:\a\jan\jan\src-tauri\resources\bin\bun-x86_64-pc-windows-msvc.exe" + File /a "/oname=uv.exe" "D:\a\jan\jan\src-tauri\resources\bin\uv-x86_64-pc-windows-msvc.exe" + + ; Create file associations + + ; Register deep links + + ; Create uninstaller + WriteUninstaller "$INSTDIR\uninstall.exe" + + ; Save $INSTDIR in registry for future installations + WriteRegStr SHCTX "${MANUPRODUCTKEY}" "" $INSTDIR + + !if 
"${INSTALLMODE}" == "both" + ; Save install mode to be selected by default for the next installation such as updating + ; or when uninstalling + WriteRegStr SHCTX "${UNINSTKEY}" $MultiUser.InstallMode 1 + !endif + + ; Remove old main binary if it doesn't match new main binary name + ReadRegStr $OldMainBinaryName SHCTX "${UNINSTKEY}" "MainBinaryName" + ${If} $OldMainBinaryName != "" + ${AndIf} $OldMainBinaryName != "${MAINBINARYNAME}.exe" + Delete "$INSTDIR\$OldMainBinaryName" + ${EndIf} + + ; Save current MAINBINARYNAME for future updates + WriteRegStr SHCTX "${UNINSTKEY}" "MainBinaryName" "${MAINBINARYNAME}.exe" + + ; Registry information for add/remove programs + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayName" "${PRODUCTNAME}" + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayIcon" "$\"$INSTDIR\${MAINBINARYNAME}.exe$\"" + WriteRegStr SHCTX "${UNINSTKEY}" "DisplayVersion" "${VERSION}" + WriteRegStr SHCTX "${UNINSTKEY}" "Publisher" "${MANUFACTURER}" + WriteRegStr SHCTX "${UNINSTKEY}" "InstallLocation" "$\"$INSTDIR$\"" + WriteRegStr SHCTX "${UNINSTKEY}" "UninstallString" "$\"$INSTDIR\uninstall.exe$\"" + WriteRegDWORD SHCTX "${UNINSTKEY}" "NoModify" "1" + WriteRegDWORD SHCTX "${UNINSTKEY}" "NoRepair" "1" + + ${GetSize} "$INSTDIR" "/M=uninstall.exe /S=0K /G=0" $0 $1 $2 + IntOp $0 $0 + ${ESTIMATEDSIZE} + IntFmt $0 "0x%08X" $0 + WriteRegDWORD SHCTX "${UNINSTKEY}" "EstimatedSize" "$0" + + !if "${HOMEPAGE}" != "" + WriteRegStr SHCTX "${UNINSTKEY}" "URLInfoAbout" "${HOMEPAGE}" + WriteRegStr SHCTX "${UNINSTKEY}" "URLUpdateInfo" "${HOMEPAGE}" + WriteRegStr SHCTX "${UNINSTKEY}" "HelpLink" "${HOMEPAGE}" + !endif + + ; Create start menu shortcut + !insertmacro MUI_STARTMENU_WRITE_BEGIN Application + Call CreateOrUpdateStartMenuShortcut + !insertmacro MUI_STARTMENU_WRITE_END + + ; Create desktop shortcut for silent and passive installers + ; because finish page will be skipped + ${If} $PassiveMode = 1 + ${OrIf} ${Silent} + Call CreateOrUpdateDesktopShortcut + ${EndIf} + + !ifmacrodef 
NSIS_HOOK_POSTINSTALL + !insertmacro NSIS_HOOK_POSTINSTALL + !endif + + ; Auto close this page for passive mode + ${If} $PassiveMode = 1 + SetAutoClose true + ${EndIf} +SectionEnd + +Function .onInstSuccess + ; Check for `/R` flag only in silent and passive installers because + ; GUI installer has a toggle for the user to (re)start the app + ${If} $PassiveMode = 1 + ${OrIf} ${Silent} + ; ${GetOptions} $CMDLINE "/R" $R0 + ; ${IfNot} ${Errors} + ${GetOptions} $CMDLINE "/ARGS" $R0 + nsis_tauri_utils::RunAsUser "$INSTDIR\${MAINBINARYNAME}.exe" "$R0" + ; ${EndIf} + ${EndIf} +FunctionEnd + +Function un.onInit + !insertmacro SetContext + + !if "${INSTALLMODE}" == "both" + !insertmacro MULTIUSER_UNINIT + !endif + + !insertmacro MUI_UNGETLANGUAGE + + ${GetOptions} $CMDLINE "/P" $PassiveMode + ${IfNot} ${Errors} + StrCpy $PassiveMode 1 + ${EndIf} + + ${GetOptions} $CMDLINE "/UPDATE" $UpdateMode + ${IfNot} ${Errors} + StrCpy $UpdateMode 1 + ${EndIf} +FunctionEnd + +Section Uninstall + SetDetailsPrint none + + !ifmacrodef NSIS_HOOK_PREUNINSTALL + !insertmacro NSIS_HOOK_PREUNINSTALL + !endif + + !insertmacro CheckIfAppIsRunning + + ; Delete the app directory and its content from disk + ; Copy main executable + Delete "$INSTDIR\${MAINBINARYNAME}.exe" + + ; Delete resources + Delete "$INSTDIR\resources\pre-install\janhq-assistant-extension-1.0.2.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-conversational-extension-1.0.0.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-engine-management-extension-1.0.3.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-hardware-management-extension-1.0.0.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-inference-cortex-extension-1.0.25.tgz" + Delete "$INSTDIR\resources\pre-install\janhq-model-extension-1.0.36.tgz" + Delete "$INSTDIR\resources\themes\dark-dimmed\theme.json" + Delete "$INSTDIR\resources\themes\joi-dark\theme.json" + Delete "$INSTDIR\resources\themes\joi-light\theme.json" + Delete 
"$INSTDIR\resources\themes\night-blue\theme.json" + + ; Delete external binaries + Delete "$INSTDIR\cortex-server.exe" + Delete "$INSTDIR\bun.exe" + Delete "$INSTDIR\uv.exe" + + ; Delete app associations + + ; Delete deep links + + + ; Delete uninstaller + Delete "$INSTDIR\uninstall.exe" + + RMDir /REBOOTOK "$INSTDIR\resources\themes\dark-dimmed" + RMDir /REBOOTOK "$INSTDIR\resources\themes\joi-dark" + RMDir /REBOOTOK "$INSTDIR\resources\themes\joi-light" + RMDir /REBOOTOK "$INSTDIR\resources\themes\night-blue" + RMDir /r /REBOOTOK "$INSTDIR\binaries\engines" + RMDir /REBOOTOK "$INSTDIR\resources\pre-install" + RMDir /REBOOTOK "$INSTDIR\resources\themes" + RMDir /r /REBOOTOK "$INSTDIR\binaries" + RMDir /REBOOTOK "$INSTDIR\resources" + RMDir "$INSTDIR" + + ; Remove shortcuts if not updating + ${If} $UpdateMode <> 1 + !insertmacro DeleteAppUserModelId + + ; Remove start menu shortcut + !insertmacro MUI_STARTMENU_GETFOLDER Application $AppStartMenuFolder + !insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + Delete "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + RMDir "$SMPROGRAMS\$AppStartMenuFolder" + ${EndIf} + !insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk" + Delete "$SMPROGRAMS\${PRODUCTNAME}.lnk" + ${EndIf} + + ; Remove desktop shortcuts + !insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Pop $0 + ${If} $0 = 1 + !insertmacro UnpinShortcut "$DESKTOP\${PRODUCTNAME}.lnk" + Delete "$DESKTOP\${PRODUCTNAME}.lnk" + ${EndIf} + ${EndIf} + + ; Remove registry information for add/remove programs + !if "${INSTALLMODE}" == "both" + DeleteRegKey SHCTX "${UNINSTKEY}" + !else if "${INSTALLMODE}" == "perMachine" + DeleteRegKey 
HKLM "${UNINSTKEY}" + !else + DeleteRegKey HKCU "${UNINSTKEY}" + !endif + + ; Removes the Autostart entry for ${PRODUCTNAME} from the HKCU Run key if it exists. + ; This ensures the program does not launch automatically after uninstallation if it exists. + ; If it doesn't exist, it does nothing. + ; We do this when not updating (to preserve the registry value on updates) + ${If} $UpdateMode <> 1 + DeleteRegValue HKCU "Software\Microsoft\Windows\CurrentVersion\Run" "${PRODUCTNAME}" + ${EndIf} + + ; Delete app data if the checkbox is selected + ; and if not updating + ${If} $DeleteAppDataCheckboxState = 1 + ${AndIf} $UpdateMode <> 1 + ; Clear the install location $INSTDIR from registry + DeleteRegKey SHCTX "${MANUPRODUCTKEY}" + DeleteRegKey /ifempty SHCTX "${MANUKEY}" + + ; Clear the install language from registry + DeleteRegValue HKCU "${MANUPRODUCTKEY}" "Installer Language" + DeleteRegKey /ifempty HKCU "${MANUPRODUCTKEY}" + DeleteRegKey /ifempty HKCU "${MANUKEY}" + + SetShellVarContext current + RmDir /r "$APPDATA\${BUNDLEID}" + RmDir /r "$LOCALAPPDATA\${BUNDLEID}" + ${EndIf} + + !ifmacrodef NSIS_HOOK_POSTUNINSTALL + !insertmacro NSIS_HOOK_POSTUNINSTALL + !endif + + ; Auto close if passive mode or updating + ${If} $PassiveMode = 1 + ${OrIf} $UpdateMode = 1 + SetAutoClose true + ${EndIf} +SectionEnd + +Function RestorePreviousInstallLocation + ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" "" + StrCmp $4 "" +2 0 + StrCpy $INSTDIR $4 +FunctionEnd + +Function Skip + Abort +FunctionEnd + +Function SkipIfPassive + ${IfThen} $PassiveMode = 1 ${|} Abort ${|} +FunctionEnd +Function un.SkipIfPassive + ${IfThen} $PassiveMode = 1 ${|} Abort ${|} +FunctionEnd + +Function CreateOrUpdateStartMenuShortcut + ; We used to use product name as MAINBINARYNAME + ; migrate old shortcuts to target the new MAINBINARYNAME + StrCpy $R0 0 + + !insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro 
SetShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + StrCpy $R0 1 + ${EndIf} + + !insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + StrCpy $R0 1 + ${EndIf} + + ${If} $R0 = 1 + Return + ${EndIf} + + ; Skip creating shortcut if in update mode or no shortcut mode + ; but always create if migrating from wix + ${If} $WixMode = 0 + ${If} $UpdateMode = 1 + ${OrIf} $NoShortcutMode = 1 + Return + ${EndIf} + ${EndIf} + + !if "${STARTMENUFOLDER}" != "" + CreateDirectory "$SMPROGRAMS\$AppStartMenuFolder" + CreateShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" + !else + CreateShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$SMPROGRAMS\${PRODUCTNAME}.lnk" + !endif +FunctionEnd + +Function CreateOrUpdateDesktopShortcut + ; We used to use product name as MAINBINARYNAME + ; migrate old shortcuts to target the new MAINBINARYNAME + !insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\$OldMainBinaryName" + Pop $0 + ${If} $0 = 1 + !insertmacro SetShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + Return + ${EndIf} + + ; Skip creating shortcut if in update mode or no shortcut mode + ; but always create if migrating from wix + ${If} $WixMode = 0 + ${If} $UpdateMode = 1 + ${OrIf} $NoShortcutMode = 1 + Return + ${EndIf} + ${EndIf} + + CreateShortcut "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe" + !insertmacro SetLnkAppUserModelId "$DESKTOP\${PRODUCTNAME}.lnk" +FunctionEnd diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json new file mode 100644 index 000000000..35e7b1fa5 --- /dev/null +++ 
b/src-tauri/tauri.conf.json @@ -0,0 +1,111 @@ +{ + "$schema": "https://schema.tauri.app/config/2", + "productName": "Jan", + "version": "0.5.16", + "identifier": "jan.ai.app", + "build": { + "frontendDist": "../web/out", + "devUrl": "http://localhost:3000", + "beforeDevCommand": "cross-env IS_TAURI=true yarn dev:web", + "beforeBuildCommand": "cross-env IS_TAURI=true yarn build:web" + }, + "app": { + "macOSPrivateApi": true, + "windows": [ + { + "title": "Jan", + "width": 1024, + "height": 768, + "resizable": true, + "fullscreen": false, + "hiddenTitle": true, + "transparent": true, + "titleBarStyle": "Overlay", + "windowEffects": { + "effects": [ + "fullScreenUI", + "mica", + "blur", + "acrylic" + ], + "state": "active" + } + } + ], + "security": { + "csp": { + "default-src": "'self' customprotocol: asset: http://localhost:* http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:*", + "connect-src": "ipc: http://ipc.localhost http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:* https://registry.npmjs.org", + "font-src": [ + "https://fonts.gstatic.com blob: data:" + ], + "img-src": "'self' asset: http://asset.localhost blob: data:", + "style-src": "'unsafe-inline' 'self' https://fonts.googleapis.com", + "script-src": "'self' asset: $APPDATA/**.* http://asset.localhost" + }, + "assetProtocol": { + "enable": true, + "scope": { + "requireLiteralLeadingDot": false, + "allow": [ + "**/*" + ] + } + } + } + }, + "plugins": { + "updater": { + "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDJFNDEzMEVCMUEzNUFENDQKUldSRXJUVWE2ekJCTGc1Mm1BVXgrWmtES3huUlBFR0lCdG5qbWFvMzgyNDhGN3VTTko5Q1NtTW0K", + "endpoints": [ + "https://github.com/menloresearch/jan/releases/latest/download/latest.json" + ], + "windows": { + "installMode": "passive" + } + } + }, + "bundle": { + "active": true, + "targets": [ + "nsis", + "app", + "dmg", + "deb", + "appimage" + ], + "createUpdaterArtifacts": false, + "icon": [ + "icons/32x32.png", + "icons/128x128.png", + "icons/128x128@2x.png", + 
"icons/icon.icns", + "icons/icon.ico" + ], + "resources": [ + "binaries/engines/**/*", + "resources/themes/**/*", + "resources/pre-install/**/*" + ], + "externalBin": [ + "binaries/cortex-server", + "resources/bin/bun", + "resources/bin/uv" + ], + "linux": { + "appimage": { + "bundleMediaFramework": false, + "files": {} + }, + "deb": { + "files": { + "usr/bin/bun": "resources/bin/bun", + "usr/lib/Jan/binaries/engines": "binaries/engines" + } + } + }, + "windows": { + "signCommand": "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1" + } + } +} \ No newline at end of file diff --git a/web/containers/Layout/BottomPanel/SystemMonitor/index.tsx b/web/containers/Layout/BottomPanel/SystemMonitor/index.tsx index d9a0b289a..05ec643fe 100644 --- a/web/containers/Layout/BottomPanel/SystemMonitor/index.tsx +++ b/web/containers/Layout/BottomPanel/SystemMonitor/index.tsx @@ -91,7 +91,7 @@ const SystemMonitor = () => {
onRevealInFinder('Logs')} + onClick={() => onRevealInFinder('logs')} > App Log
diff --git a/web/containers/Layout/TopPanel/index.tsx b/web/containers/Layout/TopPanel/index.tsx index a00c2d22c..386c315de 100644 --- a/web/containers/Layout/TopPanel/index.tsx +++ b/web/containers/Layout/TopPanel/index.tsx @@ -70,9 +70,13 @@ const TopPanel = () => { reduceTransparent && 'border-b border-[hsla(var(--app-border))] bg-[hsla(var(--top-panel-bg))]' )} + data-tauri-drag-region > {!isMac && } -
+
{!isMac && ( + + + + + + + +
+
+ } + /> + ) + }, [isOpen, modalProps]) + return { showModal, PromiseModal } +} diff --git a/web/extension/ExtensionManager.ts b/web/extension/ExtensionManager.ts index 811126f85..f0e613f0d 100644 --- a/web/extension/ExtensionManager.ts +++ b/web/extension/ExtensionManager.ts @@ -2,8 +2,9 @@ import { AIEngine, BaseExtension, ExtensionTypeEnum } from '@janhq/core' -import Extension from './Extension' +import { convertFileSrc } from '@tauri-apps/api/core' +import Extension from './Extension' /** * Manages the registration and retrieval of extensions. */ @@ -123,33 +124,30 @@ export class ExtensionManager { */ async activateExtension(extension: Extension) { // Import class - const extensionUrl = window.electronAPI - ? extension.url - : extension.url.replace( - 'extension://', - `${window.core?.api?.baseApiUrl ?? ''}/extensions/` - ) - await import(/* webpackIgnore: true */ extensionUrl).then( - (extensionClass) => { - // Register class if it has a default export - if ( - typeof extensionClass.default === 'function' && - extensionClass.default.prototype - ) { - this.register( + const extensionUrl = extension.url + await import( + /* webpackIgnore: true */ IS_TAURI + ? convertFileSrc(extensionUrl) + : extensionUrl + ).then((extensionClass) => { + // Register class if it has a default export + if ( + typeof extensionClass.default === 'function' && + extensionClass.default.prototype + ) { + this.register( + extension.name, + new extensionClass.default( + extension.url, extension.name, - new extensionClass.default( - extension.url, - extension.name, - extension.productName, - extension.active, - extension.description, - extension.version - ) + extension.productName, + extension.active, + extension.description, + extension.version ) - } + ) } - ) + }) } /** @@ -158,7 +156,7 @@ export class ExtensionManager { */ async registerActive() { // Get active extensions - const activeExtensions = await this.getActive() + const activeExtensions = (await this.getActive()) ?? 
[] // Activate all await Promise.all( activeExtensions.map((ext: Extension) => this.activateExtension(ext)) diff --git a/web/helpers/atoms/ChatMessage.atom.ts b/web/helpers/atoms/ChatMessage.atom.ts index 1847aa422..faae6e298 100644 --- a/web/helpers/atoms/ChatMessage.atom.ts +++ b/web/helpers/atoms/ChatMessage.atom.ts @@ -165,6 +165,7 @@ export const updateMessageAtom = atom( id: string, conversationId: string, text: ThreadContent[], + metadata: Record | undefined, status: MessageStatus ) => { const messages = get(chatMessages)[conversationId] ?? [] @@ -172,6 +173,7 @@ export const updateMessageAtom = atom( if (message) { message.content = text message.status = status + message.metadata = metadata const updatedMessages = [...messages] const newData: Record = { @@ -192,6 +194,7 @@ export const updateMessageAtom = atom( created_at: Date.now() / 1000, completed_at: Date.now() / 1000, object: 'thread.message', + metadata: metadata, }) } } diff --git a/web/helpers/atoms/Setting.atom.ts b/web/helpers/atoms/Setting.atom.ts index d8ad38a6d..914e7ddd1 100644 --- a/web/helpers/atoms/Setting.atom.ts +++ b/web/helpers/atoms/Setting.atom.ts @@ -24,7 +24,7 @@ export const themesOptionsAtom = atomWithStorage< export const selectedThemeIdAtom = atomWithStorage( THEME, - '', + 'joi-light', undefined, { getOnInit: true } ) @@ -36,7 +36,7 @@ export const themeDataAtom = atomWithStorage( ) export const reduceTransparentAtom = atomWithStorage( REDUCE_TRANSPARENT, - false, + true, undefined, { getOnInit: true } ) diff --git a/web/helpers/atoms/Thread.atom.ts b/web/helpers/atoms/Thread.atom.ts index 474dadeba..578ec6ac6 100644 --- a/web/helpers/atoms/Thread.atom.ts +++ b/web/helpers/atoms/Thread.atom.ts @@ -24,6 +24,7 @@ enum ThreadStorageAtomKeys { ThreadStates = 'threadStates', ThreadList = 'threadList', ThreadListReady = 'threadListReady', + DisabledTools = 'disabledTools', } //// Threads Atom @@ -72,6 +73,19 @@ export const threadDataReadyAtom = atomWithStorage( */ export const 
threadModelParamsAtom = atom>({}) +/** + * Store the tool call approval for thread id + */ +export const approvedThreadToolsAtom = atom>({}) + +/** + * Store the tool call disabled for thread id + */ +export const disabledThreadToolsAtom = atomWithStorage( + ThreadStorageAtomKeys.DisabledTools, + [] +) + //// End Thread Atom /// Active Thread Atom diff --git a/web/hooks/useActiveModel.ts b/web/hooks/useActiveModel.ts index b4b8a5033..0031c13aa 100644 --- a/web/hooks/useActiveModel.ts +++ b/web/hooks/useActiveModel.ts @@ -157,10 +157,13 @@ export function useActiveModel() { stopModel() return } - if (!activeModel) return + // if (!activeModel) return - const engine = EngineManager.instance().get(InferenceEngine.cortex) - engine?.stopInference() + // const engine = EngineManager.instance().get(InferenceEngine.cortex) + // engine?.stopInference() + // NOTE: this only works correctly if there is only 1 concurrent request + // at any point in time, which is a reasonable assumption to have. + EngineManager.instance().controller?.abort() }, [activeModel, stateModel, stopModel]) return { activeModel, startModel, stopModel, stopInference, stateModel } diff --git a/web/hooks/useCreateNewThread.ts b/web/hooks/useCreateNewThread.ts index 57ceeb385..423ce4151 100644 --- a/web/hooks/useCreateNewThread.ts +++ b/web/hooks/useCreateNewThread.ts @@ -59,21 +59,6 @@ export const useCreateNewThread = () => { ) => { const defaultModel = model || selectedModel || recommendedModel - if (!model) { - // if we have model, which means user wants to create new thread from Model hub. Allow them. 
- - // check last thread message, if there empty last message use can not create thread - const lastMessage = threads[0]?.metadata?.lastMessage - - if (!lastMessage && threads.length) { - return toaster({ - title: 'No new thread created.', - description: `To avoid piling up empty threads, please reuse previous one before creating new.`, - type: 'warning', - }) - } - } - // modify assistant tools when experimental on, retieval toggle enabled in default const assistantTools: AssistantTool = { type: 'retrieval', @@ -146,7 +131,7 @@ export const useCreateNewThread = () => { } catch (ex) { return toaster({ title: 'Thread created failed.', - description: `To avoid piling up empty threads, please reuse previous one before creating new.`, + description: `Could not create a new thread. Please try again.`, type: 'error', }) } @@ -180,7 +165,7 @@ export const useCreateNewThread = () => { updateThreadCallback(thread) if (thread.assistants && thread.assistants?.length > 0) { setActiveAssistant(thread.assistants[0]) - updateAssistantCallback(thread.id, thread.assistants[0]) + return updateAssistantCallback(thread.id, thread.assistants[0]) } }, [ diff --git a/web/hooks/useDeleteThread.test.ts b/web/hooks/useDeleteThread.test.ts index 8d616cb42..bf53589ea 100644 --- a/web/hooks/useDeleteThread.test.ts +++ b/web/hooks/useDeleteThread.test.ts @@ -9,8 +9,6 @@ import { extensionManager } from '@/extension/ExtensionManager' import { useCreateNewThread } from './useCreateNewThread' import { Thread } from '@janhq/core/dist/types/types' import { currentPromptAtom } from '@/containers/Providers/Jotai' -import { setActiveThreadIdAtom, deleteThreadStateAtom } from '@/helpers/atoms/Thread.atom' -import { deleteChatMessageAtom as deleteChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom' // Mock the necessary dependencies // Mock dependencies jest.mock('jotai', () => ({ @@ -44,6 +42,7 @@ describe('useDeleteThread', () => { extensionManager.get = jest.fn().mockReturnValue({ deleteThread: 
mockDeleteThread, + getThreadAssistant: jest.fn().mockResolvedValue({}), }) const { result } = renderHook(() => useDeleteThread()) diff --git a/web/hooks/useDeleteThread.ts b/web/hooks/useDeleteThread.ts index 59aa3a83b..bdb82a268 100644 --- a/web/hooks/useDeleteThread.ts +++ b/web/hooks/useDeleteThread.ts @@ -38,12 +38,13 @@ export default function useDeleteThread() { ?.listMessages(threadId) .catch(console.error) if (messages) { - messages.forEach((message) => { - extensionManager + for (const message of messages) { + await extensionManager .get(ExtensionTypeEnum.Conversational) ?.deleteMessage(threadId, message.id) .catch(console.error) - }) + } + const thread = threads.find((e) => e.id === threadId) if (thread) { const updatedThread = { diff --git a/web/hooks/useEngineManagement.ts b/web/hooks/useEngineManagement.ts index d9eacb592..8c19737ac 100644 --- a/web/hooks/useEngineManagement.ts +++ b/web/hooks/useEngineManagement.ts @@ -1,3 +1,4 @@ +import 'openai/shims/web' import { useCallback, useMemo, useState } from 'react' import { @@ -18,11 +19,66 @@ import { useAtom, useAtomValue } from 'jotai' import { atomWithStorage } from 'jotai/utils' import useSWR from 'swr' +import { models, TokenJS } from 'token.js' +import { LLMProvider } from 'token.js/dist/chat' + import { getDescriptionByEngine, getTitleByEngine } from '@/utils/modelEngine' import { extensionManager } from '@/extension/ExtensionManager' import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom' +export const builtInEngines = [ + 'openai', + 'ai21', + 'anthropic', + 'gemini', + 'cohere', + 'bedrock', + 'mistral', + 'groq', + 'perplexity', + 'openrouter', + 'openai-compatible', +] + +export const convertBuiltInEngine = (engine?: string): LLMProvider => { + const engineName = normalizeBuiltInEngineName(engine) ?? '' + return ( + builtInEngines.includes(engineName) ? 
engineName : 'openai-compatible' + ) as LLMProvider +} + +export const normalizeBuiltInEngineName = ( + engine?: string +): string | undefined => { + return engine === ('google_gemini' as InferenceEngine) ? 'gemini' : engine +} + +export const extendBuiltInEngineModels = ( + tokenJS: TokenJS, + provider: LLMProvider, + model?: string +) => { + if (provider !== 'openrouter' && provider !== 'openai-compatible' && model) { + if ( + provider in Object.keys(models) && + (models[provider].models as unknown as string[]).includes(model) + ) { + return + } + + try { + // @ts-expect-error Unknown extendModelList provider type + tokenJS.extendModelList(provider, model, { + streaming: true, + toolCalls: true, + }) + } catch (error) { + console.error('Failed to extend model list:', error) + } + } +} + export const releasedEnginesCacheAtom = atomWithStorage<{ data: EngineReleased[] timestamp: number diff --git a/web/hooks/useFactoryReset.test.ts b/web/hooks/useFactoryReset.test.ts index a5b5844bc..c66ecce20 100644 --- a/web/hooks/useFactoryReset.test.ts +++ b/web/hooks/useFactoryReset.test.ts @@ -20,9 +20,7 @@ jest.mock('@janhq/core', () => ({ EngineManager: { instance: jest.fn().mockReturnValue({ get: jest.fn(), - engines: { - values: jest.fn().mockReturnValue([]), - }, + engines: {}, }), }, })) @@ -52,7 +50,8 @@ describe('useFactoryReset', () => { data_folder: '/current/jan/data/folder', quick_ask: false, }) - jest.spyOn(global, 'setTimeout') + // @ts-ignore + jest.spyOn(global, 'setTimeout').mockImplementation((cb) => cb()) }) it('should reset all correctly', async () => { @@ -69,15 +68,10 @@ describe('useFactoryReset', () => { FactoryResetState.StoppingModel ) expect(mockStopModel).toHaveBeenCalled() - expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 4000) expect(mockSetFactoryResetState).toHaveBeenCalledWith( FactoryResetState.DeletingData ) - expect(fs.rm).toHaveBeenCalledWith('/current/jan/data/folder') - 
expect(mockUpdateAppConfiguration).toHaveBeenCalledWith({ - data_folder: '/default/jan/data/folder', - quick_ask: false, - }) + expect(fs.rm).toHaveBeenCalledWith({ args: ['/current/jan/data/folder'] }) expect(mockSetFactoryResetState).toHaveBeenCalledWith( FactoryResetState.ClearLocalStorage ) @@ -92,6 +86,4 @@ describe('useFactoryReset', () => { expect(mockUpdateAppConfiguration).not.toHaveBeenCalled() }) - - // Add more tests as needed for error cases, edge cases, etc. }) diff --git a/web/hooks/useFactoryReset.ts b/web/hooks/useFactoryReset.ts index 7344b2eb1..c582cf685 100644 --- a/web/hooks/useFactoryReset.ts +++ b/web/hooks/useFactoryReset.ts @@ -39,11 +39,9 @@ export default function useFactoryReset() { await stopModel() await Promise.all( - EngineManager.instance() - .engines.values() - .map(async (engine) => { - await engine.onUnload() - }) + Object.values(EngineManager.instance().engines).map(async (engine) => { + await engine.onUnload() + }) ) await new Promise((resolve) => setTimeout(resolve, 4000)) @@ -60,20 +58,21 @@ export default function useFactoryReset() { quick_ask: appConfiguration?.quick_ask ?? 
false, distinct_id: appConfiguration?.distinct_id, } - await window.core?.api?.updateAppConfiguration(configuration) + await window.core?.api?.updateAppConfiguration({ configuration }) } + await window.core?.api?.installExtensions() + // Perform factory reset - await window.core?.api?.factoryReset() + // await window.core?.api?.factoryReset() // 4: Clear app local storage setFactoryResetState(FactoryResetState.ClearLocalStorage) // reset the localStorage localStorage.clear() - window.core = undefined // 5: Relaunch the app - window.location.reload() + window.core.api.relaunch() }, [defaultJanDataFolder, stopModel, setFactoryResetState] ) diff --git a/web/hooks/useLoadTheme.test.ts b/web/hooks/useLoadTheme.test.ts index 8d352a52c..c378fb2e6 100644 --- a/web/hooks/useLoadTheme.test.ts +++ b/web/hooks/useLoadTheme.test.ts @@ -4,7 +4,6 @@ import { fs, joinPath } from '@janhq/core' import { useAtom, useAtomValue, useSetAtom } from 'jotai' import { useLoadTheme } from './useLoadTheme' -import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom' import { selectedThemeIdAtom, themeDataAtom, @@ -27,8 +26,6 @@ describe('useLoadTheme', () => { jest.clearAllMocks() }) - const mockJanDataFolderPath = '/mock/path' - const mockThemesPath = '/mock/path/themes' const mockSelectedThemeId = 'joi-light' const mockThemeData = { id: 'joi-light', @@ -40,11 +37,17 @@ describe('useLoadTheme', () => { } it('should load theme and set variables', async () => { + const readTheme = jest.fn().mockResolvedValue("{}") + + global.window.core = { + api: { + getThemes: () => ['joi-light', 'joi-dark'], + readTheme, + }, + } // Mock Jotai hooks ;(useAtomValue as jest.Mock).mockImplementation((atom) => { switch (atom) { - case janDataFolderPathAtom: - return mockJanDataFolderPath default: return undefined } @@ -72,15 +75,6 @@ describe('useLoadTheme', () => { const mockSetTheme = jest.fn() ;(useTheme as jest.Mock).mockReturnValue({ setTheme: mockSetTheme }) - // Mock window.electronAPI - 
Object.defineProperty(window, 'electronAPI', { - value: { - setNativeThemeLight: jest.fn(), - setNativeThemeDark: jest.fn(), - }, - writable: true, - }) - const { result } = renderHook(() => useLoadTheme()) await act(async () => { @@ -88,18 +82,11 @@ describe('useLoadTheme', () => { }) // Assertions - expect(fs.readdirSync).toHaveBeenCalledWith(mockThemesPath) - expect(fs.readFileSync).toHaveBeenCalledWith( - `${mockThemesPath}/${mockSelectedThemeId}/theme.json`, - 'utf-8' - ) - expect(mockSetTheme).toHaveBeenCalledWith('light') - expect(window.electronAPI.setNativeThemeLight).toHaveBeenCalled() + expect(readTheme).toHaveBeenLastCalledWith({ themeName: 'joi-light' }) }) it('should set default theme if no selected theme', async () => { // Mock Jotai hooks with empty selected theme - ;(useAtomValue as jest.Mock).mockReturnValue(mockJanDataFolderPath) ;(useSetAtom as jest.Mock).mockReturnValue(jest.fn()) ;(useAtom as jest.Mock).mockReturnValue(['', jest.fn()]) ;(useAtom as jest.Mock).mockReturnValue([{}, jest.fn()]) diff --git a/web/hooks/useLoadTheme.ts b/web/hooks/useLoadTheme.ts index cbeb9e644..7631ee235 100644 --- a/web/hooks/useLoadTheme.ts +++ b/web/hooks/useLoadTheme.ts @@ -2,13 +2,10 @@ import { useCallback, useEffect } from 'react' import { useTheme } from 'next-themes' -import { fs, joinPath } from '@janhq/core' - -import { useAtom, useAtomValue } from 'jotai' +import { useAtom } from 'jotai' import cssVars from '@/utils/jsonToCssVariables' -import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom' import { selectedThemeIdAtom, themeDataAtom, @@ -18,7 +15,6 @@ import { type NativeThemeProps = 'light' | 'dark' export const useLoadTheme = () => { - const janDataFolderPath = useAtomValue(janDataFolderPathAtom) const [themeOptions, setThemeOptions] = useAtom(themesOptionsAtom) const [themeData, setThemeData] = useAtom(themeDataAtom) const [selectedIdTheme, setSelectedIdTheme] = useAtom(selectedThemeIdAtom) @@ -26,12 +22,14 @@ export const 
useLoadTheme = () => { const setNativeTheme = useCallback( (nativeTheme: NativeThemeProps) => { + if (!window.electronAPI) return + if (nativeTheme === 'dark') { - window?.electronAPI?.setNativeThemeDark() + window?.core?.api?.setNativeThemeDark() setTheme('dark') localStorage.setItem('nativeTheme', 'dark') } else { - window?.electronAPI?.setNativeThemeLight() + window?.core?.api?.setNativeThemeLight() setTheme('light') localStorage.setItem('nativeTheme', 'light') } @@ -40,6 +38,7 @@ export const useLoadTheme = () => { ) const applyTheme = (theme: Theme) => { + if (!theme.variables) return const variables = cssVars(theme.variables) const headTag = document.getElementsByTagName('head')[0] const styleTag = document.createElement('style') @@ -48,45 +47,32 @@ export const useLoadTheme = () => { } const getThemes = useCallback(async () => { - if (!janDataFolderPath.length) return - const folderPath = await joinPath([janDataFolderPath, 'themes']) - const installedThemes = await fs.readdirSync(folderPath) + const installedThemes = await window.core.api.getThemes() - const themesOptions: { name: string; value: string }[] = installedThemes - .filter((x: string) => x !== '.DS_Store') - .map(async (x: string) => { - const y = await joinPath([`${folderPath}/${x}`, `theme.json`]) - const c: Theme = JSON.parse(await fs.readFileSync(y, 'utf-8')) - return { name: c?.displayName, value: c.id } + const themesOptions: { name: string; value: string }[] = + installedThemes.map((x: string) => ({ + name: x + .replace(/-/g, ' ') + .replace(/\b\w/g, (char) => char.toUpperCase()), + value: x, + })) + setThemeOptions(themesOptions) + + if (!selectedIdTheme.length) return setSelectedIdTheme('joi-light') + const theme: Theme = JSON.parse( + await window.core.api.readTheme({ + themeName: selectedIdTheme, }) - Promise.all(themesOptions).then((results) => { - setThemeOptions(results) - }) + ) - if (janDataFolderPath.length > 0) { - if (!selectedIdTheme.length) return 
setSelectedIdTheme('joi-light') - const filePath = await joinPath([ - `${folderPath}/${selectedIdTheme}`, - `theme.json`, - ]) - const theme: Theme = JSON.parse(await fs.readFileSync(filePath, 'utf-8')) - - setThemeData(theme) - setNativeTheme(theme.nativeTheme) - applyTheme(theme) - } - }, [ - janDataFolderPath, - selectedIdTheme, - setNativeTheme, - setSelectedIdTheme, - setThemeData, - setThemeOptions, - ]) + setThemeData(theme) + setNativeTheme(theme.nativeTheme) + applyTheme(theme) + }, [selectedIdTheme]) const configureTheme = useCallback(async () => { if (!themeData || !themeOptions) { - await getThemes() + getThemes() } else { applyTheme(themeData) } @@ -95,11 +81,9 @@ export const useLoadTheme = () => { useEffect(() => { configureTheme() - }, [ - configureTheme, - selectedIdTheme, - setNativeTheme, - setSelectedIdTheme, - themeData?.nativeTheme, - ]) + }, [themeData]) + + useEffect(() => { + getThemes() + }, []) } diff --git a/web/hooks/useModels.ts b/web/hooks/useModels.ts index ec03e2158..3f11ba58c 100644 --- a/web/hooks/useModels.ts +++ b/web/hooks/useModels.ts @@ -42,10 +42,9 @@ const useModels = () => { })) .filter((e) => !('status' in e) || e.status !== 'downloadable') - const remoteModels = ModelManager.instance() - .models.values() - .toArray() - .filter((e) => e.engine !== InferenceEngine.cortex_llamacpp) + const remoteModels = Array.from( + ModelManager.instance().models.values() + ).filter((e) => e.engine !== InferenceEngine.cortex_llamacpp) const toUpdate = [ ...localModels, ...remoteModels.filter( @@ -70,7 +69,7 @@ const useModels = () => { } const getExtensionModels = () => { - const models = ModelManager.instance().models.values().toArray() + const models = Array.from(ModelManager.instance().models.values()) setExtensionModels(models) } // Fetch all data @@ -81,7 +80,7 @@ const useModels = () => { const reloadData = useDebouncedCallback(() => getData(), 300) const updateStates = useCallback(() => { - const cachedModels = 
ModelManager.instance().models.values().toArray() + const cachedModels = Array.from(ModelManager.instance().models.values()) setDownloadedModels((downloadedModels) => [ ...downloadedModels, ...cachedModels.filter( diff --git a/web/hooks/usePath.ts b/web/hooks/usePath.ts index 464ff0b58..6ff1a73bd 100644 --- a/web/hooks/usePath.ts +++ b/web/hooks/usePath.ts @@ -4,45 +4,14 @@ import { useAtomValue } from 'jotai' import { getFileInfo } from '@/utils/file' import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom' -import { activeAssistantAtom } from '@/helpers/atoms/Assistant.atom' -import { selectedModelAtom } from '@/helpers/atoms/Model.atom' import { activeThreadAtom } from '@/helpers/atoms/Thread.atom' export const usePath = () => { const janDataFolderPath = useAtomValue(janDataFolderPathAtom) const activeThread = useAtomValue(activeThreadAtom) - const selectedModel = useAtomValue(selectedModelAtom) - const activeAssistant = useAtomValue(activeAssistantAtom) - const onRevealInFinder = async (type: string) => { - // TODO: this logic should be refactored. - if (type !== 'Model' && !activeThread) return - - let filePath = undefined - const assistantId = activeAssistant?.assistant_id - switch (type) { - case 'Engine': - case 'Thread': - filePath = await joinPath(['threads', activeThread?.id ?? 
'']) - break - case 'Model': - if (!selectedModel) return - filePath = await joinPath(['models', selectedModel.id]) - break - case 'Tools': - case 'Assistant': - if (!assistantId) return - filePath = await joinPath(['assistants', assistantId]) - break - case 'Logs': - filePath = 'logs' - break - default: - break - } - - if (!filePath) return - const fullPath = await joinPath([janDataFolderPath, filePath]) + const onRevealInFinder = async (path: string) => { + const fullPath = await joinPath([janDataFolderPath, path]) openFileExplorer(fullPath) } diff --git a/web/hooks/useSendChatMessage.ts b/web/hooks/useSendChatMessage.ts index 65124fcab..3e6131fac 100644 --- a/web/hooks/useSendChatMessage.ts +++ b/web/hooks/useSendChatMessage.ts @@ -1,21 +1,32 @@ +import 'openai/shims/web' + import { useEffect, useRef } from 'react' import { - ChatCompletionRole, MessageRequestType, ExtensionTypeEnum, Thread, ThreadMessage, Model, ConversationalExtension, - EngineManager, - ToolManager, ThreadAssistantInfo, + events, + MessageEvent, + EngineManager, InferenceEngine, + MessageStatus, } from '@janhq/core' import { extractInferenceParams, extractModelLoadParams } from '@janhq/core' import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai' +import { + ChatCompletionMessageParam, + ChatCompletionTool, + ChatCompletionMessageToolCall, +} from 'openai/resources/chat' +import { CompletionResponse, StreamCompletionResponse, TokenJS } from 'token.js' +import { ulid } from 'ulidx' + import { modelDropdownStateAtom } from '@/containers/ModelDropdown' import { currentPromptAtom, @@ -24,12 +35,23 @@ import { } from '@/containers/Providers/Jotai' import { compressImage, getBase64 } from '@/utils/base64' +import { + createMessage, + createMessageContent, + emptyMessageContent, +} from '@/utils/createMessage' import { MessageRequestBuilder } from '@/utils/messageRequestBuilder' import { ThreadMessageBuilder } from '@/utils/threadMessageBuilder' import { useActiveModel } from 
'./useActiveModel' +import { + convertBuiltInEngine, + extendBuiltInEngineModels, + useGetEngines, +} from './useEngineManagement' + import { extensionManager } from '@/extension/ExtensionManager' import { activeAssistantAtom } from '@/helpers/atoms/Assistant.atom' import { @@ -41,16 +63,21 @@ import { import { selectedModelAtom } from '@/helpers/atoms/Model.atom' import { activeThreadAtom, + approvedThreadToolsAtom, + disabledThreadToolsAtom, engineParamsUpdateAtom, getActiveThreadModelParamsAtom, isGeneratingResponseAtom, updateThreadAtom, updateThreadWaitingForResponseAtom, } from '@/helpers/atoms/Thread.atom' +import { ModelTool } from '@/types/model' export const reloadModelAtom = atom(false) -export default function useSendChatMessage() { +export default function useSendChatMessage( + showModal?: (toolName: string, threadId: string) => Promise +) { const activeThread = useAtomValue(activeThreadAtom) const activeAssistant = useAtomValue(activeAssistantAtom) const addNewMessage = useSetAtom(addNewMessageAtom) @@ -59,6 +86,8 @@ export default function useSendChatMessage() { const setCurrentPrompt = useSetAtom(currentPromptAtom) const deleteMessage = useSetAtom(deleteMessageAtom) const setEditPrompt = useSetAtom(editPromptAtom) + const approvedTools = useAtomValue(approvedThreadToolsAtom) + const disabledTools = useAtomValue(disabledThreadToolsAtom) const currentMessages = useAtomValue(getCurrentChatMessagesAtom) const selectedModel = useAtomValue(selectedModelAtom) @@ -79,6 +108,8 @@ export default function useSendChatMessage() { const selectedModelRef = useRef() + const { engines } = useGetEngines() + useEffect(() => { modelRef.current = activeModel }, [activeModel]) @@ -100,7 +131,7 @@ export default function useSendChatMessage() { const newConvoData = Array.from(currentMessages) let toSendMessage = newConvoData.pop() - while (toSendMessage && toSendMessage?.role !== ChatCompletionRole.User) { + while (toSendMessage && toSendMessage?.role !== 'user') { await 
extensionManager .get(ExtensionTypeEnum.Conversational) ?.deleteMessage(toSendMessage.thread_id, toSendMessage.id) @@ -120,12 +151,16 @@ export default function useSendChatMessage() { ) => { if (!message || message.trim().length === 0) return - if (!activeThreadRef.current || !activeAssistantRef.current) { + const activeThread = activeThreadRef.current + const activeAssistant = activeAssistantRef.current + const currentModel = selectedModelRef.current + + if (!activeThread || !activeAssistant) { console.error('No active thread or assistant') return } - if (selectedModelRef.current?.id === undefined) { + if (!currentModel?.id) { setModelDropdownState(true) return } @@ -138,111 +173,404 @@ export default function useSendChatMessage() { const prompt = message.trim() - updateThreadWaiting(activeThreadRef.current.id, true) + updateThreadWaiting(activeThread.id, true) setCurrentPrompt('') setEditPrompt('') - let base64Blob = fileUpload ? await getBase64(fileUpload.file) : undefined + try { + let base64Blob = fileUpload ? await getBase64(fileUpload.file) : undefined - if (base64Blob && fileUpload?.type === 'image') { - // Compress image - base64Blob = await compressImage(base64Blob, 512) - } - - const modelRequest = - selectedModelRef?.current ?? activeAssistantRef.current?.model - - // Fallback support for previous broken threads - if (activeAssistantRef.current?.model?.id === '*') { - activeAssistantRef.current.model = { - id: modelRequest.id, - settings: modelRequest.settings, - parameters: modelRequest.parameters, + if (base64Blob && fileUpload?.type === 'image') { + // Compress image + base64Blob = await compressImage(base64Blob, 512) } - } - if (runtimeParams.stream == null) { - runtimeParams.stream = true - } - // Build Message Request - const requestBuilder = new MessageRequestBuilder( - MessageRequestType.Thread, - { - ...modelRequest, - settings: settingParams, - parameters: runtimeParams, - }, - activeThreadRef.current, - messages ?? 
currentMessages - ).addSystemMessage(activeAssistantRef.current?.instructions) + const modelRequest = selectedModel ?? activeAssistant.model - requestBuilder.pushMessage(prompt, base64Blob, fileUpload) - - // Build Thread Message to persist - const threadMessageBuilder = new ThreadMessageBuilder( - requestBuilder - ).pushMessage(prompt, base64Blob, fileUpload) - - const newMessage = threadMessageBuilder.build() - - // Update thread state - const updatedThread: Thread = { - ...activeThreadRef.current, - updated: newMessage.created_at, - metadata: { - ...activeThreadRef.current.metadata, - lastMessage: prompt, - }, - } - updateThread(updatedThread) - - if ( - !isResend && - (newMessage.content.length || newMessage.attachments?.length) - ) { - // Add message - const createdMessage = await extensionManager - .get(ExtensionTypeEnum.Conversational) - ?.createMessage(newMessage) - .catch(() => undefined) - - if (!createdMessage) return - - // Push to states - addNewMessage(createdMessage) - } - - // Start Model if not started - const modelId = - selectedModelRef.current?.id ?? 
activeAssistantRef.current?.model.id - - if (base64Blob) { - setFileUpload(undefined) - } - - if (modelRef.current?.id !== modelId && modelId) { - const error = await startModel(modelId).catch((error: Error) => error) - if (error) { - updateThreadWaiting(activeThreadRef.current.id, false) - return + // Fallback support for previous broken threads + if (activeAssistant.model?.id === '*') { + activeAssistant.model = { + id: currentModel.id, + settings: currentModel.settings, + parameters: currentModel.parameters, + } } + if (runtimeParams.stream == null) { + runtimeParams.stream = true + } + + // Build Message Request + // TODO: detect if model supports tools + const tools = (await window.core.api.getTools()) + ?.filter((tool: ModelTool) => !disabledTools.includes(tool.name)) + .map((tool: ModelTool) => ({ + type: 'function' as const, + function: { + name: tool.name, + description: tool.description?.slice(0, 1024), + parameters: tool.inputSchema, + strict: false, + }, + })) + const requestBuilder = new MessageRequestBuilder( + MessageRequestType.Thread, + { + ...modelRequest, + settings: settingParams, + parameters: runtimeParams, + }, + activeThread, + messages ?? currentMessages, + tools && tools.length ? 
tools : undefined + ).addSystemMessage(activeAssistant.instructions) + + requestBuilder.pushMessage(prompt, base64Blob, fileUpload) + + // Build Thread Message to persist + const threadMessageBuilder = new ThreadMessageBuilder( + requestBuilder + ).pushMessage(prompt, base64Blob, fileUpload) + + const newMessage = threadMessageBuilder.build() + + // Update thread state + const updatedThread: Thread = { + ...activeThread, + updated: newMessage.created_at, + metadata: { + ...activeThread.metadata, + lastMessage: prompt, + }, + } + updateThread(updatedThread) + + if ( + !isResend && + (newMessage.content.length || newMessage.attachments?.length) + ) { + // Add message + const createdMessage = await extensionManager + .get(ExtensionTypeEnum.Conversational) + ?.createMessage(newMessage) + .catch(() => undefined) + + if (!createdMessage) return + + // Push to states + addNewMessage(createdMessage) + } + + // Start Model if not started + const isCortex = + modelRequest.engine == InferenceEngine.cortex || + modelRequest.engine == InferenceEngine.cortex_llamacpp + const modelId = selectedModel?.id ?? activeAssistantRef.current?.model.id + + if (base64Blob) { + setFileUpload(undefined) + } + + if (modelRef.current?.id !== modelId && modelId && isCortex) { + const error = await startModel(modelId).catch((error: Error) => error) + if (error) { + updateThreadWaiting(activeThread.id, false) + return + } + } + setIsGeneratingResponse(true) + + let isDone = false + + const engine = + engines?.[requestBuilder.model.engine as InferenceEngine]?.[0] + const apiKey = engine?.api_key + const provider = convertBuiltInEngine(engine?.engine) + + const tokenJS = new TokenJS({ + apiKey: apiKey ?? (await window.core.api.appToken()), + baseURL: apiKey ? undefined : `${API_BASE_URL}/v1`, + }) + + extendBuiltInEngineModels(tokenJS, provider, modelId) + + // llama.cpp currently does not support streaming when tools are used. + const useStream = + requestBuilder.tools && isCortex + ? 
false + : modelRequest.parameters?.stream + + let parentMessageId: string | undefined + while (!isDone) { + let messageId = ulid() + if (!parentMessageId) { + parentMessageId = ulid() + messageId = parentMessageId + } + const data = requestBuilder.build() + const message: ThreadMessage = createMessage({ + id: messageId, + thread_id: activeThread.id, + assistant_id: activeAssistant.assistant_id, + metadata: { + ...(messageId !== parentMessageId + ? { parent_id: parentMessageId } + : {}), + }, + }) + events.emit(MessageEvent.OnMessageResponse, message) + + // we need to separate into 2 cases to appease linter + const controller = new AbortController() + EngineManager.instance().controller = controller + if (useStream) { + const response = await tokenJS.chat.completions.create( + { + stream: true, + provider, + messages: requestBuilder.messages as ChatCompletionMessageParam[], + model: data.model?.id ?? '', + tools: data.tools as ChatCompletionTool[], + tool_choice: data.tools ? 'auto' : undefined, + }, + { + signal: controller.signal, + } + ) + // Variables to track and accumulate streaming content + if (!message.content.length) { + message.content = emptyMessageContent + } + isDone = await processStreamingResponse( + response, + requestBuilder, + message + ) + } else { + const response = await tokenJS.chat.completions.create( + { + stream: false, + provider, + messages: requestBuilder.messages as ChatCompletionMessageParam[], + model: data.model?.id ?? '', + tools: data.tools as ChatCompletionTool[], + tool_choice: data.tools ? 
'auto' : undefined, + }, + { + signal: controller.signal, + } + ) + // Variables to track and accumulate streaming content + if (!message.content.length) { + message.content = emptyMessageContent + } + isDone = await processNonStreamingResponse( + response, + requestBuilder, + message + ) + } + + message.status = MessageStatus.Ready + events.emit(MessageEvent.OnMessageUpdate, message) + } + } catch (error) { + setIsGeneratingResponse(false) + updateThreadWaiting(activeThread.id, false) + const errorMessage: ThreadMessage = createMessage({ + thread_id: activeThread.id, + assistant_id: activeAssistant.assistant_id, + content: createMessageContent( + typeof error === 'object' && error && 'message' in error + ? (error as { message: string }).message + : JSON.stringify(error) + ), + }) + events.emit(MessageEvent.OnMessageResponse, errorMessage) + + errorMessage.status = MessageStatus.Error + events.emit(MessageEvent.OnMessageUpdate, errorMessage) } - setIsGeneratingResponse(true) - - // Process message request with Assistants tools - const request = await ToolManager.instance().process( - requestBuilder.build(), - activeAssistantRef?.current.tools ?? [] - ) - - // Request for inference - EngineManager.instance().get(InferenceEngine.cortex)?.inference(request) // Reset states setReloadModel(false) setEngineParamsUpdate(false) } + const processNonStreamingResponse = async ( + response: CompletionResponse, + requestBuilder: MessageRequestBuilder, + message: ThreadMessage + ): Promise => { + // Handle tool calls in the response + const toolCalls: ChatCompletionMessageToolCall[] = + response.choices[0]?.message?.tool_calls ?? [] + const content = response.choices[0].message?.content + message.content = createMessageContent(content ?? '') + events.emit(MessageEvent.OnMessageUpdate, message) + await postMessageProcessing( + toolCalls ?? [], + requestBuilder, + message, + content ?? 
'' + ) + return !toolCalls || !toolCalls.length + } + + const processStreamingResponse = async ( + response: StreamCompletionResponse, + requestBuilder: MessageRequestBuilder, + message: ThreadMessage + ): Promise => { + // Variables to track and accumulate streaming content + let currentToolCall: { + id: string + function: { name: string; arguments: string } + } | null = null + let accumulatedContent = '' + const toolCalls: ChatCompletionMessageToolCall[] = [] + // Process the streaming chunks + for await (const chunk of response) { + // Handle tool calls in the chunk + if (chunk.choices[0]?.delta?.tool_calls) { + const deltaToolCalls = chunk.choices[0].delta.tool_calls + + // Handle the beginning of a new tool call + if ( + deltaToolCalls[0]?.index !== undefined && + deltaToolCalls[0]?.function + ) { + const index = deltaToolCalls[0].index + + // Create new tool call if this is the first chunk for it + if (!toolCalls[index]) { + toolCalls[index] = { + id: deltaToolCalls[0]?.id || '', + function: { + name: deltaToolCalls[0]?.function?.name || '', + arguments: deltaToolCalls[0]?.function?.arguments || '', + }, + type: 'function', + } + currentToolCall = toolCalls[index] + } else { + // Continuation of existing tool call + currentToolCall = toolCalls[index] + + // Append to function name or arguments if they exist in this chunk + if (deltaToolCalls[0]?.function?.name) { + currentToolCall!.function.name += deltaToolCalls[0].function.name + } + + if (deltaToolCalls[0]?.function?.arguments) { + currentToolCall!.function.arguments += + deltaToolCalls[0].function.arguments + } + } + } + } + + // Handle regular content in the chunk + if (chunk.choices[0]?.delta?.content) { + const content = chunk.choices[0].delta.content + accumulatedContent += content + + message.content = createMessageContent(accumulatedContent) + events.emit(MessageEvent.OnMessageUpdate, message) + } + } + + await postMessageProcessing( + toolCalls ?? 
[], + requestBuilder, + message, + accumulatedContent ?? '' + ) + return !toolCalls || !toolCalls.length + } + + const postMessageProcessing = async ( + toolCalls: ChatCompletionMessageToolCall[], + requestBuilder: MessageRequestBuilder, + message: ThreadMessage, + content: string + ) => { + requestBuilder.pushAssistantMessage({ + content, + role: 'assistant', + refusal: null, + tool_calls: toolCalls, + }) + + // Handle completed tool calls + if (toolCalls.length > 0) { + for (const toolCall of toolCalls) { + const toolId = ulid() + const toolCallsMetadata = + message.metadata?.tool_calls && + Array.isArray(message.metadata?.tool_calls) + ? message.metadata?.tool_calls + : [] + message.metadata = { + ...(message.metadata ?? {}), + tool_calls: [ + ...toolCallsMetadata, + { + tool: { + ...toolCall, + id: toolId, + }, + response: undefined, + state: 'pending', + }, + ], + } + events.emit(MessageEvent.OnMessageUpdate, message) + + const approved = + approvedTools[message.thread_id]?.includes(toolCall.function.name) || + (showModal + ? await showModal(toolCall.function.name, message.thread_id) + : true) + + const result = approved + ? await window.core.api.callTool({ + toolName: toolCall.function.name, + arguments: JSON.parse(toolCall.function.arguments), + }) + : { + content: [ + { + type: 'text', + text: 'The user has chosen to disallow the tool call.', + }, + ], + } + if (result.error) break + + message.metadata = { + ...(message.metadata ?? {}), + tool_calls: [ + ...toolCallsMetadata, + { + tool: { + ...toolCall, + id: toolId, + }, + response: result, + state: 'ready', + }, + ], + } + + requestBuilder.pushToolMessage( + result.content[0]?.text ?? 
'', + toolCall.id + ) + events.emit(MessageEvent.OnMessageUpdate, message) + } + } + } + return { sendChatMessage, resendChatMessage, diff --git a/web/hooks/useUpdateModelParameters.ts b/web/hooks/useUpdateModelParameters.ts index dab2f6e28..8bab0c357 100644 --- a/web/hooks/useUpdateModelParameters.ts +++ b/web/hooks/useUpdateModelParameters.ts @@ -1,6 +1,7 @@ import { useCallback } from 'react' import { + AssistantTool, ConversationalExtension, ExtensionTypeEnum, InferenceEngine, @@ -51,7 +52,11 @@ export default function useUpdateModelParameters() { ) const updateModelParameter = useCallback( - async (thread: Thread, settings: UpdateModelParameter) => { + async ( + thread: Thread, + settings: UpdateModelParameter, + tools?: AssistantTool[] + ) => { if (!activeAssistant) return const toUpdateSettings = processStopWords(settings.params ?? {}) @@ -70,6 +75,7 @@ export default function useUpdateModelParameters() { const settingParams = extractModelLoadParams(updatedModelParams) const assistantInfo = { ...activeAssistant, + tools: tools ?? 
activeAssistant.tools, model: { ...activeAssistant?.model, parameters: runtimeParams, diff --git a/web/next.config.js b/web/next.config.js index dfb336a2d..c36eae42a 100644 --- a/web/next.config.js +++ b/web/next.config.js @@ -42,6 +42,7 @@ const nextConfig = { isWindows: process.platform === 'win32', isLinux: process.platform === 'linux', PLATFORM: JSON.stringify(process.platform), + IS_TAURI: process.env.IS_TAURI === 'true', }), ] return config diff --git a/web/package.json b/web/package.json index 1d5cef480..dd02f3de5 100644 --- a/web/package.json +++ b/web/package.json @@ -20,6 +20,9 @@ "@janhq/joi": "link:../joi", "@radix-ui/react-icons": "^1.3.2", "@tanstack/react-virtual": "^3.10.9", + "@tauri-apps/api": "^2.4.0", + "@tauri-apps/plugin-http": "^2.4.2", + "@tauri-apps/plugin-updater": "~2", "@uppy/core": "^4.3.0", "@uppy/react": "^4.0.4", "@uppy/xhr-upload": "^4.2.3", @@ -34,6 +37,8 @@ "marked": "^9.1.2", "next": "14.2.3", "next-themes": "^0.2.1", + "npx-scope-finder": "^1.3.0", + "openai": "^4.90.0", "postcss": "8.4.31", "postcss-url": "10.1.3", "posthog-js": "^1.194.6", @@ -60,6 +65,7 @@ "swr": "^2.2.5", "tailwind-merge": "^2.0.0", "tailwindcss": "3.4.17", + "token.js": "npm:token.js-fork@0.7.6", "ulidx": "^2.3.0", "use-debounce": "^10.0.0", "uuid": "^9.0.1", @@ -81,6 +87,7 @@ "@types/uuid": "^9.0.6", "@typescript-eslint/eslint-plugin": "^6.8.0", "@typescript-eslint/parser": "^6.8.0", + "babel-loader": "^10.0.0", "encoding": "^0.1.13", "eslint": "8.52.0", "eslint-config-next": "14.0.1", diff --git a/web/screens/Hub/ModelFilter/ModelSize/index.tsx b/web/screens/Hub/ModelFilter/ModelSize/index.tsx index b95d57f8b..a8d411e33 100644 --- a/web/screens/Hub/ModelFilter/ModelSize/index.tsx +++ b/web/screens/Hub/ModelFilter/ModelSize/index.tsx @@ -1,9 +1,8 @@ -import { useRef, useState } from 'react' +import { useState } from 'react' -import { Slider, Input, Tooltip } from '@janhq/joi' +import { Slider, Input } from '@janhq/joi' import { atom, useAtom } from 'jotai' 
-import { InfoIcon } from 'lucide-react' export const hubModelSizeMinAtom = atom(0) export const hubModelSizeMaxAtom = atom(100) diff --git a/web/screens/LocalServer/LocalServerLeftPanel/index.tsx b/web/screens/LocalServer/LocalServerLeftPanel/index.tsx index 99c2d7488..660ff305b 100644 --- a/web/screens/LocalServer/LocalServerLeftPanel/index.tsx +++ b/web/screens/LocalServer/LocalServerLeftPanel/index.tsx @@ -89,7 +89,7 @@ const LocalServerLeftPanel = () => { setIsLoading(true) const isStarted = await window.core?.api?.startServer({ host, - port, + port: parseInt(port), prefix, isCorsEnabled, isVerboseEnabled, diff --git a/web/screens/Settings/Advanced/index.tsx b/web/screens/Settings/Advanced/index.tsx index 2dd0dfb0d..5a317e892 100644 --- a/web/screens/Settings/Advanced/index.tsx +++ b/web/screens/Settings/Advanced/index.tsx @@ -73,7 +73,9 @@ const Advanced = ({ setSubdir }: { setSubdir: (subdir: string) => void }) => { const appConfiguration: AppConfiguration = await window.core?.api?.getAppConfigurations() appConfiguration.quick_ask = e - await window.core?.api?.updateAppConfiguration(appConfiguration) + await window.core?.api?.updateAppConfiguration({ + configuration: appConfiguration, + }) if (relaunchApp) relaunch() } diff --git a/web/screens/Settings/Appearance/index.tsx b/web/screens/Settings/Appearance/index.tsx index 2eea7c105..c9bd0fb93 100644 --- a/web/screens/Settings/Appearance/index.tsx +++ b/web/screens/Settings/Appearance/index.tsx @@ -2,7 +2,6 @@ import { useCallback } from 'react' import { useTheme } from 'next-themes' -import { fs, joinPath } from '@janhq/core' import { Button, ScrollArea, Select, Switch } from '@janhq/joi' import { useAtom, useAtomValue } from 'jotai' @@ -51,21 +50,17 @@ export default function AppearanceOptions() { const handleClickTheme = useCallback( async (e: string) => { setSelectedIdTheme(e) - const janThemesPath = await joinPath([janDataFolderPath, 'themes']) - const filePath = await joinPath([`${janThemesPath}/${e}`, 
`theme.json`]) - const theme: Theme = JSON.parse(await fs.readFileSync(filePath, 'utf-8')) + const theme: Theme = JSON.parse( + await window.core.api.readTheme({ + themeName: e, + }) + ) setThemeData(theme) setTheme(String(theme?.nativeTheme)) - if (theme?.reduceTransparent) { - setReduceTransparent(reduceTransparent) - } else { - setReduceTransparent(true) - } }, [ janDataFolderPath, reduceTransparent, - setReduceTransparent, setSelectedIdTheme, setTheme, setThemeData, diff --git a/web/screens/Settings/Engines/RemoteEngineSettings.tsx b/web/screens/Settings/Engines/RemoteEngineSettings.tsx index 1ddacd432..e773b1957 100644 --- a/web/screens/Settings/Engines/RemoteEngineSettings.tsx +++ b/web/screens/Settings/Engines/RemoteEngineSettings.tsx @@ -32,6 +32,8 @@ import { twMerge } from 'tailwind-merge' import Spinner from '@/containers/Loader/Spinner' import { + builtInEngines, + normalizeBuiltInEngineName, updateEngine, useGetEngines, useRefreshModelList, @@ -366,105 +368,111 @@ const RemoteEngineSettings = ({
-
-
-
-
-
-
-
- Request Headers Template -
-

- HTTP headers template required for API authentication - and version specification. -

-
-
-