Merge branch 'dev' into local-server-documentation
Commit c85b75610e
2  .github/scripts/auto-sign.sh  (vendored)

@@ -8,3 +8,5 @@ fi

# If both variables are set, execute the following commands
find "$APP_PATH" \( -type f -perm +111 -o -name "*.node" \) -exec codesign -s "$DEVELOPER_ID" --options=runtime {} \;

find "$APP_PATH" -type f -name "*.o" -exec codesign -s "$DEVELOPER_ID" --options=runtime {} \;
62  .github/workflows/jan-electron-build-nightly.yml  (vendored)

@@ -48,8 +48,17 @@ jobs:
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml

  build-macos:
    uses: ./.github/workflows/template-build-macos.yml
  build-macos-x64:
    uses: ./.github/workflows/template-build-macos-x64.yml
    needs: [get-update-version, set-public-provider]
    secrets: inherit
    with:
      ref: ${{ needs.set-public-provider.outputs.ref }}
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}

  build-macos-arm64:
    uses: ./.github/workflows/template-build-macos-arm64.yml
    needs: [get-update-version, set-public-provider]
    secrets: inherit
    with:

@@ -76,8 +85,51 @@ jobs:
      public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
      new_version: ${{ needs.get-update-version.outputs.new_version }}

  combine-latest-mac-yml:
    needs: [set-public-provider, build-macos-x64, build-macos-arm64]
    runs-on: ubuntu-latest
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
        with:
          ref: ${{ needs.set-public-provider.outputs.ref }}
      - name: Download mac-x64 artifacts
        uses: actions/download-artifact@v3
        with:
          name: latest-mac-x64
          path: ./latest-mac-x64
      - name: Download mac-arm artifacts
        uses: actions/download-artifact@v3
        with:
          name: latest-mac-arm64
          path: ./latest-mac-arm64

      - name: 'Merge latest-mac.yml'
        # unfortunately electron-builder doesn't understand that we have two different releases for mac-x64 and mac-arm, so we need to manually merge the latest files
        # see https://github.com/electron-userland/electron-builder/issues/5592
        run: |
          ls -la .
          ls -la ./latest-mac-x64
          ls -la ./latest-mac-arm64
          ls -la ./electron
          cp ./electron/merge-latest-ymls.js /tmp/merge-latest-ymls.js
          npm install js-yaml --prefix /tmp
          node /tmp/merge-latest-ymls.js ./latest-mac-x64/latest-mac.yml ./latest-mac-arm64/latest-mac.yml ./latest-mac.yml
          cat ./latest-mac.yml

      - name: Upload latest-mac.yml
        if: ${{ needs.set-public-provider.outputs.public_provider == 'cloudflare-r2' }}
        run: |
          aws s3api put-object --endpoint-url https://${{ secrets.CLOUDFLARE_ACCOUNT_ID }}.r2.cloudflarestorage.com --bucket ${{ secrets.CLOUDFLARE_R2_BUCKET_NAME }} --key "latest/latest-mac.yml" --body "./latest-mac.yml"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: auto
          AWS_EC2_METADATA_DISABLED: "true"

  noti-discord-nightly-and-update-url-readme:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider]
    needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, combine-latest-mac-yml]
    secrets: inherit
    if: github.event_name == 'schedule'
    uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml

@@ -88,7 +140,7 @@ jobs:
      new_version: ${{ needs.get-update-version.outputs.new_version }}

  noti-discord-pre-release-and-update-url-readme:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider]
    needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, combine-latest-mac-yml]
    secrets: inherit
    if: github.event_name == 'push'
    uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml

@@ -99,7 +151,7 @@ jobs:
      new_version: ${{ needs.get-update-version.outputs.new_version }}

  noti-discord-manual-and-update-url-readme:
    needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider]
    needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, combine-latest-mac-yml]
    secrets: inherit
    if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'cloudflare-r2'
    uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
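The `combine-latest-mac-yml` job above copies `electron/merge-latest-ymls.js` and runs it with `js-yaml` to fold the x64 and arm64 update manifests into a single `latest-mac.yml` (the release workflow below does the same). The script itself is not part of this diff; the sketch below only illustrates the kind of merge such a script might perform, assuming each manifest carries a `files` array of artifact entries — treat the names and structure as assumptions, not the actual implementation.

```typescript
// Hypothetical sketch of a latest-mac.yml merge — not the actual electron/merge-latest-ymls.js.
// Usage: node merge-latest-ymls.js <x64/latest-mac.yml> <arm64/latest-mac.yml> <output.yml>
import { readFileSync, writeFileSync } from 'fs'
import * as yaml from 'js-yaml'

const [x64Path, arm64Path, outPath] = process.argv.slice(2)

const x64 = yaml.load(readFileSync(x64Path, 'utf-8')) as Record<string, any>
const arm64 = yaml.load(readFileSync(arm64Path, 'utf-8')) as Record<string, any>

// Keep the x64 manifest as the base and append the arm64 artifact entries,
// so one latest-mac.yml can serve updates to both architectures.
const merged = {
  ...x64,
  files: [...(x64.files ?? []), ...(arm64.files ?? [])],
}

writeFileSync(outPath, yaml.dump(merged))
```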
84  .github/workflows/jan-electron-build.yml  (vendored)

@@ -9,8 +9,42 @@ jobs:
  get-update-version:
    uses: ./.github/workflows/template-get-update-version.yml

  build-macos:
    uses: ./.github/workflows/template-build-macos.yml
  create-draft-release:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    outputs:
      upload_url: ${{ steps.create_release.outputs.upload_url }}
      version: ${{ steps.get_version.outputs.version }}
    permissions:
      contents: write
    steps:
      - name: Extract tag name without v prefix
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
        env:
          GITHUB_REF: ${{ github.ref }}
      - name: Create Draft Release
        id: create_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref_name }}
          release_name: "${{ env.VERSION }}"
          draft: true
          prerelease: false

  build-macos-x64:
    uses: ./.github/workflows/template-build-macos-x64.yml
    secrets: inherit
    needs: [get-update-version]
    with:
      ref: ${{ github.ref }}
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}

  build-macos-arm64:
    uses: ./.github/workflows/template-build-macos-arm64.yml
    secrets: inherit
    needs: [get-update-version]
    with:

@@ -36,8 +70,52 @@ jobs:
      public_provider: github
      new_version: ${{ needs.get-update-version.outputs.new_version }}

  combine-latest-mac-yml:
    needs: [build-macos-x64, build-macos-arm64, create-draft-release]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3

      - name: Download mac-x64 artifacts
        uses: actions/download-artifact@v3
        with:
          name: latest-mac-x64
          path: ./latest-mac-x64
      - name: Download mac-arm artifacts
        uses: actions/download-artifact@v3
        with:
          name: latest-mac-arm64
          path: ./latest-mac-arm64

      - name: 'Merge latest-mac.yml'
        # unfortunately electron-builder doesn't understand that we have two different releases for mac-x64 and mac-arm, so we need to manually merge the latest files
        # see https://github.com/electron-userland/electron-builder/issues/5592
        run: |
          ls -la .
          ls -la ./latest-mac-x64
          ls -la ./latest-mac-arm64
          ls -la ./electron
          cp ./electron/merge-latest-ymls.js /tmp/merge-latest-ymls.js
          npm install js-yaml --prefix /tmp
          node /tmp/merge-latest-ymls.js ./latest-mac-x64/latest-mac.yml ./latest-mac-arm64/latest-mac.yml ./latest-mac.yml
          cat ./latest-mac.yml

      - name: Yet Another Upload Release Asset Action
        uses: shogo82148/actions-upload-release-asset@v1.7.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
          asset_path: ./latest-mac.yml
          asset_name: latest-mac.yml
          asset_content_type: text/yaml
          overwrite: true

  update_release_draft:
    needs: [build-macos, build-windows-x64, build-linux-x64]
    needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, combine-latest-mac-yml]
    permissions:
      # write permission is required to create a github release
      contents: write
160  .github/workflows/template-build-macos-arm64.yml  (vendored, new file)

@@ -0,0 +1,160 @@
name: build-macos
on:
  workflow_call:
    inputs:
      ref:
        required: true
        type: string
        default: 'refs/heads/main'
      public_provider:
        required: true
        type: string
        default: none
        description: 'none: build only, github: build and publish to github, cloudflare: build and publish to cloudflare'
      new_version:
        required: true
        type: string
        default: ''
      cloudflare_r2_path:
        required: false
        type: string
        default: '/latest/'
    secrets:
      CLOUDFLARE_R2_BUCKET_NAME:
        required: false
      CLOUDFLARE_R2_ACCESS_KEY_ID:
        required: false
      CLOUDFLARE_R2_SECRET_ACCESS_KEY:
        required: false
      CLOUDFLARE_ACCOUNT_ID:
        required: false
      CODE_SIGN_P12_BASE64:
        required: false
      CODE_SIGN_P12_PASSWORD:
        required: false
      APPLE_ID:
        required: false
      APPLE_APP_SPECIFIC_PASSWORD:
        required: false
      DEVELOPER_ID:
        required: false

jobs:
  build-macos:
    runs-on: macos-silicon
    environment: production
    permissions:
      contents: write
    steps:
      - name: Getting the repo
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.ref }}

      - name: Installing node
        uses: actions/setup-node@v1
        with:
          node-version: 20
      - name: Unblock keychain
        run: |
          security unlock-keychain -p ${{ secrets.KEYCHAIN_PASSWORD }} ~/Library/Keychains/login.keychain-db
      # - uses: actions/setup-python@v5
      #   with:
      #     python-version: '3.11'

      # - name: Install jq
      #   uses: dcarbone/install-jq-action@v2.0.1

      - name: Update app version based on latest release tag with build number
        if: inputs.public_provider != 'github'
        run: |
          echo "Version: ${{ inputs.new_version }}"
          # Update the version in electron/package.json
          jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json

          jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
          mv /tmp/package.json web/package.json

          jq '.build.publish = [{"provider": "generic", "url": "${{ secrets.CLOUDFLARE_R2_PUBLIC_URL }}", "channel": "latest"}, {"provider": "s3", "bucket": "${{ secrets.CLOUDFLARE_R2_BUCKET_NAME }}", "region": "auto", "endpoint": "https://${{ secrets.CLOUDFLARE_ACCOUNT_ID }}.r2.cloudflarestorage.com", "path": "${{ inputs.cloudflare_r2_path }}", "channel": "latest"}]' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json
          cat electron/package.json

      - name: Update app version base on tag
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
        run: |
          if [[ ! "${VERSION_TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo "Error: Tag is not valid!"
            exit 1
          fi
          jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
          mv /tmp/package.json electron/package.json
          jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
          mv /tmp/package.json web/package.json
        env:
          VERSION_TAG: ${{ inputs.new_version }}

      # - name: Get Cer for code signing
      #   run: base64 -d <<< "$CODE_SIGN_P12_BASE64" > /tmp/codesign.p12
      #   shell: bash
      #   env:
      #     CODE_SIGN_P12_BASE64: ${{ secrets.CODE_SIGN_P12_BASE64 }}

      # - uses: apple-actions/import-codesign-certs@v2
      #   continue-on-error: true
      #   with:
      #     p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
      #     p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}

      - name: Build and publish app to cloudflare r2 or github artifactory
        if: inputs.public_provider != 'github'
        run: |
          # check public_provider is true or not
          echo "public_provider is ${{ inputs.public_provider }}"
          if [ "${{ inputs.public_provider }}" == "none" ]; then
            make build
          else
            make build-and-publish
          fi
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # CSC_LINK: "/tmp/codesign.p12"
          # CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
          # CSC_IDENTITY_AUTO_DISCOVERY: "true"
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
          APP_PATH: "."
          DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CLOUDFLARE_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CLOUDFLARE_R2_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: auto
          AWS_EC2_METADATA_DISABLED: "true"

      - name: Build and publish app to github
        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
        run: |
          make build-and-publish
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # CSC_LINK: "/tmp/codesign.p12"
          # CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
          # CSC_IDENTITY_AUTO_DISCOVERY: "true"
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
          APP_PATH: "."
          DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
          ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
          ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}

      - name: Upload Artifact
        if: inputs.public_provider != 'github'
        uses: actions/upload-artifact@v2
        with:
          name: jan-mac-arm64-${{ inputs.new_version }}
          path: ./electron/dist/jan-mac-arm64-${{ inputs.new_version }}.dmg

      - name: Upload Artifact
        uses: actions/upload-artifact@v2
        with:
          name: latest-mac-arm64
          path: ./electron/dist/latest-mac.yml

@@ -148,9 +148,8 @@ jobs:
          path: ./electron/dist/jan-mac-x64-${{ inputs.new_version }}.dmg

      - name: Upload Artifact
        if: inputs.public_provider != 'github'
        uses: actions/upload-artifact@v2
        with:
          name: jan-mac-arm64-${{ inputs.new_version }}
          path: ./electron/dist/jan-mac-arm64-${{ inputs.new_version }}.dmg
          name: latest-mac-x64
          path: ./electron/dist/latest-mac.yml
67  Dockerfile

@@ -1,39 +1,58 @@
FROM node:20-bullseye AS base
FROM node:20-bookworm AS base

# 1. Install dependencies only when needed
FROM base AS deps
FROM base AS builder

# Install g++ 11
RUN apt update && apt install -y gcc-11 g++-11 cpp-11 jq xsel && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install dependencies based on the preferred package manager
COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./
RUN yarn install
COPY . ./

RUN export NITRO_VERSION=$(cat extensions/inference-nitro-extension/bin/version.txt) && \
    jq --arg nitroVersion $NITRO_VERSION '(.scripts."downloadnitro:linux" | gsub("\\${NITRO_VERSION}"; $nitroVersion)) | gsub("\r"; "")' extensions/inference-nitro-extension/package.json > /tmp/newcommand.txt && export NEW_COMMAND=$(sed 's/^"//;s/"$//' /tmp/newcommand.txt) && jq --arg newCommand "$NEW_COMMAND" '.scripts."downloadnitro:linux" = $newCommand' extensions/inference-nitro-extension/package.json > /tmp/package.json && mv /tmp/package.json extensions/inference-nitro-extension/package.json
RUN make install-and-build
RUN yarn workspace jan-web install

RUN export NODE_ENV=production && yarn workspace jan-web build

# # 2. Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
# This will do the trick, use the corresponding env file for each environment.
RUN yarn workspace server install
RUN yarn server:prod

# 3. Production image, copy all the files and run next
FROM base AS runner

# Install g++ 11
RUN apt update && apt install -y gcc-11 g++-11 cpp-11 jq xsel && rm -rf /var/lib/apt/lists/*

WORKDIR /app

ENV NODE_ENV=production
# Copy the package.json and yarn.lock of root yarn space to leverage Docker cache
COPY --from=builder /app/package.json ./package.json
COPY --from=builder /app/node_modules ./node_modules/
COPY --from=builder /app/yarn.lock ./yarn.lock

# RUN addgroup -g 1001 -S nodejs;
COPY --from=builder /app/server/build ./
# Copy the package.json, yarn.lock, and build output of server yarn space to leverage Docker cache
COPY --from=builder /app/server ./server/
COPY --from=builder /app/docs/openapi ./docs/openapi/

# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder /app/server/node_modules ./node_modules
COPY --from=builder /app/server/package.json ./package.json
# Copy pre-install dependencies
COPY --from=builder /app/pre-install ./pre-install/

EXPOSE 4000 3928
# Copy the package.json, yarn.lock, and output of web yarn space to leverage Docker cache
COPY --from=builder /app/web/out ./web/out/
COPY --from=builder /app/web/.next ./web/.next/
COPY --from=builder /app/web/package.json ./web/package.json
COPY --from=builder /app/web/yarn.lock ./web/yarn.lock
COPY --from=builder /app/models ./models/

ENV PORT 4000
ENV APPDATA /app/data
RUN npm install -g serve@latest

CMD ["node", "main.js"]
EXPOSE 1337 3000 3928

ENV JAN_API_HOST 0.0.0.0
ENV JAN_API_PORT 1337

CMD ["sh", "-c", "cd server && node build/main.js & cd web && npx serve out"]

# docker build -t jan .
# docker run -p 1337:1337 -p 3000:3000 -p 3928:3928 jan
85  Dockerfile.gpu  (new file)

@@ -0,0 +1,85 @@
# Please change the base image to the appropriate CUDA version base on NVIDIA Driver Compatibility
# Run nvidia-smi to check the CUDA version and the corresponding driver version
# Then update the base image to the appropriate CUDA version refer https://catalog.ngc.nvidia.com/orgs/nvidia/containers/cuda/tags

FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 AS base

# 1. Install dependencies only when needed
FROM base AS builder

# Install g++ 11
RUN apt update && apt install -y gcc-11 g++-11 cpp-11 jq xsel curl gnupg make python3-dev && curl -sL https://deb.nodesource.com/setup_20.x | bash - && apt install nodejs -y && rm -rf /var/lib/apt/lists/*

# Update alternatives for GCC and related tools
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 110 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
    --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-11 \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-11 && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-11 110

RUN npm install -g yarn

WORKDIR /app

# Install dependencies based on the preferred package manager
COPY . ./

RUN export NITRO_VERSION=$(cat extensions/inference-nitro-extension/bin/version.txt) && \
    jq --arg nitroVersion $NITRO_VERSION '(.scripts."downloadnitro:linux" | gsub("\\${NITRO_VERSION}"; $nitroVersion)) | gsub("\r"; "")' extensions/inference-nitro-extension/package.json > /tmp/newcommand.txt && export NEW_COMMAND=$(sed 's/^"//;s/"$//' /tmp/newcommand.txt) && jq --arg newCommand "$NEW_COMMAND" '.scripts."downloadnitro:linux" = $newCommand' extensions/inference-nitro-extension/package.json > /tmp/package.json && mv /tmp/package.json extensions/inference-nitro-extension/package.json
RUN make install-and-build
RUN yarn workspace jan-web install

RUN export NODE_ENV=production && yarn workspace jan-web build

# # 2. Rebuild the source code only when needed
FROM base AS runner

# Install g++ 11
RUN apt update && apt install -y gcc-11 g++-11 cpp-11 jq xsel curl gnupg make python3-dev && curl -sL https://deb.nodesource.com/setup_20.x | bash - && apt-get install nodejs -y && rm -rf /var/lib/apt/lists/*

# Update alternatives for GCC and related tools
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 110 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-11 \
    --slave /usr/bin/gcov gcov /usr/bin/gcov-11 \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-11 \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-11 && \
    update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-11 110

RUN npm install -g yarn

WORKDIR /app

# Copy the package.json and yarn.lock of root yarn space to leverage Docker cache
COPY --from=builder /app/package.json ./package.json
COPY --from=builder /app/node_modules ./node_modules/
COPY --from=builder /app/yarn.lock ./yarn.lock

# Copy the package.json, yarn.lock, and build output of server yarn space to leverage Docker cache
COPY --from=builder /app/server ./server/
COPY --from=builder /app/docs/openapi ./docs/openapi/

# Copy pre-install dependencies
COPY --from=builder /app/pre-install ./pre-install/

# Copy the package.json, yarn.lock, and output of web yarn space to leverage Docker cache
COPY --from=builder /app/web/out ./web/out/
COPY --from=builder /app/web/.next ./web/.next/
COPY --from=builder /app/web/package.json ./web/package.json
COPY --from=builder /app/web/yarn.lock ./web/yarn.lock
COPY --from=builder /app/models ./models/

RUN npm install -g serve@latest

EXPOSE 1337 3000 3928

ENV LD_LIBRARY_PATH=/usr/local/cuda/targets/x86_64-linux/lib:/usr/local/cuda-12.0/compat${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}

ENV JAN_API_HOST 0.0.0.0
ENV JAN_API_PORT 1337

CMD ["sh", "-c", "cd server && node build/main.js & cd web && npx serve out"]

# pre-requisites: nvidia-docker
# docker build -t jan-gpu . -f Dockerfile.gpu
# docker run -p 1337:1337 -p 3000:3000 -p 3928:3928 --gpus all jan-gpu
4  Makefile

@@ -24,9 +24,9 @@ endif

check-file-counts: install-and-build
ifeq ($(OS),Windows_NT)
	powershell -Command "if ((Get-ChildItem -Path electron/pre-install -Filter *.tgz | Measure-Object | Select-Object -ExpandProperty Count) -ne (Get-ChildItem -Path extensions -Directory | Measure-Object | Select-Object -ExpandProperty Count)) { Write-Host 'Number of .tgz files in electron/pre-install does not match the number of subdirectories in extension'; exit 1 } else { Write-Host 'Extension build successful' }"
	powershell -Command "if ((Get-ChildItem -Path pre-install -Filter *.tgz | Measure-Object | Select-Object -ExpandProperty Count) -ne (Get-ChildItem -Path extensions -Directory | Measure-Object | Select-Object -ExpandProperty Count)) { Write-Host 'Number of .tgz files in pre-install does not match the number of subdirectories in extension'; exit 1 } else { Write-Host 'Extension build successful' }"
else
	@tgz_count=$$(find electron/pre-install -type f -name "*.tgz" | wc -l); dir_count=$$(find extensions -mindepth 1 -maxdepth 1 -type d | wc -l); if [ $$tgz_count -ne $$dir_count ]; then echo "Number of .tgz files in electron/pre-install ($$tgz_count) does not match the number of subdirectories in extension ($$dir_count)"; exit 1; else echo "Extension build successful"; fi
	@tgz_count=$$(find pre-install -type f -name "*.tgz" | wc -l); dir_count=$$(find extensions -mindepth 1 -maxdepth 1 -type d | wc -l); if [ $$tgz_count -ne $$dir_count ]; then echo "Number of .tgz files in pre-install ($$tgz_count) does not match the number of subdirectories in extension ($$dir_count)"; exit 1; else echo "Extension build successful"; fi
endif

dev: check-file-counts
90  README.md

@@ -43,31 +43,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
  <tr style="text-align:center">
    <td style="text-align:center"><b>Stable (Recommended)</b></td>
    <td style="text-align:center">
      <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-win-x64-0.4.5.exe'>
      <a href='https://github.com/janhq/jan/releases/download/v0.4.6/jan-win-x64-0.4.6.exe'>
        <img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
        <b>jan.exe</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-x64-0.4.5.dmg'>
      <a href='https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-x64-0.4.6.dmg'>
        <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
        <b>Intel</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-arm64-0.4.5.dmg'>
      <a href='https://github.com/janhq/jan/releases/download/v0.4.6/jan-mac-arm64-0.4.6.dmg'>
        <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
        <b>M1/M2</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-linux-amd64-0.4.5.deb'>
      <a href='https://github.com/janhq/jan/releases/download/v0.4.6/jan-linux-amd64-0.4.6.deb'>
        <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
        <b>jan.deb</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-linux-x86_64-0.4.5.AppImage'>
      <a href='https://github.com/janhq/jan/releases/download/v0.4.6/jan-linux-x86_64-0.4.6.AppImage'>
        <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
        <b>jan.AppImage</b>
      </a>

@@ -76,31 +76,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
  <tr style="text-align:center">
    <td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
    <td style="text-align:center">
      <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.5-260.exe'>
      <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.6-264.exe'>
        <img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
        <b>jan.exe</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.5-260.dmg'>
      <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.6-264.dmg'>
        <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
        <b>Intel</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.5-260.dmg'>
      <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.6-264.dmg'>
        <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
        <b>M1/M2</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.5-260.deb'>
      <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.6-264.deb'>
        <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
        <b>jan.deb</b>
      </a>
    </td>
    <td style="text-align:center">
      <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.5-260.AppImage'>
      <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.6-264.AppImage'>
        <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
        <b>jan.AppImage</b>
      </a>
@@ -218,6 +218,76 @@ make build

This will build the macOS (M1/M2) app for production (with code signing already done) and put the result in the `dist` folder.

### Docker mode

- Supported OS: Linux, WSL2 Docker
- Pre-requisites:
  - `docker` and `docker compose`, follow the instructions [here](https://docs.docker.com/engine/install/ubuntu/)

    ```bash
    curl -fsSL https://get.docker.com -o get-docker.sh
    sudo sh ./get-docker.sh --dry-run
    ```

  - `nvidia-driver` and `nvidia-docker2`, follow the instructions [here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) (if you want to run in GPU mode)

- Run Jan in Docker mode

  - **Option 1**: Run Jan in CPU mode

    ```bash
    docker compose --profile cpu up -d
    ```

  - **Option 2**: Run Jan in GPU mode

    - **Step 1**: Check CUDA compatibility with your NVIDIA driver by running `nvidia-smi` and checking the CUDA version in the output

      ```bash
      nvidia-smi

      # Output
      +---------------------------------------------------------------------------------------+
      | NVIDIA-SMI 531.18                 Driver Version: 531.18       CUDA Version: 12.1     |
      |-----------------------------------------+----------------------+----------------------+
      | GPU  Name                      TCC/WDDM | Bus-Id        Disp.A | Volatile Uncorr. ECC |
      | Fan  Temp  Perf            Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
      |                                         |                      |               MIG M. |
      |=========================================+======================+======================|
      |   0  NVIDIA GeForce RTX 4070 Ti    WDDM | 00000000:01:00.0  On |                  N/A |
      |  0%   44C    P8              16W / 285W |  1481MiB / 12282MiB  |      2%      Default |
      |                                         |                      |                  N/A |
      +-----------------------------------------+----------------------+----------------------+
      |   1  NVIDIA GeForce GTX 1660 Ti    WDDM | 00000000:02:00.0 Off |                  N/A |
      |  0%   49C    P8              14W / 120W |     0MiB /  6144MiB  |      0%      Default |
      |                                         |                      |                  N/A |
      +-----------------------------------------+----------------------+----------------------+
      |   2  NVIDIA GeForce GTX 1660 Ti    WDDM | 00000000:05:00.0 Off |                  N/A |
      | 29%   38C    P8              11W / 120W |     0MiB /  6144MiB  |      0%      Default |
      |                                         |                      |                  N/A |
      +-----------------------------------------+----------------------+----------------------+

      +---------------------------------------------------------------------------------------+
      | Processes:                                                                             |
      |  GPU   GI   CI        PID   Type   Process name                            GPU Memory |
      |        ID   ID                                                             Usage      |
      |=======================================================================================|
      ```

    - **Step 2**: Go to https://catalog.ngc.nvidia.com/orgs/nvidia/containers/cuda/tags and find the smallest minor version of the image tag that matches the CUDA version from the output of `nvidia-smi` (e.g. 12.1 -> 12.1.0)

    - **Step 3**: Update line 5 of `Dockerfile.gpu` with the matching image tag from step 2 (e.g. change `FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 AS base` to `FROM nvidia/cuda:12.1.0-runtime-ubuntu22.04 AS base`)

    - **Step 4**: Run the command to start Jan in GPU mode

      ```bash
      # GPU mode
      docker compose --profile gpu up -d
      ```

This will start the web server and you can access Jan at `http://localhost:3000`.

> Note: Currently, Docker mode only works for development on localhost; production is not supported yet. The RAG feature is not yet supported in Docker mode.

## Acknowledgements

Jan builds on top of other open-source projects:
@@ -57,6 +57,7 @@
    "rollup-plugin-typescript2": "^0.36.0",
    "ts-jest": "^26.1.1",
    "tslib": "^2.6.2",
    "typescript": "^5.2.2"
    "typescript": "^5.2.2",
    "rimraf": "^3.0.2"
  }
}
@@ -30,6 +30,7 @@ export enum DownloadRoute {
  downloadFile = 'downloadFile',
  pauseDownload = 'pauseDownload',
  resumeDownload = 'resumeDownload',
  getDownloadProgress = 'getDownloadProgress',
}

export enum DownloadEvent {
@@ -12,6 +12,8 @@ import {
import { JanApiRouteConfiguration } from '../common/configuration'
import { startModel, stopModel } from '../common/startStopModel'
import { ModelSettingParams } from '../../../types'
import { getJanDataFolderPath } from '../../utils'
import { normalizeFilePath } from '../../path'

export const commonRouter = async (app: HttpServer) => {
  // Common Routes

@@ -52,7 +54,14 @@ export const commonRouter = async (app: HttpServer) => {
  // App Routes
  app.post(`/app/${AppRoute.joinPath}`, async (request: any, reply: any) => {
    const args = JSON.parse(request.body) as any[]
    reply.send(JSON.stringify(join(...args[0])))

    const paths = args[0].map((arg: string) =>
      typeof arg === 'string' && (arg.startsWith(`file:/`) || arg.startsWith(`file:\\`))
        ? join(getJanDataFolderPath(), normalizeFilePath(arg))
        : arg
    )

    reply.send(JSON.stringify(join(...paths)))
  })

  app.post(`/app/${AppRoute.baseName}`, async (request: any, reply: any) => {
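The change above means path segments sent to the `joinPath` route that begin with `file:` are now resolved under the Jan data folder before joining. A hypothetical client call is sketched below; the host, port, and `/v1/app` mount point are assumptions for illustration and are not part of this diff.

```typescript
// Hypothetical caller for the joinPath route — the /v1/app mount point,
// host, and port are assumptions for illustration only.
async function joinPath(segments: string[]): Promise<string> {
  const res = await fetch('http://localhost:1337/v1/app/joinPath', {
    method: 'POST',
    // The route expects a JSON array whose first element is the list of segments.
    body: JSON.stringify([segments]),
  })
  return res.json()
}

// A segment like 'file://models' is resolved against the Jan data folder,
// while plain segments such as 'config.json' are joined as-is.
joinPath(['file://models', 'config.json']).then(console.log)
```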
@@ -4,55 +4,109 @@ import { DownloadManager } from '../../download'
import { HttpServer } from '../HttpServer'
import { createWriteStream } from 'fs'
import { getJanDataFolderPath } from '../../utils'
import { normalizeFilePath } from "../../path";
import { normalizeFilePath } from '../../path'
import { DownloadState } from '../../../types'

export const downloadRouter = async (app: HttpServer) => {
  app.post(`/${DownloadRoute.downloadFile}`, async (req, res) => {
    const strictSSL = !(req.query.ignoreSSL === "true");
    const proxy = req.query.proxy?.startsWith("http") ? req.query.proxy : undefined;
    const body = JSON.parse(req.body as any);
    const normalizedArgs = body.map((arg: any) => {
      if (typeof arg === "string") {
        return join(getJanDataFolderPath(), normalizeFilePath(arg));
  app.get(`/${DownloadRoute.getDownloadProgress}/:modelId`, async (req, res) => {
    const modelId = req.params.modelId

    console.debug(`Getting download progress for model ${modelId}`)
    console.debug(
      `All Download progress: ${JSON.stringify(DownloadManager.instance.downloadProgressMap)}`
    )

    // check if null DownloadManager.instance.downloadProgressMap
    if (!DownloadManager.instance.downloadProgressMap[modelId]) {
      return res.status(404).send({
        message: 'Download progress not found',
      })
    } else {
      return res.status(200).send(DownloadManager.instance.downloadProgressMap[modelId])
    }
      return arg;
    });
  })

    const localPath = normalizedArgs[1];
    const fileName = localPath.split("/").pop() ?? "";
  app.post(`/${DownloadRoute.downloadFile}`, async (req, res) => {
    const strictSSL = !(req.query.ignoreSSL === 'true')
    const proxy = req.query.proxy?.startsWith('http') ? req.query.proxy : undefined
    const body = JSON.parse(req.body as any)
    const normalizedArgs = body.map((arg: any) => {
      if (typeof arg === 'string' && arg.startsWith('file:')) {
        return join(getJanDataFolderPath(), normalizeFilePath(arg))
      }
      return arg
    })

    const request = require("request");
    const progress = require("request-progress");
    const localPath = normalizedArgs[1]
    const array = localPath.split('/')
    const fileName = array.pop() ?? ''
    const modelId = array.pop() ?? ''
    console.debug('downloadFile', normalizedArgs, fileName, modelId)

    const rq = request({ url: normalizedArgs[0], strictSSL, proxy });
    const request = require('request')
    const progress = require('request-progress')

    const rq = request({ url: normalizedArgs[0], strictSSL, proxy })
    progress(rq, {})
      .on("progress", function (state: any) {
        console.log("download onProgress", state);
      .on('progress', function (state: any) {
        const downloadProps: DownloadState = {
          ...state,
          modelId,
          fileName,
          downloadState: 'downloading',
        }
        console.debug(`Download ${modelId} onProgress`, downloadProps)
        DownloadManager.instance.downloadProgressMap[modelId] = downloadProps
      })
      .on("error", function (err: Error) {
        console.log("download onError", err);
      })
      .on("end", function () {
        console.log("download onEnd");
      })
      .pipe(createWriteStream(normalizedArgs[1]));
      .on('error', function (err: Error) {
        console.debug(`Download ${modelId} onError`, err.message)

    DownloadManager.instance.setRequest(fileName, rq);
  });
        const currentDownloadState = DownloadManager.instance.downloadProgressMap[modelId]
        if (currentDownloadState) {
          DownloadManager.instance.downloadProgressMap[modelId] = {
            ...currentDownloadState,
            downloadState: 'error',
          }
        }
      })
      .on('end', function () {
        console.debug(`Download ${modelId} onEnd`)

        const currentDownloadState = DownloadManager.instance.downloadProgressMap[modelId]
        if (currentDownloadState) {
          if (currentDownloadState.downloadState === 'downloading') {
            // if the previous state is downloading, then set the state to end (success)
            DownloadManager.instance.downloadProgressMap[modelId] = {
              ...currentDownloadState,
              downloadState: 'end',
            }
          }
        }
      })
      .pipe(createWriteStream(normalizedArgs[1]))

    DownloadManager.instance.setRequest(localPath, rq)
    res.status(200).send({ message: 'Download started' })
  })

  app.post(`/${DownloadRoute.abortDownload}`, async (req, res) => {
    const body = JSON.parse(req.body as any);
    const body = JSON.parse(req.body as any)
    const normalizedArgs = body.map((arg: any) => {
      if (typeof arg === "string") {
        return join(getJanDataFolderPath(), normalizeFilePath(arg));
      if (typeof arg === 'string' && arg.startsWith('file:')) {
        return join(getJanDataFolderPath(), normalizeFilePath(arg))
      }
      return arg;
    });
      return arg
    })

    const localPath = normalizedArgs[0];
    const fileName = localPath.split("/").pop() ?? "";
    const rq = DownloadManager.instance.networkRequests[fileName];
    DownloadManager.instance.networkRequests[fileName] = undefined;
    rq?.abort();
  });
};
    const localPath = normalizedArgs[0]
    const fileName = localPath.split('/').pop() ?? ''
    const rq = DownloadManager.instance.networkRequests[fileName]
    DownloadManager.instance.networkRequests[fileName] = undefined
    rq?.abort()
    if (rq) {
      res.status(200).send({ message: 'Download aborted' })
    } else {
      res.status(404).send({ message: 'Download not found' })
    }
  })
}
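For orientation only, here is a minimal sketch of how a client could poll the new download-progress route. The `http://localhost:1337/v1/download` base URL and the example model id are assumptions, not part of this diff; the route simply returns whatever is stored in `DownloadManager.instance.downloadProgressMap`.

```typescript
// Hypothetical polling client for the getDownloadProgress route.
// Base URL and model id are assumptions for illustration only.
const BASE_URL = 'http://localhost:1337/v1/download'

async function pollDownloadProgress(modelId: string, intervalMs = 1000): Promise<void> {
  for (;;) {
    const res = await fetch(`${BASE_URL}/getDownloadProgress/${modelId}`)
    if (res.status === 404) {
      console.log(`No download progress recorded for ${modelId}`)
      return
    }
    const state = await res.json()
    // request-progress reports percent as a 0-1 fraction
    console.log(`${modelId}: ${Math.round(state.percent * 100)}% (${state.downloadState})`)
    if (state.downloadState === 'end' || state.downloadState === 'error') return
    await new Promise((resolve) => setTimeout(resolve, intervalMs))
  }
}

pollDownloadProgress('tinyllama-1.1b').catch(console.error)
```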
@@ -1,14 +1,29 @@
import { FileManagerRoute } from '../../../api'
import { HttpServer } from '../../index'
import { join } from 'path'

export const fsRouter = async (app: HttpServer) => {
  app.post(`/app/${FileManagerRoute.syncFile}`, async (request: any, reply: any) => {})
export const fileManagerRouter = async (app: HttpServer) => {
  app.post(`/fs/${FileManagerRoute.syncFile}`, async (request: any, reply: any) => {
    const reflect = require('@alumna/reflect')
    const args = JSON.parse(request.body)
    return reflect({
      src: args[0],
      dest: args[1],
      recursive: true,
      delete: false,
      overwrite: true,
      errorOnExist: false,
    })
  })

  app.post(`/app/${FileManagerRoute.getJanDataFolderPath}`, async (request: any, reply: any) => {})
  app.post(`/fs/${FileManagerRoute.getJanDataFolderPath}`, async (request: any, reply: any) =>
    global.core.appPath()
  )

  app.post(`/app/${FileManagerRoute.getResourcePath}`, async (request: any, reply: any) => {})
  app.post(`/fs/${FileManagerRoute.getResourcePath}`, async (request: any, reply: any) =>
    join(global.core.appPath(), '../../..')
  )

  app.post(`/app/${FileManagerRoute.getUserHomePath}`, async (request: any, reply: any) => {})

  app.post(`/app/${FileManagerRoute.fileStat}`, async (request: any, reply: any) => {})
  app.post(`/fs/${FileManagerRoute.fileStat}`, async (request: any, reply: any) => {})
}
@@ -1,8 +1,9 @@
import { FileSystemRoute } from '../../../api'
import { FileManagerRoute, FileSystemRoute } from '../../../api'
import { join } from 'path'
import { HttpServer } from '../HttpServer'
import { getJanDataFolderPath } from '../../utils'
import { normalizeFilePath } from '../../path'
import { writeFileSync } from 'fs'

export const fsRouter = async (app: HttpServer) => {
  const moduleName = 'fs'

@@ -26,4 +27,14 @@ export const fsRouter = async (app: HttpServer) => {
      }
    })
  })
  app.post(`/${FileManagerRoute.writeBlob}`, async (request: any, reply: any) => {
    try {
      const args = JSON.parse(request.body) as any[]
      console.log('writeBlob:', args[0])
      const dataBuffer = Buffer.from(args[1], 'base64')
      writeFileSync(args[0], dataBuffer)
    } catch (err) {
      console.error(`writeFile ${request.body} result: ${err}`)
    }
  })
}
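To illustrate the new `writeBlob` handler above: it parses the request body as a JSON array of `[destination path, base64 data]` and writes the decoded bytes to disk. The sketch below is hypothetical — the `/v1/fs` mount point, host, and port are assumptions, not part of this diff.

```typescript
// Hypothetical caller for the writeBlob route — mount point, host, and port
// are assumptions for illustration only.
import { readFileSync } from 'fs'

async function writeBlob(destPath: string, localFile: string): Promise<void> {
  const base64Data = readFileSync(localFile).toString('base64')
  await fetch('http://localhost:1337/v1/fs/writeBlob', {
    method: 'POST',
    // The handler parses the body as [destination path, base64-encoded data]
    body: JSON.stringify([destPath, base64Data]),
  })
}

writeBlob('/tmp/thumbnail.png', './thumbnail.png').catch(console.error)
```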
@@ -4,6 +4,7 @@ import { threadRouter } from './thread'
import { fsRouter } from './fs'
import { extensionRouter } from './extension'
import { downloadRouter } from './download'
import { fileManagerRouter } from './fileManager'

export const v1Router = async (app: HttpServer) => {
  // MARK: External Routes

@@ -16,6 +17,8 @@ export const v1Router = async (app: HttpServer) => {
  app.register(fsRouter, {
    prefix: '/fs',
  })
  app.register(fileManagerRouter)

  app.register(extensionRouter, {
    prefix: '/extension',
  })
@@ -1,15 +1,18 @@
import { DownloadState } from '../types'

/**
 * Manages file downloads and network requests.
 */
export class DownloadManager {
  public networkRequests: Record<string, any> = {};
  public networkRequests: Record<string, any> = {}

  public static instance: DownloadManager = new DownloadManager();
  public static instance: DownloadManager = new DownloadManager()

  public downloadProgressMap: Record<string, DownloadState> = {}

  constructor() {
    if (DownloadManager.instance) {
      return DownloadManager.instance;
      return DownloadManager.instance
    }
  }
  /**

@@ -18,6 +21,6 @@ export class DownloadManager {
   * @param {Request | undefined} request - The network request to set, or undefined to clear the request.
   */
  setRequest(fileName: string, request: any | undefined) {
    this.networkRequests[fileName] = request;
    this.networkRequests[fileName] = request
  }
}
@@ -41,8 +41,8 @@ async function registerExtensionProtocol() {
    console.error('Electron is not available')
  }
  const extensionPath = ExtensionManager.instance.getExtensionsPath()
  if (electron) {
    return electron.protocol.registerFileProtocol('extension', (request: any, callback: any) => {
  if (electron && electron.protocol) {
    return electron.protocol?.registerFileProtocol('extension', (request: any, callback: any) => {
      const entry = request.url.substr('extension://'.length - 1)

      const url = normalize(extensionPath + entry)

@@ -69,7 +69,7 @@ export function useExtensions(extensionsPath: string) {

  // Read extension list from extensions folder
  const extensions = JSON.parse(
    readFileSync(ExtensionManager.instance.getExtensionsFile(), 'utf-8'),
    readFileSync(ExtensionManager.instance.getExtensionsFile(), 'utf-8')
  )
  try {
    // Create and store a Extension instance for each extension in list

@@ -82,7 +82,7 @@ export function useExtensions(extensionsPath: string) {
    throw new Error(
      'Could not successfully rebuild list of installed extensions.\n' +
        error +
        '\nPlease check the extensions.json file in the extensions folder.',
        '\nPlease check the extensions.json file in the extensions folder.'
    )
  }

@@ -122,7 +122,7 @@ function loadExtension(ext: any) {
export function getStore() {
  if (!ExtensionManager.instance.getExtensionsFile()) {
    throw new Error(
      'The extension path has not yet been set up. Please run useExtensions before accessing the store',
      'The extension path has not yet been set up. Please run useExtensions before accessing the store'
    )
  }
8  core/src/types/assistant/assistantEvent.ts  (new file)

@@ -0,0 +1,8 @@
/**
 * The `EventName` enumeration contains the names of all the available events in the Jan platform.
 */
export enum AssistantEvent {
  /** The `OnAssistantsUpdate` event is emitted when the assistant list is updated. */
  OnAssistantsUpdate = 'OnAssistantsUpdate',
}

@@ -1,2 +1,3 @@
export * from './assistantEntity'
export * from './assistantEvent'
export * from './assistantInterface'
@@ -2,3 +2,26 @@ export type FileStat = {
  isDirectory: boolean
  size: number
}

export type DownloadState = {
  modelId: string
  filename: string
  time: DownloadTime
  speed: number
  percent: number

  size: DownloadSize
  children?: DownloadState[]
  error?: string
  downloadState: 'downloading' | 'error' | 'end'
}

type DownloadTime = {
  elapsed: number
  remaining: number
}

type DownloadSize = {
  total: number
  transferred: number
}
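For readers skimming the type above, here is an illustrative value that satisfies the new `DownloadState` shape. Every concrete number and name is invented for the example, and the `@janhq/core` import path is an assumption about where the type is exported from.

```typescript
// Illustrative only — a value satisfying the new DownloadState type.
// All concrete values are made up; the import path is an assumption.
import { DownloadState } from '@janhq/core'

const example: DownloadState = {
  modelId: 'tinyllama-1.1b',
  filename: 'model.gguf',
  time: { elapsed: 12_000, remaining: 48_000 },
  speed: 5_242_880, // bytes per second
  percent: 0.2, // request-progress reports a 0-1 fraction
  size: { total: 314_572_800, transferred: 62_914_560 },
  downloadState: 'downloading',
}
```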
@@ -12,4 +12,6 @@ export enum ModelEvent {
  OnModelStop = 'OnModelStop',
  /** The `OnModelStopped` event is emitted when a model stopped ok. */
  OnModelStopped = 'OnModelStopped',
  /** The `OnModelUpdate` event is emitted when the model list is updated. */
  OnModelsUpdate = 'OnModelsUpdate',
}
117  docker-compose.yml  (new file)

@@ -0,0 +1,117 @@
# Docker Compose file for setting up Minio, createbuckets, app_cpu, and app_gpu services

version: '3.7'

services:
  # Minio service for object storage
  minio:
    image: minio/minio
    volumes:
      - minio_data:/data
    ports:
      - "9000:9000"
      - "9001:9001"
    environment:
      # Set the root user and password for Minio
      MINIO_ROOT_USER: minioadmin # This acts as AWS_ACCESS_KEY
      MINIO_ROOT_PASSWORD: minioadmin # This acts as AWS_SECRET_ACCESS_KEY
    command: server --console-address ":9001" /data
    restart: always
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3
    networks:
      vpcbr:
        ipv4_address: 10.5.0.2

  # createbuckets service to create a bucket and set its policy
  createbuckets:
    image: minio/mc
    depends_on:
      - minio
    entrypoint: >
      /bin/sh -c "
      /usr/bin/mc alias set myminio http://minio:9000 minioadmin minioadmin;
      /usr/bin/mc mb myminio/mybucket;
      /usr/bin/mc policy set public myminio/mybucket;
      exit 0;
      "
    networks:
      vpcbr:

  # app_cpu service for running the CPU version of the application
  app_cpu:
    image: jan:latest
    volumes:
      - app_data:/app/server/build/jan
    build:
      context: .
      dockerfile: Dockerfile
    environment:
      # Set the AWS access key, secret access key, bucket name, endpoint, and region for app_cpu
      AWS_ACCESS_KEY_ID: minioadmin
      AWS_SECRET_ACCESS_KEY: minioadmin
      S3_BUCKET_NAME: mybucket
      AWS_ENDPOINT: http://10.5.0.2:9000
      AWS_REGION: us-east-1
    restart: always
    profiles:
      - cpu
    ports:
      - "3000:3000"
      - "1337:1337"
      - "3928:3928"
    networks:
      vpcbr:
        ipv4_address: 10.5.0.3

  # app_gpu service for running the GPU version of the application
  app_gpu:
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    image: jan-gpu:latest
    volumes:
      - app_data:/app/server/build/jan
    build:
      context: .
      dockerfile: Dockerfile.gpu
    restart: always
    environment:
      # Set the AWS access key, secret access key, bucket name, endpoint, and region for app_gpu
      AWS_ACCESS_KEY_ID: minioadmin
      AWS_SECRET_ACCESS_KEY: minioadmin
      S3_BUCKET_NAME: mybucket
      AWS_ENDPOINT: http://10.5.0.2:9000
      AWS_REGION: us-east-1
    profiles:
      - gpu
    ports:
      - "3000:3000"
      - "1337:1337"
      - "3928:3928"
    networks:
      vpcbr:
        ipv4_address: 10.5.0.4

volumes:
  minio_data:
  app_data:

networks:
  vpcbr:
    driver: bridge
    ipam:
      config:
        - subnet: 10.5.0.0/16
          gateway: 10.5.0.1

# Usage:
# - Run 'docker-compose --profile cpu up -d' to start the app_cpu service
# - Run 'docker-compose --profile gpu up -d' to start the app_gpu service
@@ -4,3 +4,60 @@ dan-jan:
  url: https://github.com/dan-jan
  image_url: https://avatars.githubusercontent.com/u/101145494?v=4
  email: daniel@jan.ai

namchuai:
  name: Nam Nguyen
  title: Developer
  url: https://github.com/namchuai
  image_url: https://avatars.githubusercontent.com/u/10397206?v=4
  email: james@jan.ai

hiro-v:
  name: Hiro Vuong
  title: MLE
  url: https://github.com/hiro-v
  image_url: https://avatars.githubusercontent.com/u/22463238?v=4
  email: hiro@jan.ai

ashley-jan:
  name: Ashley Tran
  title: Product Designer
  url: https://github.com/imtuyethan
  image_url: https://avatars.githubusercontent.com/u/89722390?v=4
  email: ashley@jan.ai

hientominh:
  name: Hien To
  title: DevOps Engineer
  url: https://github.com/hientominh
  image_url: https://avatars.githubusercontent.com/u/37921427?v=4
  email: hien@jan.ai

Van-QA:
  name: Van Pham
  title: QA & Release Manager
  url: https://github.com/Van-QA
  image_url: https://avatars.githubusercontent.com/u/64197333?v=4
  email: van@jan.ai

louis-jan:
  name: Louis Le
  title: Software Engineer
  url: https://github.com/louis-jan
  image_url: https://avatars.githubusercontent.com/u/133622055?v=4
  email: louis@jan.ai

hahuyhoang411:
  name: Rex Ha
  title: LLM Researcher & Content Writer
  url: https://github.com/hahuyhoang411
  image_url: https://avatars.githubusercontent.com/u/64120343?v=4
  email: rex@jan.ai

automaticcat:
  name: Alan Dao
  title: AI Engineer
  url: https://github.com/tikikun
  image_url: https://avatars.githubusercontent.com/u/22268502?v=4
  email: alan@jan.ai
@@ -110,9 +110,10 @@ Adhering to Jan's privacy preserving philosophy, our analytics philosophy is to

#### What is tracked

1. By default, Github tracks downloads and device metadata for all public Github repos. This helps us troubleshoot & ensure cross platform support.
1. We use Posthog to track a single `app.opened` event without additional user metadata, in order to understand retention.
1. Additionally, we plan to enable a `Settings` feature for users to turn off all tracking.
1. By default, Github tracks downloads and device metadata for all public GitHub repositories. This helps us troubleshoot & ensure cross-platform support.
2. We use [Umami](https://umami.is/) to collect, analyze, and understand application data while maintaining visitor privacy and data ownership. We are using the Umami Cloud in Europe to ensure GDPR compliance. Please see [Umami Privacy Policy](https://umami.is/privacy) for more details.
3. We use Umami to track a single `app.opened` event without additional user metadata, in order to understand retention. In addition, we track `app.event` to understand app version usage.
4. Additionally, we plan to enable a `Settings` feature for users to turn off all tracking.

#### Request for help
@@ -0,0 +1,79 @@
---
title: Installation and Prerequisites
slug: /developer/prereq
description: Guide to install and set up Jan for development.
keywords:
  [
    Jan AI,
    Jan,
    ChatGPT alternative,
    local AI,
    private AI,
    conversational AI,
    no-subscription fee,
    large language model,
    installation,
    prerequisites,
    developer setup,
  ]
---

## Requirements

### Hardware Requirements

Ensure your system meets the following specifications to guarantee a smooth development experience:

- [Hardware Requirements](../../guides/02-installation/06-hardware.md)

### System Requirements

Make sure your operating system meets the specific requirements for Jan development:

- [Windows](../../install/windows/#system-requirements)
- [MacOS](../../install/mac/#system-requirements)
- [Linux](../../install/linux/#system-requirements)

## Prerequisites

- [Node.js](https://nodejs.org/en/) (version 20.0.0 or higher)
- [yarn](https://yarnpkg.com/) (version 1.22.0 or higher)
- [make](https://www.gnu.org/software/make/) (version 3.81 or higher)

## Instructions

1. **Clone the Repository:**

   ```bash
   git clone https://github.com/janhq/jan
   cd jan
   git checkout -b DESIRED_BRANCH
   ```

2. **Install Dependencies**

   ```bash
   yarn install
   ```

3. **Run Development and Use Jan Desktop**

   ```bash
   make dev
   ```

   This command starts the development server and opens the Jan Desktop app.

## For Production Build

```bash
# Do steps 1 and 2 in the previous section
# Build the app
make build
```

This will build the macOS (M1/M2/M3) app for production (with code signing already done) and place the result in the `/electron/dist` folder.

## Troubleshooting

If you run into any issues due to a broken build, please check the [Stuck on a Broken Build](../../troubleshooting/stuck-on-broken-build) guide.
@ -12,11 +12,16 @@ keywords:
    conversational AI,
    no-subscription fee,
    large language model,
    installation guide,
  ]
---

# Installing Jan on MacOS

## System Requirements

Ensure that your MacOS version is 13 or higher to run Jan.

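You can check the installed version from a terminal; `sw_vers` ships with MacOS:

```bash
# Print the MacOS product version; it should report 13.x or newer.
sw_vers -productVersion
```
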
## Installation

Jan is available for download via our homepage, [https://jan.ai/](https://jan.ai/).

@ -12,11 +12,23 @@ keywords:
    conversational AI,
    no-subscription fee,
    large language model,
    installation guide,
  ]
---

# Installing Jan on Windows

## System Requirements

Ensure that your system meets the following requirements:

- Windows 10 or higher is required to run Jan.

To enable GPU support, you will need:

- NVIDIA GPU with CUDA Toolkit 11.7 or higher
- NVIDIA driver 470.63.01 or higher

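To verify the GPU requirements above, you can query the NVIDIA tools from a terminal or PowerShell window, assuming the CUDA Toolkit and driver are on your PATH:

```bash
# The driver version is shown in the header of the nvidia-smi output.
nvidia-smi
# The CUDA Toolkit version (requires the toolkit, not just the driver).
nvcc --version
```
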
## Installation

Jan is available for download via our homepage, [https://jan.ai](https://jan.ai/).

@ -59,13 +71,3 @@ To remove all user data associated with Jan, you can delete the `/jan` directory
cd C:\Users\%USERNAME%\AppData\Roaming
rmdir /S jan
```

## Troubleshooting

### Microsoft Defender

**Error: "Microsoft Defender SmartScreen prevented an unrecognized app from starting"**

Windows Defender may display the above warning when running the Jan Installer, as a standard security measure.

To proceed, select the "More info" option and select the "Run Anyway" option to continue with the installation.

@ -12,11 +12,24 @@ keywords:
    conversational AI,
    no-subscription fee,
    large language model,
    installation guide,
  ]
---

# Installing Jan on Linux

## System Requirements

Ensure that your system meets the following requirements:

- glibc 2.27 or higher (check with `ldd --version`)
- gcc 11, g++ 11, cpp 11, or higher; refer to this [link](https://jan.ai/guides/troubleshooting/gpu-not-used/#specific-requirements-for-linux) for more information.

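A quick way to verify the requirements above from a terminal (standard version flags; package names may differ per distribution):

```bash
ldd --version    # glibc version, expect 2.27 or newer
gcc --version    # expect gcc 11 or newer
g++ --version    # expect g++ 11 or newer
```
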
To enable GPU support, you will need:

- NVIDIA GPU with CUDA Toolkit 11.7 or higher
- NVIDIA driver 470.63.01 or higher

## Installation

Jan is available for download via our homepage, [https://jan.ai](https://jan.ai/).

@ -66,7 +79,6 @@ jan-linux-amd64-{version}.deb
# AppImage
jan-linux-x86_64-{version}.AppImage
```
```

## Uninstall Jan

@ -65,6 +65,13 @@ Navigate to the `~/jan/models` folder. Create a folder named `gpt-3.5-turbo-16k`
}
```

:::tip

- You can find the list of available models in the [OpenAI Platform](https://platform.openai.com/docs/models/overview).
- Please note that the `id` property needs to match the model name in the list. For example, if you want to use [GPT-4 Turbo](https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo), you need to set the `id` property to `gpt-4-1106-preview`.

:::

### 2. Configure OpenAI API Keys

You can find your API keys in the [OpenAI Platform](https://platform.openai.com/api-keys) and set the OpenAI API keys in the `~/jan/engines/openai.json` file.

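The snippet below is a minimal sketch of what this step can look like from a terminal. The field names (`full_url`, `api_key`) are assumptions based on a typical OpenAI engine configuration; keep whichever keys your generated `openai.json` already contains and only fill in the key value:

```bash
# Hypothetical example: write the OpenAI engine settings with your API key.
# Back up the existing file first and preserve its exact field names.
cat > ~/jan/engines/openai.json <<'EOF'
{
  "full_url": "https://api.openai.com/v1/chat/completions",
  "api_key": "sk-<your-api-key>"
}
EOF
```
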
@ -45,7 +45,9 @@ This may occur due to several reasons. Please follow these steps to resolve it:

5. If you are on Nvidia GPUs, please download [Cuda](https://developer.nvidia.com/cuda-downloads).

6. When [checking app logs](https://jan.ai/troubleshooting/how-to-get-error-logs/), if you encounter the error log `Bind address failed at 127.0.0.1:3928`, it indicates that the port used by Nitro might already be in use. Use the following commands to check the port status:
6. If you're using Linux, please ensure that your system meets the following requirements: gcc 11, g++ 11, cpp 11, or higher; refer to this [link](https://jan.ai/guides/troubleshooting/gpu-not-used/#specific-requirements-for-linux) for more information.

7. When [checking app logs](https://jan.ai/troubleshooting/how-to-get-error-logs/), if you encounter the error log `Bind address failed at 127.0.0.1:3928`, it indicates that the port used by Nitro might already be in use. Use the following commands to check the port status:

<Tabs groupId="operating-systems">
  <TabItem value="mac" label="macOS">

@ -17,4 +17,8 @@ keywords:
  ]
---

1. You may receive an error response `Error occurred: Unexpected token '<', "<!DOCTYPE"...is not valid JSON`, when you start a chat with OpenAI models. Using a VPN may help fix the issue.
You may receive an error response `Error occurred: Unexpected token '<', "<!DOCTYPE"...is not valid JSON`, when you start a chat with OpenAI models.

1. Check that you added an OpenAI API key. You can get an API key from OpenAI's [developer platform](https://platform.openai.com/). Alternatively, we recommend you download a local model from Jan Hub, which remains free to use and runs on your own computer!

2. Using a VPN may help fix the issue.

26
docs/docs/guides/08-troubleshooting/07-undefined-issue.mdx
Normal file
@ -0,0 +1,26 @@
---
title: Undefined Issue
slug: /troubleshooting/undefined-issue
description: Undefined issue troubleshooting guide.
keywords:
  [
    Jan AI,
    Jan,
    ChatGPT alternative,
    local AI,
    private AI,
    conversational AI,
    no-subscription fee,
    large language model,
    troubleshooting,
    undefined issue,
  ]
---

You may encounter an "undefined" issue when using Jan. Here are some troubleshooting steps to help you resolve it.

1. Try wiping the Jan folder, then reopen the Jan app and see if the issue persists.
2. If the issue persists, go to `~/jan/extensions/@janhq/inference-nitro-extensions/dist/bin/<your-os>/nitro` and run nitro manually to see if you get any error messages (see the sketch after this list).
3. Resolve any error messages reported by nitro, then check whether the issue persists.
4. Reopen the Jan app and see if the issue is resolved.
5. If the issue persists, please share the [app logs](https://jan.ai/troubleshooting/how-to-get-error-logs/) with us via [Jan Discord](https://discord.gg/mY69SZaMaC).
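
For step 2, a minimal sketch of running the bundled nitro binary by hand (`<your-os>` is a placeholder — use whichever platform folder actually exists under `bin/` on your machine):

```bash
# Run the bundled nitro binary directly and watch its console output for errors.
cd ~/jan/extensions/@janhq/inference-nitro-extensions/dist/bin/<your-os>
ls        # confirm the nitro executable is present
./nitro   # any startup errors will be printed here
```
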
59
docs/docs/template/QA_script.md
vendored
@ -1,6 +1,6 @@
|
||||
# [Release Version] QA Script
|
||||
|
||||
**Release Version:**
|
||||
**Release Version:** v0.4.6
|
||||
|
||||
**Operating System:**
|
||||
|
||||
@ -25,10 +25,10 @@
|
||||
|
||||
### 3. Users uninstall app
|
||||
|
||||
- [ ] :key: Check that the uninstallation process removes all components of the app from the system.
|
||||
- [ ] :key::warning: Check that the uninstallation process removes the app successfully from the system.
|
||||
- [ ] Clean the Jan root directory and open the app to check if it creates all the necessary folders, especially models and extensions.
|
||||
- [ ] When updating the app, check if the `/models` directory has any JSON files that change according to the update.
|
||||
- [ ] Verify if updating the app also updates extensions correctly (test functionality changes; support notifications for necessary tests with each version related to extensions update).
|
||||
- [ ] Verify if updating the app also updates extensions correctly (test functionality changes, support notifications for necessary tests with each version related to extensions update).
|
||||
|
||||
### 4. Users close app
|
||||
|
||||
@ -60,49 +60,45 @@
|
||||
- [ ] :key: Ensure that the conversation thread is maintained without any loss of data upon sending multiple messages.
|
||||
- [ ] Test for the ability to send different types of messages (e.g., text, emojis, code blocks).
|
||||
- [ ] :key: Validate the scroll functionality in the chat window for lengthy conversations.
|
||||
- [ ] Check if the user can renew responses multiple times.
|
||||
- [ ] Check if the user can copy the response.
|
||||
- [ ] Check if the user can delete responses.
|
||||
- [ ] :warning: Test if the user deletes the message midway, then the assistant stops that response.
|
||||
- [ ] :key: Check the `clear message` button works.
|
||||
- [ ] :key: Check the `delete entire chat` works.
|
||||
- [ ] :warning: Check if deleting all the chat retains the system prompt.
|
||||
- [ ] Check if deleting all the chat retains the system prompt.
|
||||
- [ ] Check the output format of the AI (code blocks, JSON, markdown, ...).
|
||||
- [ ] :key: Validate that there is appropriate error handling and messaging if the assistant fails to respond.
|
||||
- [ ] Test assistant's ability to maintain context over multiple exchanges.
|
||||
- [ ] :key: Check the `create new chat` button works correctly
|
||||
- [ ] Confirm that by changing `models` mid-thread the app can still handle it.
|
||||
- [ ] Check that by changing `instructions` mid-thread the app can still handle it.
|
||||
- [ ] Check the `regenerate` button renews the response.
|
||||
- [ ] Check the `Instructions` update correctly after the user updates it midway.
|
||||
- [ ] Check the `regenerate` button renews the response (single / multiple times).
|
||||
- [ ] Check the `Instructions` update correctly after the user updates it midway (mid-thread).
|
||||
|
||||
### 2. Users can customize chat settings like model parameters via both the GUI & thread.json
|
||||
|
||||
- [ ] :key: Confirm that the chat settings options are accessible via the GUI.
|
||||
- [ ] :key: Confirm that the Threads settings options are accessible.
|
||||
- [ ] Test the functionality to adjust model parameters (e.g., Temperature, Top K, Top P) from the GUI and verify they are reflected in the chat behavior.
|
||||
- [ ] :key: Ensure that changes can be saved and persisted between sessions.
|
||||
- [ ] Validate that users can access and modify the thread.json file.
|
||||
- [ ] :key: Check that changes made in thread.json are correctly applied to the chat session upon reload or restart.
|
||||
- [ ] Verify if there is a revert option to go back to previous settings after changes are made.
|
||||
- [ ] Test for user feedback or confirmation after saving changes to settings.
|
||||
- [ ] Check the maximum and minimum limits of the adjustable parameters and how they affect the assistant's responses.
|
||||
- [ ] :key: Validate user permissions for those who can change settings and persist them.
|
||||
- [ ] :key: Ensure that users switch between threads with different models, the app can handle it.
|
||||
|
||||
### 3. Users can click on a history thread
|
||||
### 3. Model dropdown
- [ ] :key: The model list should highlight recommended models based on the user's RAM
- [ ] Model size should be displayed (for both installed and imported models)
|
||||
|
||||
### 4. Users can click on a history thread
|
||||
- [ ] Test the ability to click on any thread in the history panel.
|
||||
- [ ] :key: Verify that clicking a thread brings up the past conversation in the main chat window.
|
||||
- [ ] :key: Ensure that the selected thread is highlighted or otherwise indicated in the history panel.
|
||||
- [ ] Confirm that the chat window displays the entire conversation from the selected history thread without any missing messages.
|
||||
- [ ] :key: Check the performance and accuracy of the history feature when dealing with a large number of threads.
|
||||
- [ ] Validate that historical threads reflect the exact state of the chat at that time, including settings.
|
||||
- [ ] :key: :warning: Test the search functionality within the history panel for quick navigation.
|
||||
- [ ] :key: Verify the ability to delete or clean old threads.
|
||||
- [ ] :key: Confirm that changing the title of the thread updates correctly.
|
||||
|
||||
### 4. Users can config instructions for the assistant.
|
||||
|
||||
### 5. Users can config instructions for the assistant.
|
||||
- [ ] Ensure there is a clear interface to input or change instructions for the assistant.
|
||||
- [ ] Test if the instructions set by the user are being followed by the assistant in subsequent conversations.
|
||||
- [ ] :key: Validate that changes to instructions are updated in real time and do not require a restart of the application or session.
|
||||
@ -112,6 +108,8 @@
|
||||
- [ ] Validate that instructions can be saved with descriptive names for easy retrieval.
|
||||
- [ ] :key: Check if the assistant can handle conflicting instructions and how it resolves them.
|
||||
- [ ] Ensure that instruction configurations are documented for user reference.
|
||||
- [ ] :key: RAG - Users can import documents and the system should process queries about the uploaded file, providing accurate and appropriate responses in the conversation thread.
|
||||
|
||||
|
||||
## D. Hub
|
||||
|
||||
@ -125,8 +123,7 @@
|
||||
|
||||
- [ ] Display the best model for their RAM at the top.
|
||||
- [ ] :key: Ensure that models are labeled with RAM requirements and compatibility.
|
||||
- [ ] :key: Validate that the download function is disabled for models that exceed the user's system capabilities.
|
||||
- [ ] Test that the platform provides alternative recommendations for models not suitable due to RAM limitations.
|
||||
- [ ] :warning: Test that the platform provides alternative recommendations for models not suitable due to RAM limitations.
|
||||
- [ ] :key: Check the download model functionality and validate if the cancel download feature works correctly.
|
||||
|
||||
### 3. Users can download models via a HuggingFace URL (coming soon)
|
||||
@ -139,7 +136,7 @@
|
||||
|
||||
- [ ] :key: Have clear instructions so users can do their own.
|
||||
- [ ] :key: Ensure the new model updates after restarting the app.
|
||||
- [ ] Ensure it raises clear errors for users to fix the problem while adding a new model.
|
||||
- [ ] :warning: Ensure it raises clear errors so users can fix the problem while adding a new model.
|
||||
|
||||
### 5. Users can use the model as they want
|
||||
|
||||
@ -149,9 +146,13 @@
|
||||
- [ ] Check if starting another model stops the other model entirely.
|
||||
- [ ] Check the `Explore models` navigate correctly to the model panel.
|
||||
- [ ] :key: Check when deleting a model it will delete all the files on the user's computer.
|
||||
- [ ] The recommended tags should present right for the user's hardware.
|
||||
- [ ] :warning: The recommended tags should display correctly for the user's hardware.
|
||||
- [ ] Assess that the descriptions of models are accurate and informative.
|
||||
|
||||
### 6. Users can Integrate With a Remote Server
- [ ] :key: Import an OpenAI GPT model (https://jan.ai/guides/using-models/integrate-with-remote-server/) and verify the model is displayed in the Hub / Thread dropdown
- [ ] Users can use the remote model properly
|
||||
|
||||
## E. System Monitor
|
||||
|
||||
### 1. Users can see disk and RAM utilization
|
||||
@ -181,7 +182,7 @@
|
||||
- [ ] Confirm that the application saves the theme preference and persists it across sessions.
|
||||
- [ ] Validate that all elements of the UI are compatible with the theme changes and maintain legibility and contrast.
|
||||
|
||||
### 2. Users change the extensions
|
||||
### 2. Users change the extensions [TBU]
|
||||
|
||||
- [ ] Confirm that the `Extensions` tab lists all available plugins.
|
||||
- [ ] :key: Test the toggle switch for each plugin to ensure it enables or disables the plugin correctly.
|
||||
@ -208,3 +209,19 @@
|
||||
- [ ] :key: Test that the application prevents the installation of incompatible or corrupt plugin files.
|
||||
- [ ] :key: Check that the user can uninstall or disable custom plugins as easily as pre-installed ones.
|
||||
- [ ] Verify that the application's performance remains stable after the installation of custom plugins.
|
||||
|
||||
### 5. Advanced Settings
- [ ] Attempt to download a model from the Hub using an **HTTP Proxy** [guideline](https://github.com/janhq/jan/pull/1562)
- [ ] Users can move the **Jan data folder**
- [ ] Users can click the Reset button to **factory reset** app settings to their original state & delete all usage data.
|
||||
|
||||
## G. Local API server
|
||||
|
||||
### 1. Local Server Usage with Server Options
|
||||
- [ ] :key: Explore the API Reference: use the Swagger page to send and receive requests (see the sketch after this list)
- [ ] Use the default server option
- [ ] Configure and use custom server options
- [ ] Test starting/stopping the local API server with different models / model settings
- [ ] Server logs are captured with the correct Server Options provided
- [ ] Verify functionality of the Open logs/Clear feature
- [ ] Ensure that threads and other functions impacting the model are disabled while the local server is running

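For the first item, a minimal check from a terminal, assuming the server is running with the default options (`localhost:1337`, as documented in the API reference later in this change):

```bash
# List the models exposed by the local API server.
curl -s 'http://localhost:1337/v1/models' -H 'accept: application/json'
```
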
@ -67,20 +67,31 @@ paths:
|
||||
x-codeSamples:
|
||||
- lang: cURL
|
||||
source: |
|
||||
curl http://localhost:1337/v1/chat/completions \
|
||||
-H "Content-Type: application/json" \
|
||||
curl -X 'POST' \
|
||||
'http://localhost:1337/v1/chat/completions' \
|
||||
-H 'accept: application/json' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{
|
||||
"model": "tinyllama-1.1b",
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
"content": "You are a helpful assistant."
|
||||
"content": "You are a helpful assistant.",
|
||||
"role": "system"
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Hello!"
|
||||
"content": "Hello!",
|
||||
"role": "user"
|
||||
}
|
||||
]
|
||||
],
|
||||
"model": "tinyllama-1.1b",
|
||||
"stream": true,
|
||||
"max_tokens": 2048,
|
||||
"stop": [
|
||||
"hello"
|
||||
],
|
||||
"frequency_penalty": 0,
|
||||
"presence_penalty": 0,
|
||||
"temperature": 0.7,
|
||||
"top_p": 0.95
|
||||
}'
|
||||
/models:
|
||||
get:
|
||||
@ -103,7 +114,9 @@ paths:
|
||||
x-codeSamples:
|
||||
- lang: cURL
|
||||
source: |
|
||||
curl http://localhost:1337/v1/models
|
||||
curl -X 'GET' \
|
||||
'http://localhost:1337/v1/models' \
|
||||
-H 'accept: application/json'
|
||||
"/models/download/{model_id}":
|
||||
get:
|
||||
operationId: downloadModel
|
||||
@ -131,7 +144,9 @@ paths:
|
||||
x-codeSamples:
|
||||
- lang: cURL
|
||||
source: |
|
||||
curl -X POST http://localhost:1337/v1/models/download/{model_id}
|
||||
curl -X 'GET' \
|
||||
'http://localhost:1337/v1/models/download/{model_id}' \
|
||||
-H 'accept: application/json'
|
||||
"/models/{model_id}":
|
||||
get:
|
||||
operationId: retrieveModel
|
||||
@ -162,7 +177,9 @@ paths:
|
||||
x-codeSamples:
|
||||
- lang: cURL
|
||||
source: |
|
||||
curl http://localhost:1337/v1/models/{model_id}
|
||||
curl -X 'GET' \
|
||||
'http://localhost:1337/v1/models/{model_id}' \
|
||||
-H 'accept: application/json'
|
||||
delete:
|
||||
operationId: deleteModel
|
||||
tags:
|
||||
@ -191,7 +208,9 @@ paths:
|
||||
x-codeSamples:
|
||||
- lang: cURL
|
||||
source: |
|
||||
curl -X DELETE http://localhost:1337/v1/models/{model_id}
|
||||
curl -X 'DELETE' \
|
||||
'http://localhost:1337/v1/models/{model_id}' \
|
||||
-H 'accept: application/json'
|
||||
/threads:
|
||||
post:
|
||||
operationId: createThread
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
---
|
||||
components:
|
||||
schemas:
|
||||
MessageObject:
|
||||
@ -75,7 +76,7 @@ components:
|
||||
example: msg_abc123
|
||||
object:
|
||||
type: string
|
||||
description: "Type of the object, indicating it's a thread message."
|
||||
description: Type of the object, indicating it's a thread message.
|
||||
default: thread.message
|
||||
created_at:
|
||||
type: integer
|
||||
@ -88,7 +89,7 @@ components:
|
||||
example: thread_abc123
|
||||
role:
|
||||
type: string
|
||||
description: "Role of the sender, either 'user' or 'assistant'."
|
||||
description: Role of the sender, either 'user' or 'assistant'.
|
||||
example: user
|
||||
content:
|
||||
type: array
|
||||
@ -97,7 +98,7 @@ components:
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
description: "Type of content, e.g., 'text'."
|
||||
description: Type of content, e.g., 'text'.
|
||||
example: text
|
||||
text:
|
||||
type: object
|
||||
@ -110,21 +111,21 @@ components:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: "Annotations for the text content, if any."
|
||||
description: Annotations for the text content, if any.
|
||||
example: []
|
||||
file_ids:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: "Array of file IDs associated with the message, if any."
|
||||
description: Array of file IDs associated with the message, if any.
|
||||
example: []
|
||||
assistant_id:
|
||||
type: string
|
||||
description: "Identifier of the assistant involved in the message, if applicable."
|
||||
description: Identifier of the assistant involved in the message, if applicable.
|
||||
example: null
|
||||
run_id:
|
||||
type: string
|
||||
description: "Run ID associated with the message, if applicable."
|
||||
description: Run ID associated with the message, if applicable.
|
||||
example: null
|
||||
metadata:
|
||||
type: object
|
||||
@ -139,7 +140,7 @@ components:
|
||||
example: msg_abc123
|
||||
object:
|
||||
type: string
|
||||
description: "Type of the object, indicating it's a thread message."
|
||||
description: Type of the object, indicating it's a thread message.
|
||||
example: thread.message
|
||||
created_at:
|
||||
type: integer
|
||||
@ -152,7 +153,7 @@ components:
|
||||
example: thread_abc123
|
||||
role:
|
||||
type: string
|
||||
description: "Role of the sender, either 'user' or 'assistant'."
|
||||
description: Role of the sender, either 'user' or 'assistant'.
|
||||
example: user
|
||||
content:
|
||||
type: array
|
||||
@ -161,7 +162,7 @@ components:
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
description: "Type of content, e.g., 'text'."
|
||||
description: Type of content, e.g., 'text'.
|
||||
example: text
|
||||
text:
|
||||
type: object
|
||||
@ -174,21 +175,21 @@ components:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: "Annotations for the text content, if any."
|
||||
description: Annotations for the text content, if any.
|
||||
example: []
|
||||
file_ids:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: "Array of file IDs associated with the message, if any."
|
||||
description: Array of file IDs associated with the message, if any.
|
||||
example: []
|
||||
assistant_id:
|
||||
type: string
|
||||
description: "Identifier of the assistant involved in the message, if applicable."
|
||||
description: Identifier of the assistant involved in the message, if applicable.
|
||||
example: null
|
||||
run_id:
|
||||
type: string
|
||||
description: "Run ID associated with the message, if applicable."
|
||||
description: Run ID associated with the message, if applicable.
|
||||
example: null
|
||||
metadata:
|
||||
type: object
|
||||
@ -199,7 +200,7 @@ components:
|
||||
properties:
|
||||
object:
|
||||
type: string
|
||||
description: "Type of the object, indicating it's a list."
|
||||
description: Type of the object, indicating it's a list.
|
||||
default: list
|
||||
data:
|
||||
type: array
|
||||
@ -226,7 +227,7 @@ components:
|
||||
example: msg_abc123
|
||||
object:
|
||||
type: string
|
||||
description: "Type of the object, indicating it's a thread message."
|
||||
description: Type of the object, indicating it's a thread message.
|
||||
example: thread.message
|
||||
created_at:
|
||||
type: integer
|
||||
@ -239,7 +240,7 @@ components:
|
||||
example: thread_abc123
|
||||
role:
|
||||
type: string
|
||||
description: "Role of the sender, either 'user' or 'assistant'."
|
||||
description: Role of the sender, either 'user' or 'assistant'.
|
||||
example: user
|
||||
content:
|
||||
type: array
|
||||
@ -248,7 +249,7 @@ components:
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
description: "Type of content, e.g., 'text'."
|
||||
description: Type of content, e.g., 'text'.
|
||||
text:
|
||||
type: object
|
||||
properties:
|
||||
@ -260,20 +261,20 @@ components:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: "Annotations for the text content, if any."
|
||||
description: Annotations for the text content, if any.
|
||||
file_ids:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: "Array of file IDs associated with the message, if any."
|
||||
description: Array of file IDs associated with the message, if any.
|
||||
example: []
|
||||
assistant_id:
|
||||
type: string
|
||||
description: "Identifier of the assistant involved in the message, if applicable."
|
||||
description: Identifier of the assistant involved in the message, if applicable.
|
||||
example: null
|
||||
run_id:
|
||||
type: string
|
||||
description: "Run ID associated with the message, if applicable."
|
||||
description: Run ID associated with the message, if applicable.
|
||||
example: null
|
||||
metadata:
|
||||
type: object
|
||||
|
||||
@ -18,114 +18,82 @@ components:
|
||||
Model:
|
||||
type: object
|
||||
properties:
|
||||
type:
|
||||
source_url:
|
||||
type: string
|
||||
default: model
|
||||
description: The type of the object.
|
||||
version:
|
||||
type: string
|
||||
default: "1"
|
||||
description: The version number of the model.
|
||||
format: uri
|
||||
description: URL to the source of the model.
|
||||
example: https://huggingface.co/janhq/trinity-v1.2-GGUF/resolve/main/trinity-v1.2.Q4_K_M.gguf
|
||||
id:
|
||||
type: string
|
||||
description: Unique identifier used in chat-completions model_name, matches
|
||||
description:
|
||||
Unique identifier used in chat-completions model_name, matches
|
||||
folder name.
|
||||
example: zephyr-7b
|
||||
example: trinity-v1.2-7b
|
||||
object:
|
||||
type: string
|
||||
example: model
|
||||
name:
|
||||
type: string
|
||||
description: Name of the model.
|
||||
example: Zephyr 7B
|
||||
owned_by:
|
||||
example: Trinity-v1.2 7B Q4
|
||||
version:
|
||||
type: string
|
||||
description: Compatibility field for OpenAI.
|
||||
default: ""
|
||||
created:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Unix timestamp representing the creation time.
|
||||
default: "1.0"
|
||||
description: The version number of the model.
|
||||
description:
|
||||
type: string
|
||||
description: Description of the model.
|
||||
state:
|
||||
type: string
|
||||
enum:
|
||||
- null
|
||||
- downloading
|
||||
- ready
|
||||
- starting
|
||||
- stopping
|
||||
description: Current state of the model.
|
||||
example:
|
||||
Trinity is an experimental model merge using the Slerp method.
|
||||
Recommended for daily assistance purposes.
|
||||
format:
|
||||
type: string
|
||||
description: State format of the model, distinct from the engine.
|
||||
example: ggufv3
|
||||
source:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
url:
|
||||
format: uri
|
||||
description: URL to the source of the model.
|
||||
example: https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF/blob/main/zephyr-7b-beta.Q4_K_M.gguf
|
||||
filename:
|
||||
type: string
|
||||
description: Filename of the model.
|
||||
example: zephyr-7b-beta.Q4_K_M.gguf
|
||||
example: gguf
|
||||
settings:
|
||||
type: object
|
||||
properties:
|
||||
ctx_len:
|
||||
type: string
|
||||
type: integer
|
||||
description: Context length.
|
||||
example: "4096"
|
||||
ngl:
|
||||
example: 4096
|
||||
prompt_template:
|
||||
type: string
|
||||
description: Number of layers.
|
||||
example: "100"
|
||||
embedding:
|
||||
type: string
|
||||
description: Indicates if embedding is enabled.
|
||||
example: "true"
|
||||
n_parallel:
|
||||
type: string
|
||||
description: Number of parallel processes.
|
||||
example: "4"
|
||||
example: "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant"
|
||||
additionalProperties: false
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
temperature:
|
||||
type: string
|
||||
description: Temperature setting for the model.
|
||||
example: "0.7"
|
||||
token_limit:
|
||||
type: string
|
||||
description: Token limit for the model.
|
||||
example: "4096"
|
||||
top_k:
|
||||
type: string
|
||||
description: Top-k setting for the model.
|
||||
example: "0"
|
||||
example: 0.7
|
||||
top_p:
|
||||
type: string
|
||||
description: Top-p setting for the model.
|
||||
example: "1"
|
||||
example: 0.95
|
||||
stream:
|
||||
type: string
|
||||
description: Indicates if streaming is enabled.
|
||||
example: "true"
|
||||
example: true
|
||||
max_tokens:
|
||||
example: 4096
|
||||
stop:
|
||||
example: []
|
||||
frequency_penalty:
|
||||
example: 0
|
||||
presence_penalty:
|
||||
example: 0
|
||||
additionalProperties: false
|
||||
metadata:
|
||||
type: object
|
||||
description: Additional metadata.
|
||||
assets:
|
||||
type: array
|
||||
items:
|
||||
author:
|
||||
type: string
|
||||
description: List of assets related to the model.
|
||||
required:
|
||||
- source
|
||||
example: Jan
|
||||
tags:
|
||||
example:
|
||||
- 7B
|
||||
- Merged
|
||||
- Featured
|
||||
size:
|
||||
example: 4370000000
|
||||
cover:
|
||||
example: https://raw.githubusercontent.com/janhq/jan/main/models/trinity-v1.2-7b/cover.png
|
||||
engine:
|
||||
example: nitro
|
||||
ModelObject:
|
||||
type: object
|
||||
properties:
|
||||
@ -133,7 +101,7 @@ components:
|
||||
type: string
|
||||
description: |
|
||||
The identifier of the model.
|
||||
example: zephyr-7b
|
||||
example: trinity-v1.2-7b
|
||||
object:
|
||||
type: string
|
||||
description: |
|
||||
@ -153,197 +121,89 @@ components:
|
||||
GetModelResponse:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
source_url:
|
||||
type: string
|
||||
description: The identifier of the model.
|
||||
example: zephyr-7b
|
||||
object:
|
||||
type: string
|
||||
description: Type of the object, indicating it's a model.
|
||||
default: model
|
||||
created:
|
||||
type: integer
|
||||
format: int64
|
||||
description: Unix timestamp representing the creation time of the model.
|
||||
owned_by:
|
||||
type: string
|
||||
description: The entity that owns the model.
|
||||
example: _
|
||||
state:
|
||||
type: string
|
||||
enum:
|
||||
- not_downloaded
|
||||
- downloaded
|
||||
- running
|
||||
- stopped
|
||||
description: The current state of the model.
|
||||
source:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
url:
|
||||
format: uri
|
||||
description: URL to the source of the model.
|
||||
example: https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF/blob/main/zephyr-7b-beta.Q4_K_M.gguf
|
||||
filename:
|
||||
example: https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_K_M.gguf
|
||||
id:
|
||||
type: string
|
||||
description: Filename of the model.
|
||||
example: zephyr-7b-beta.Q4_K_M.gguf
|
||||
engine_parameters:
|
||||
type: object
|
||||
properties:
|
||||
pre_prompt:
|
||||
description:
|
||||
Unique identifier used in chat-completions model_name, matches
|
||||
folder name.
|
||||
example: mistral-ins-7b-q4
|
||||
object:
|
||||
type: string
|
||||
description: Predefined prompt used for setting up internal configurations.
|
||||
default: ""
|
||||
example: Initial setup complete.
|
||||
system_prompt:
|
||||
example: model
|
||||
name:
|
||||
type: string
|
||||
description: Prefix used for system-level prompts.
|
||||
default: "SYSTEM: "
|
||||
user_prompt:
|
||||
description: Name of the model.
|
||||
example: Mistral Instruct 7B Q4
|
||||
version:
|
||||
type: string
|
||||
description: Prefix used for user prompts.
|
||||
default: "USER: "
|
||||
ai_prompt:
|
||||
default: "1.0"
|
||||
description: The version number of the model.
|
||||
description:
|
||||
type: string
|
||||
description: Prefix used for assistant prompts.
|
||||
default: "ASSISTANT: "
|
||||
ngl:
|
||||
type: integer
|
||||
description: Number of neural network layers loaded onto the GPU for
|
||||
acceleration.
|
||||
minimum: 0
|
||||
maximum: 100
|
||||
default: 100
|
||||
example: 100
|
||||
ctx_len:
|
||||
type: integer
|
||||
description: Context length for model operations, varies based on the specific
|
||||
model.
|
||||
minimum: 128
|
||||
maximum: 4096
|
||||
default: 4096
|
||||
example: 4096
|
||||
n_parallel:
|
||||
type: integer
|
||||
description: Number of parallel operations, relevant when continuous batching is
|
||||
enabled.
|
||||
minimum: 1
|
||||
maximum: 10
|
||||
default: 1
|
||||
example: 4
|
||||
cont_batching:
|
||||
type: boolean
|
||||
description: Indicates if continuous batching is used for processing.
|
||||
default: false
|
||||
example: false
|
||||
cpu_threads:
|
||||
type: integer
|
||||
description: Number of threads allocated for CPU-based inference.
|
||||
minimum: 1
|
||||
example: 8
|
||||
embedding:
|
||||
type: boolean
|
||||
description: Indicates if embedding layers are enabled in the model.
|
||||
default: true
|
||||
example: true
|
||||
model_parameters:
|
||||
description: Description of the model.
|
||||
example:
|
||||
Trinity is an experimental model merge using the Slerp method.
|
||||
Recommended for daily assistance purposes.
|
||||
format:
|
||||
type: string
|
||||
description: State format of the model, distinct from the engine.
|
||||
example: gguf
|
||||
settings:
|
||||
type: object
|
||||
properties:
|
||||
ctx_len:
|
||||
type: integer
|
||||
description: Maximum context length the model can handle.
|
||||
minimum: 0
|
||||
maximum: 4096
|
||||
default: 4096
|
||||
description: Context length.
|
||||
example: 4096
|
||||
ngl:
|
||||
type: integer
|
||||
description: Number of layers in the neural network.
|
||||
minimum: 1
|
||||
maximum: 100
|
||||
default: 100
|
||||
example: 100
|
||||
embedding:
|
||||
type: boolean
|
||||
description: Indicates if embedding layers are used.
|
||||
default: true
|
||||
example: true
|
||||
n_parallel:
|
||||
type: integer
|
||||
description: Number of parallel processes the model can run.
|
||||
minimum: 1
|
||||
maximum: 10
|
||||
default: 1
|
||||
example: 4
|
||||
prompt_template:
|
||||
type: string
|
||||
example: "[INST] {prompt} [/INST]"
|
||||
additionalProperties: false
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
temperature:
|
||||
type: number
|
||||
description: Controls randomness in model's responses. Higher values lead to
|
||||
more random responses.
|
||||
minimum: 0
|
||||
maximum: 2
|
||||
default: 0.7
|
||||
example: 0.7
|
||||
token_limit:
|
||||
type: integer
|
||||
description: Maximum number of tokens the model can generate in a single
|
||||
response.
|
||||
minimum: 1
|
||||
maximum: 4096
|
||||
default: 4096
|
||||
example: 4096
|
||||
top_k:
|
||||
type: integer
|
||||
description: Limits the model to consider only the top k most likely next tokens
|
||||
at each step.
|
||||
minimum: 0
|
||||
maximum: 100
|
||||
default: 0
|
||||
example: 0
|
||||
top_p:
|
||||
type: number
|
||||
description: Nucleus sampling parameter. The model considers the smallest set of
|
||||
tokens whose cumulative probability exceeds the top_p value.
|
||||
minimum: 0
|
||||
maximum: 1
|
||||
default: 1
|
||||
example: 1
|
||||
example: 0.95
|
||||
stream:
|
||||
example: true
|
||||
max_tokens:
|
||||
example: 4096
|
||||
stop:
|
||||
example: []
|
||||
frequency_penalty:
|
||||
example: 0
|
||||
presence_penalty:
|
||||
example: 0
|
||||
additionalProperties: false
|
||||
metadata:
|
||||
type: object
|
||||
properties:
|
||||
engine:
|
||||
author:
|
||||
type: string
|
||||
description: The engine used by the model.
|
||||
enum:
|
||||
- nitro
|
||||
- openai
|
||||
- hf_inference
|
||||
quantization:
|
||||
type: string
|
||||
description: Quantization parameter of the model.
|
||||
example: Q3_K_L
|
||||
example: MistralAI
|
||||
tags:
|
||||
example:
|
||||
- 7B
|
||||
- Featured
|
||||
- Foundation Model
|
||||
size:
|
||||
type: string
|
||||
description: Size of the model.
|
||||
example: 7B
|
||||
required:
|
||||
- id
|
||||
- object
|
||||
- created
|
||||
- owned_by
|
||||
- state
|
||||
- source
|
||||
- parameters
|
||||
- metadata
|
||||
example: 4370000000
|
||||
cover:
|
||||
example: https://raw.githubusercontent.com/janhq/jan/main/models/mistral-ins-7b-q4/cover.png
|
||||
engine:
|
||||
example: nitro
|
||||
DeleteModelResponse:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
description: The identifier of the model that was deleted.
|
||||
example: model-zephyr-7B
|
||||
example: mistral-ins-7b-q4
|
||||
object:
|
||||
type: string
|
||||
description: Type of the object, indicating it's a model.
|
||||
|
||||
@ -142,7 +142,7 @@ components:
|
||||
example: Jan
|
||||
instructions:
|
||||
type: string
|
||||
description: |
|
||||
description: >
|
||||
The instruction of assistant, defaults to "Be my grammar corrector"
|
||||
model:
|
||||
type: object
|
||||
|
||||
@ -5,7 +5,11 @@ import request from 'request'
|
||||
import { createWriteStream, renameSync } from 'fs'
|
||||
import { DownloadEvent, DownloadRoute } from '@janhq/core'
|
||||
const progress = require('request-progress')
|
||||
import { DownloadManager, getJanDataFolderPath, normalizeFilePath } from '@janhq/core/node'
|
||||
import {
|
||||
DownloadManager,
|
||||
getJanDataFolderPath,
|
||||
normalizeFilePath,
|
||||
} from '@janhq/core/node'
|
||||
|
||||
export function handleDownloaderIPCs() {
|
||||
/**
|
||||
@ -56,20 +60,23 @@ export function handleDownloaderIPCs() {
|
||||
*/
|
||||
ipcMain.handle(
|
||||
DownloadRoute.downloadFile,
|
||||
async (_event, url, fileName, network) => {
|
||||
async (_event, url, localPath, network) => {
|
||||
const strictSSL = !network?.ignoreSSL
|
||||
const proxy = network?.proxy?.startsWith('http')
|
||||
? network.proxy
|
||||
: undefined
|
||||
|
||||
if (typeof fileName === 'string') {
|
||||
fileName = normalizeFilePath(fileName)
|
||||
if (typeof localPath === 'string') {
|
||||
localPath = normalizeFilePath(localPath)
|
||||
}
|
||||
const destination = resolve(getJanDataFolderPath(), fileName)
|
||||
const array = localPath.split('/')
|
||||
const fileName = array.pop() ?? ''
|
||||
const modelId = array.pop() ?? ''
|
||||
|
||||
const destination = resolve(getJanDataFolderPath(), localPath)
|
||||
const rq = request({ url, strictSSL, proxy })
|
||||
|
||||
// Put request to download manager instance
|
||||
DownloadManager.instance.setRequest(fileName, rq)
|
||||
DownloadManager.instance.setRequest(localPath, rq)
|
||||
|
||||
// Downloading file to a temp file first
|
||||
const downloadingTempFile = `${destination}.download`
|
||||
@ -81,6 +88,7 @@ export function handleDownloaderIPCs() {
|
||||
{
|
||||
...state,
|
||||
fileName,
|
||||
modelId,
|
||||
}
|
||||
)
|
||||
})
|
||||
@ -90,11 +98,12 @@ export function handleDownloaderIPCs() {
|
||||
{
|
||||
fileName,
|
||||
err,
|
||||
modelId,
|
||||
}
|
||||
)
|
||||
})
|
||||
.on('end', function () {
|
||||
if (DownloadManager.instance.networkRequests[fileName]) {
|
||||
if (DownloadManager.instance.networkRequests[localPath]) {
|
||||
// Finished downloading, rename temp file to actual file
|
||||
renameSync(downloadingTempFile, destination)
|
||||
|
||||
@ -102,14 +111,16 @@ export function handleDownloaderIPCs() {
|
||||
DownloadEvent.onFileDownloadSuccess,
|
||||
{
|
||||
fileName,
|
||||
modelId,
|
||||
}
|
||||
)
|
||||
DownloadManager.instance.setRequest(fileName, undefined)
|
||||
DownloadManager.instance.setRequest(localPath, undefined)
|
||||
} else {
|
||||
WindowManager?.instance.currentWindow?.webContents.send(
|
||||
DownloadEvent.onFileDownloadError,
|
||||
{
|
||||
fileName,
|
||||
modelId,
|
||||
err: { message: 'aborted' },
|
||||
}
|
||||
)
|
||||
|
||||
@ -38,6 +38,7 @@ export function handleFileMangerIPCs() {
|
||||
getResourcePath()
|
||||
)
|
||||
|
||||
// Handles the 'getUserHomePath' IPC event. This event is triggered to get the user home path.
|
||||
ipcMain.handle(FileManagerRoute.getUserHomePath, async (_event) =>
|
||||
app.getPath('home')
|
||||
)
|
||||
|
||||
@ -1,8 +1,7 @@
|
||||
import { ipcMain } from 'electron'
|
||||
|
||||
import { getJanDataFolderPath, normalizeFilePath } from '@janhq/core/node'
|
||||
import fs from 'fs'
|
||||
import { FileManagerRoute, FileSystemRoute } from '@janhq/core'
|
||||
import { FileSystemRoute } from '@janhq/core'
|
||||
import { join } from 'path'
|
||||
/**
|
||||
* Handles file system operations.
|
||||
|
||||
27
electron/merge-latest-ymls.js
Normal file
@ -0,0 +1,27 @@
|
||||
const yaml = require('js-yaml')
|
||||
const fs = require('fs')
|
||||
|
||||
// get two file paths from arguments:
|
||||
const [, , ...args] = process.argv
|
||||
const file1 = args[0]
|
||||
const file2 = args[1]
|
||||
const file3 = args[2]
|
||||
|
||||
// check that all arguments are present and throw error instead
|
||||
if (!file1 || !file2 || !file3) {
|
||||
throw new Error('Please provide 3 file paths as arguments: path to file1, to file2 and destination path')
|
||||
}
|
||||
|
||||
const doc1 = yaml.load(fs.readFileSync(file1, 'utf8'))
|
||||
console.log('doc1: ', doc1)
|
||||
|
||||
const doc2 = yaml.load(fs.readFileSync(file2, 'utf8'))
|
||||
console.log('doc2: ', doc2)
|
||||
|
||||
const merged = { ...doc1, ...doc2 }
|
||||
merged.files.push(...doc1.files)
|
||||
|
||||
console.log('merged', merged)
|
||||
|
||||
const mergedYml = yaml.dump(merged)
|
||||
fs.writeFileSync(file3, mergedYml, 'utf8')
|
||||
@ -57,16 +57,17 @@
|
||||
"scripts": {
|
||||
"lint": "eslint . --ext \".js,.jsx,.ts,.tsx\"",
|
||||
"test:e2e": "playwright test --workers=1",
|
||||
"dev": "tsc -p . && electron .",
|
||||
"build": "run-script-os",
|
||||
"build:test": "run-script-os",
|
||||
"copy:assets": "rimraf --glob \"./pre-install/*.tgz\" && cpx \"../pre-install/*.tgz\" \"./pre-install\"",
|
||||
"dev": "yarn copy:assets && tsc -p . && electron .",
|
||||
"build": "yarn copy:assets && run-script-os",
|
||||
"build:test": "yarn copy:assets && run-script-os",
|
||||
"build:test:darwin": "tsc -p . && electron-builder -p never -m --dir",
|
||||
"build:test:win32": "tsc -p . && electron-builder -p never -w --dir",
|
||||
"build:test:linux": "tsc -p . && electron-builder -p never -l --dir",
|
||||
"build:darwin": "tsc -p . && electron-builder -p never -m --x64 --arm64",
|
||||
"build:win32": "tsc -p . && electron-builder -p never -w",
|
||||
"build:linux": "tsc -p . && electron-builder -p never -l deb -l AppImage",
|
||||
"build:publish": "run-script-os",
|
||||
"build:publish": "yarn copy:assets && run-script-os",
|
||||
"build:publish:darwin": "tsc -p . && electron-builder -p always -m --x64 --arm64",
|
||||
"build:publish:win32": "tsc -p . && electron-builder -p always -w",
|
||||
"build:publish:linux": "tsc -p . && electron-builder -p always -l deb -l AppImage"
|
||||
|
||||
@ -1,9 +1,16 @@
|
||||
import { PlaywrightTestConfig } from '@playwright/test'
|
||||
|
||||
const config: PlaywrightTestConfig = {
|
||||
testDir: './tests',
|
||||
testDir: './tests/e2e',
|
||||
retries: 0,
|
||||
globalTimeout: 300000,
|
||||
use: {
|
||||
screenshot: 'only-on-failure',
|
||||
video: 'retain-on-failure',
|
||||
trace: 'retain-on-failure',
|
||||
},
|
||||
|
||||
reporter: [['html', { outputFolder: './playwright-report' }]],
|
||||
}
|
||||
|
||||
export default config
|
||||
|
||||
34
electron/tests/e2e/hub.e2e.spec.ts
Normal file
@ -0,0 +1,34 @@
|
||||
import {
|
||||
page,
|
||||
test,
|
||||
setupElectron,
|
||||
teardownElectron,
|
||||
TIMEOUT,
|
||||
} from '../pages/basePage'
|
||||
import { expect } from '@playwright/test'
|
||||
|
||||
test.beforeAll(async () => {
|
||||
const appInfo = await setupElectron()
|
||||
expect(appInfo.asar).toBe(true)
|
||||
expect(appInfo.executable).toBeTruthy()
|
||||
expect(appInfo.main).toBeTruthy()
|
||||
expect(appInfo.name).toBe('jan')
|
||||
expect(appInfo.packageJson).toBeTruthy()
|
||||
expect(appInfo.packageJson.name).toBe('jan')
|
||||
expect(appInfo.platform).toBeTruthy()
|
||||
expect(appInfo.platform).toBe(process.platform)
|
||||
expect(appInfo.resourcesDir).toBeTruthy()
|
||||
})
|
||||
|
||||
test.afterAll(async () => {
|
||||
await teardownElectron()
|
||||
})
|
||||
|
||||
test('explores hub', async () => {
|
||||
await page.getByTestId('Hub').first().click({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
await page.getByTestId('hub-container-test-id').isVisible({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
38
electron/tests/e2e/navigation.e2e.spec.ts
Normal file
@ -0,0 +1,38 @@
|
||||
import { expect } from '@playwright/test'
|
||||
import {
|
||||
page,
|
||||
setupElectron,
|
||||
TIMEOUT,
|
||||
test,
|
||||
teardownElectron,
|
||||
} from '../pages/basePage'
|
||||
|
||||
test.beforeAll(async () => {
|
||||
await setupElectron()
|
||||
})
|
||||
|
||||
test.afterAll(async () => {
|
||||
await teardownElectron()
|
||||
})
|
||||
|
||||
test('renders left navigation panel', async () => {
|
||||
const systemMonitorBtn = await page
|
||||
.getByTestId('System Monitor')
|
||||
.first()
|
||||
.isEnabled({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
const settingsBtn = await page
|
||||
.getByTestId('Thread')
|
||||
.first()
|
||||
.isEnabled({ timeout: TIMEOUT })
|
||||
expect([systemMonitorBtn, settingsBtn].filter((e) => !e).length).toBe(0)
|
||||
// Chat section should be there
|
||||
await page.getByTestId('Local API Server').first().click({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
const localServer = page.getByTestId('local-server-testid').first()
|
||||
await expect(localServer).toBeVisible({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
23
electron/tests/e2e/settings.e2e.spec.ts
Normal file
@ -0,0 +1,23 @@
|
||||
import { expect } from '@playwright/test'
|
||||
|
||||
import {
|
||||
setupElectron,
|
||||
teardownElectron,
|
||||
test,
|
||||
page,
|
||||
TIMEOUT,
|
||||
} from '../pages/basePage'
|
||||
|
||||
test.beforeAll(async () => {
|
||||
await setupElectron()
|
||||
})
|
||||
|
||||
test.afterAll(async () => {
|
||||
await teardownElectron()
|
||||
})
|
||||
|
||||
test('shows settings', async () => {
|
||||
await page.getByTestId('Settings').first().click({ timeout: TIMEOUT })
|
||||
const settingDescription = page.getByTestId('testid-setting-description')
|
||||
await expect(settingDescription).toBeVisible({ timeout: TIMEOUT })
|
||||
})
|
||||
@ -1,48 +0,0 @@
|
||||
import { _electron as electron } from 'playwright'
|
||||
import { ElectronApplication, Page, expect, test } from '@playwright/test'
|
||||
|
||||
import {
|
||||
findLatestBuild,
|
||||
parseElectronApp,
|
||||
stubDialog,
|
||||
} from 'electron-playwright-helpers'
|
||||
|
||||
let electronApp: ElectronApplication
|
||||
let page: Page
|
||||
const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
|
||||
|
||||
test.beforeAll(async () => {
|
||||
process.env.CI = 'e2e'
|
||||
|
||||
const latestBuild = findLatestBuild('dist')
|
||||
expect(latestBuild).toBeTruthy()
|
||||
|
||||
// parse the packaged Electron app and find paths and other info
|
||||
const appInfo = parseElectronApp(latestBuild)
|
||||
expect(appInfo).toBeTruthy()
|
||||
|
||||
electronApp = await electron.launch({
|
||||
args: [appInfo.main], // main file from package.json
|
||||
executablePath: appInfo.executable, // path to the Electron executable
|
||||
})
|
||||
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
|
||||
|
||||
page = await electronApp.firstWindow({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
|
||||
test.afterAll(async () => {
|
||||
await electronApp.close()
|
||||
await page.close()
|
||||
})
|
||||
|
||||
test('explores hub', async () => {
|
||||
test.setTimeout(TIMEOUT)
|
||||
await page.getByTestId('Hub').first().click({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
await page.getByTestId('hub-container-test-id').isVisible({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
@ -1,61 +0,0 @@
|
||||
import { _electron as electron } from 'playwright'
|
||||
import { ElectronApplication, Page, expect, test } from '@playwright/test'
|
||||
|
||||
import {
|
||||
findLatestBuild,
|
||||
parseElectronApp,
|
||||
stubDialog,
|
||||
} from 'electron-playwright-helpers'
|
||||
|
||||
let electronApp: ElectronApplication
|
||||
let page: Page
|
||||
const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
|
||||
|
||||
test.beforeAll(async () => {
|
||||
process.env.CI = 'e2e'
|
||||
|
||||
const latestBuild = findLatestBuild('dist')
|
||||
expect(latestBuild).toBeTruthy()
|
||||
|
||||
// parse the packaged Electron app and find paths and other info
|
||||
const appInfo = parseElectronApp(latestBuild)
|
||||
expect(appInfo).toBeTruthy()
|
||||
|
||||
electronApp = await electron.launch({
|
||||
args: [appInfo.main], // main file from package.json
|
||||
executablePath: appInfo.executable, // path to the Electron executable
|
||||
})
|
||||
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
|
||||
|
||||
page = await electronApp.firstWindow({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
|
||||
test.afterAll(async () => {
|
||||
await electronApp.close()
|
||||
await page.close()
|
||||
})
|
||||
|
||||
test('renders left navigation panel', async () => {
|
||||
test.setTimeout(TIMEOUT)
|
||||
const systemMonitorBtn = await page
|
||||
.getByTestId('System Monitor')
|
||||
.first()
|
||||
.isEnabled({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
const settingsBtn = await page
|
||||
.getByTestId('Thread')
|
||||
.first()
|
||||
.isEnabled({ timeout: TIMEOUT })
|
||||
expect([systemMonitorBtn, settingsBtn].filter((e) => !e).length).toBe(0)
|
||||
// Chat section should be there
|
||||
await page.getByTestId('Local API Server').first().click({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
const localServer = await page.getByTestId('local-server-testid').first()
|
||||
await expect(localServer).toBeVisible({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
67
electron/tests/pages/basePage.ts
Normal file
@ -0,0 +1,67 @@
|
||||
import {
|
||||
expect,
|
||||
test as base,
|
||||
_electron as electron,
|
||||
ElectronApplication,
|
||||
Page,
|
||||
} from '@playwright/test'
|
||||
import {
|
||||
findLatestBuild,
|
||||
parseElectronApp,
|
||||
stubDialog,
|
||||
} from 'electron-playwright-helpers'
|
||||
|
||||
export const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
|
||||
|
||||
export let electronApp: ElectronApplication
|
||||
export let page: Page
|
||||
|
||||
export async function setupElectron() {
|
||||
process.env.CI = 'e2e'
|
||||
|
||||
const latestBuild = findLatestBuild('dist')
|
||||
expect(latestBuild).toBeTruthy()
|
||||
|
||||
// parse the packaged Electron app and find paths and other info
|
||||
const appInfo = parseElectronApp(latestBuild)
|
||||
expect(appInfo).toBeTruthy()
|
||||
|
||||
electronApp = await electron.launch({
|
||||
args: [appInfo.main], // main file from package.json
|
||||
executablePath: appInfo.executable, // path to the Electron executable
|
||||
})
|
||||
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
|
||||
|
||||
page = await electronApp.firstWindow({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
// Return appInfo for future use
|
||||
return appInfo
|
||||
}
|
||||
|
||||
export async function teardownElectron() {
|
||||
await page.close()
|
||||
await electronApp.close()
|
||||
}
|
||||
|
||||
export const test = base.extend<{
|
||||
attachScreenshotsToReport: void
|
||||
}>({
|
||||
attachScreenshotsToReport: [
|
||||
async ({ request }, use, testInfo) => {
|
||||
await use()
|
||||
|
||||
// After the test, we can check whether the test passed or failed.
|
||||
if (testInfo.status !== testInfo.expectedStatus) {
|
||||
const screenshot = await page.screenshot()
|
||||
await testInfo.attach('screenshot', {
|
||||
body: screenshot,
|
||||
contentType: 'image/png',
|
||||
})
|
||||
}
|
||||
},
|
||||
{ auto: true },
|
||||
],
|
||||
})
|
||||
|
||||
test.setTimeout(TIMEOUT)
|
||||
@ -1,45 +0,0 @@
|
||||
import { _electron as electron } from 'playwright'
|
||||
import { ElectronApplication, Page, expect, test } from '@playwright/test'
|
||||
|
||||
import {
|
||||
findLatestBuild,
|
||||
parseElectronApp,
|
||||
stubDialog,
|
||||
} from 'electron-playwright-helpers'
|
||||
|
||||
let electronApp: ElectronApplication
|
||||
let page: Page
|
||||
const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
|
||||
|
||||
test.beforeAll(async () => {
|
||||
process.env.CI = 'e2e'
|
||||
|
||||
const latestBuild = findLatestBuild('dist')
|
||||
expect(latestBuild).toBeTruthy()
|
||||
|
||||
// parse the packaged Electron app and find paths and other info
|
||||
const appInfo = parseElectronApp(latestBuild)
|
||||
expect(appInfo).toBeTruthy()
|
||||
|
||||
electronApp = await electron.launch({
|
||||
args: [appInfo.main], // main file from package.json
|
||||
executablePath: appInfo.executable, // path to the Electron executable
|
||||
})
|
||||
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
|
||||
|
||||
page = await electronApp.firstWindow({
|
||||
timeout: TIMEOUT,
|
||||
})
|
||||
})
|
||||
|
||||
test.afterAll(async () => {
|
||||
await electronApp.close()
|
||||
await page.close()
|
||||
})
|
||||
|
||||
test('shows settings', async () => {
|
||||
test.setTimeout(TIMEOUT)
|
||||
await page.getByTestId('Settings').first().click({ timeout: TIMEOUT })
|
||||
const settingDescription = page.getByTestId('testid-setting-description')
|
||||
await expect(settingDescription).toBeVisible({ timeout: TIMEOUT })
|
||||
})
|
||||
@ -8,9 +8,9 @@
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"build": "tsc --module commonjs && rollup -c rollup.config.ts",
|
||||
"build:publish:linux": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install",
|
||||
"build:publish:darwin": "rimraf *.tgz --glob && npm run build && ../../.github/scripts/auto-sign.sh && npm pack && cpx *.tgz ../../electron/pre-install",
|
||||
"build:publish:win32": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install",
|
||||
"build:publish:linux": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install",
|
||||
"build:publish:darwin": "rimraf *.tgz --glob && npm run build && ../../.github/scripts/auto-sign.sh && npm pack && cpx *.tgz ../../pre-install",
|
||||
"build:publish:win32": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install",
|
||||
"build:publish": "run-script-os"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@ -9,6 +9,7 @@ import {
|
||||
joinPath,
|
||||
executeOnMain,
|
||||
AssistantExtension,
|
||||
AssistantEvent,
|
||||
} from "@janhq/core";
|
||||
|
||||
export default class JanAssistantExtension extends AssistantExtension {
|
||||
@ -21,7 +22,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
async onLoad() {
|
||||
// making the assistant directory
|
||||
const assistantDirExist = await fs.existsSync(
|
||||
JanAssistantExtension._homeDir,
|
||||
JanAssistantExtension._homeDir
|
||||
);
|
||||
if (
|
||||
localStorage.getItem(`${EXTENSION_NAME}-version`) !== VERSION ||
|
||||
@ -31,14 +32,16 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
await fs.mkdirSync(JanAssistantExtension._homeDir);
|
||||
|
||||
// Write assistant metadata
|
||||
this.createJanAssistant();
|
||||
await this.createJanAssistant();
|
||||
// Finished migration
|
||||
localStorage.setItem(`${EXTENSION_NAME}-version`, VERSION);
|
||||
// Update the assistant list
|
||||
events.emit(AssistantEvent.OnAssistantsUpdate, {});
|
||||
}
|
||||
|
||||
// Events subscription
|
||||
events.on(MessageEvent.OnMessageSent, (data: MessageRequest) =>
|
||||
JanAssistantExtension.handleMessageRequest(data, this),
|
||||
JanAssistantExtension.handleMessageRequest(data, this)
|
||||
);
|
||||
|
||||
events.on(InferenceEvent.OnInferenceStopped, () => {
|
||||
@ -53,7 +56,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
|
||||
private static async handleMessageRequest(
|
||||
data: MessageRequest,
|
||||
instance: JanAssistantExtension,
|
||||
instance: JanAssistantExtension
|
||||
) {
|
||||
instance.isCancelled = false;
|
||||
instance.controller = new AbortController();
|
||||
@ -80,7 +83,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
NODE,
|
||||
"toolRetrievalIngestNewDocument",
|
||||
docFile,
|
||||
data.model?.proxyEngine,
|
||||
data.model?.proxyEngine
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -96,7 +99,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
NODE,
|
||||
"toolRetrievalUpdateTextSplitter",
|
||||
data.thread.assistants[0].tools[0]?.settings?.chunk_size ?? 4000,
|
||||
data.thread.assistants[0].tools[0]?.settings?.chunk_overlap ?? 200,
|
||||
data.thread.assistants[0].tools[0]?.settings?.chunk_overlap ?? 200
|
||||
);
|
||||
}
|
||||
|
||||
@ -110,7 +113,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
const retrievalResult = await executeOnMain(
|
||||
NODE,
|
||||
"toolRetrievalQueryResult",
|
||||
prompt,
|
||||
prompt
|
||||
);
|
||||
|
||||
// Update the message content
|
||||
@ -168,7 +171,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
try {
|
||||
await fs.writeFileSync(
|
||||
assistantMetadataPath,
|
||||
JSON.stringify(assistant, null, 2),
|
||||
JSON.stringify(assistant, null, 2)
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
@ -180,7 +183,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
// get all the assistant metadata json
|
||||
const results: Assistant[] = [];
|
||||
const allFileName: string[] = await fs.readdirSync(
|
||||
JanAssistantExtension._homeDir,
|
||||
JanAssistantExtension._homeDir
|
||||
);
|
||||
for (const fileName of allFileName) {
|
||||
const filePath = await joinPath([
|
||||
@ -190,7 +193,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
|
||||
if (filePath.includes(".DS_Store")) continue;
|
||||
const jsonFiles: string[] = (await fs.readdirSync(filePath)).filter(
|
||||
(file: string) => file === "assistant.json",
|
||||
(file: string) => file === "assistant.json"
|
||||
);
|
||||
|
||||
if (jsonFiles.length !== 1) {
|
||||
@ -200,7 +203,7 @@ export default class JanAssistantExtension extends AssistantExtension {
|
||||
|
||||
const content = await fs.readFileSync(
|
||||
await joinPath([filePath, jsonFiles[0]]),
|
||||
"utf-8",
|
||||
"utf-8"
|
||||
);
|
||||
const assistant: Assistant =
|
||||
typeof content === "object" ? content : JSON.parse(content);
|
||||
|
||||
@ -35,21 +35,19 @@ export class Retrieval {
|
||||
if (engine === "nitro") {
|
||||
this.embeddingModel = new OpenAIEmbeddings(
|
||||
{ openAIApiKey: "nitro-embedding" },
|
||||
{ basePath: "http://127.0.0.1:3928/v1" }
|
||||
{ basePath: "http://127.0.0.1:3928/v1" },
|
||||
);
|
||||
} else {
|
||||
// Fallback to OpenAI Settings
|
||||
this.embeddingModel = new OpenAIEmbeddings({
|
||||
configuration: {
|
||||
apiKey: settings.api_key,
|
||||
},
|
||||
openAIApiKey: settings.api_key,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public ingestAgentKnowledge = async (
|
||||
filePath: string,
|
||||
memoryPath: string
|
||||
memoryPath: string,
|
||||
): Promise<any> => {
|
||||
const loader = new PDFLoader(filePath, {
|
||||
splitPages: true,
|
||||
|
||||
@ -7,7 +7,7 @@
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc -b . && webpack --config webpack.config.js",
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install"
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install"
|
||||
},
|
||||
"exports": {
|
||||
".": "./dist/index.js",
|
||||
|
||||
@ -12,7 +12,7 @@ import {
 * functionality for managing threads.
 */
export default class JSONConversationalExtension extends ConversationalExtension {
  private static readonly _homeDir = 'file://threads'
  private static readonly _threadFolder = 'file://threads'
  private static readonly _threadInfoFileName = 'thread.json'
  private static readonly _threadMessagesFileName = 'messages.jsonl'
|
||||
|
||||
@ -20,8 +20,8 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
* Called when the extension is loaded.
|
||||
*/
|
||||
async onLoad() {
|
||||
if (!(await fs.existsSync(JSONConversationalExtension._homeDir)))
|
||||
await fs.mkdirSync(JSONConversationalExtension._homeDir)
|
||||
if (!(await fs.existsSync(JSONConversationalExtension._threadFolder)))
|
||||
await fs.mkdirSync(JSONConversationalExtension._threadFolder)
|
||||
console.debug('JSONConversationalExtension loaded')
|
||||
}
|
||||
|
||||
@ -68,7 +68,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
async saveThread(thread: Thread): Promise<void> {
|
||||
try {
|
||||
const threadDirPath = await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
thread.id,
|
||||
])
|
||||
const threadJsonPath = await joinPath([
|
||||
@ -92,7 +92,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
*/
|
||||
async deleteThread(threadId: string): Promise<void> {
|
||||
const path = await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
`${threadId}`,
|
||||
])
|
||||
try {
|
||||
@ -109,7 +109,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
async addNewMessage(message: ThreadMessage): Promise<void> {
|
||||
try {
|
||||
const threadDirPath = await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
message.thread_id,
|
||||
])
|
||||
const threadMessagePath = await joinPath([
|
||||
@ -177,7 +177,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
): Promise<void> {
|
||||
try {
|
||||
const threadDirPath = await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
threadId,
|
||||
])
|
||||
const threadMessagePath = await joinPath([
|
||||
@ -205,7 +205,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
private async readThread(threadDirName: string): Promise<any> {
|
||||
return fs.readFileSync(
|
||||
await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
threadDirName,
|
||||
JSONConversationalExtension._threadInfoFileName,
|
||||
]),
|
||||
@ -219,14 +219,14 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
*/
|
||||
private async getValidThreadDirs(): Promise<string[]> {
|
||||
const fileInsideThread: string[] = await fs.readdirSync(
|
||||
JSONConversationalExtension._homeDir
|
||||
JSONConversationalExtension._threadFolder
|
||||
)
|
||||
|
||||
const threadDirs: string[] = []
|
||||
for (let i = 0; i < fileInsideThread.length; i++) {
|
||||
if (fileInsideThread[i].includes('.DS_Store')) continue
|
||||
const path = await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
fileInsideThread[i],
|
||||
])
|
||||
|
||||
@ -246,7 +246,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
async getAllMessages(threadId: string): Promise<ThreadMessage[]> {
|
||||
try {
|
||||
const threadDirPath = await joinPath([
|
||||
JSONConversationalExtension._homeDir,
|
||||
JSONConversationalExtension._threadFolder,
|
||||
threadId,
|
||||
])
|
||||
|
||||
@ -263,22 +263,17 @@ export default class JSONConversationalExtension extends ConversationalExtension
|
||||
JSONConversationalExtension._threadMessagesFileName,
|
||||
])
|
||||
|
||||
const result = await fs
|
||||
.readFileSync(messageFilePath, 'utf-8')
|
||||
.then((content) =>
|
||||
content
|
||||
.toString()
|
||||
.split('\n')
|
||||
.filter((line) => line !== '')
|
||||
)
|
||||
let readResult = await fs.readFileSync(messageFilePath, 'utf-8')
|
||||
|
||||
if (typeof readResult === 'object') {
|
||||
readResult = JSON.stringify(readResult)
|
||||
}
|
||||
|
||||
const result = readResult.split('\n').filter((line) => line !== '')
|
||||
|
||||
const messages: ThreadMessage[] = []
|
||||
result.forEach((line: string) => {
|
||||
try {
|
||||
messages.push(JSON.parse(line) as ThreadMessage)
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
}
|
||||
messages.push(JSON.parse(line))
|
||||
})
|
||||
return messages
|
||||
} catch (err) {
|
||||
|
||||
@ -12,9 +12,9 @@
|
||||
"downloadnitro:darwin": "NITRO_VERSION=$(cat ./bin/version.txt) && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-mac-arm64.tar.gz -e --strip 1 -o ./bin/mac-arm64 && chmod +x ./bin/mac-arm64/nitro && download https://github.com/janhq/nitro/releases/download/v${NITRO_VERSION}/nitro-${NITRO_VERSION}-mac-amd64.tar.gz -e --strip 1 -o ./bin/mac-x64 && chmod +x ./bin/mac-x64/nitro",
|
||||
"downloadnitro:win32": "download.bat",
|
||||
"downloadnitro": "run-script-os",
|
||||
"build:publish:darwin": "rimraf *.tgz --glob && npm run build && npm run downloadnitro && ../../.github/scripts/auto-sign.sh && cpx \"bin/**\" \"dist/bin\" && npm pack && cpx *.tgz ../../electron/pre-install",
|
||||
"build:publish:win32": "rimraf *.tgz --glob && npm run build && npm run downloadnitro && cpx \"bin/**\" \"dist/bin\" && npm pack && cpx *.tgz ../../electron/pre-install",
|
||||
"build:publish:linux": "rimraf *.tgz --glob && npm run build && npm run downloadnitro && cpx \"bin/**\" \"dist/bin\" && npm pack && cpx *.tgz ../../electron/pre-install",
|
||||
"build:publish:darwin": "rimraf *.tgz --glob && npm run build && npm run downloadnitro && ../../.github/scripts/auto-sign.sh && cpx \"bin/**\" \"dist/bin\" && npm pack && cpx *.tgz ../../pre-install",
|
||||
"build:publish:win32": "rimraf *.tgz --glob && npm run build && npm run downloadnitro && cpx \"bin/**\" \"dist/bin\" && npm pack && cpx *.tgz ../../pre-install",
|
||||
"build:publish:linux": "rimraf *.tgz --glob && npm run build && npm run downloadnitro && cpx \"bin/**\" \"dist/bin\" && npm pack && cpx *.tgz ../../pre-install",
|
||||
"build:publish": "run-script-os"
|
||||
},
|
||||
"exports": {
|
||||
|
||||
@ -243,9 +243,6 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
|
||||
*/
|
||||
private async onMessageRequest(data: MessageRequest) {
|
||||
if (data.model?.engine !== InferenceEngine.nitro || !this._currentModel) {
|
||||
console.log(
|
||||
`Model is not nitro or no model loaded ${data.model?.engine} ${this._currentModel}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@ -25,12 +25,12 @@ export const executableNitroFile = (): NitroExecutableOptions => {
|
||||
if (nvidiaInfo["run_mode"] === "cpu") {
|
||||
binaryFolder = path.join(binaryFolder, "win-cpu");
|
||||
} else {
|
||||
if (nvidiaInfo["cuda"].version === "12") {
|
||||
binaryFolder = path.join(binaryFolder, "win-cuda-12-0");
|
||||
} else {
|
||||
if (nvidiaInfo["cuda"].version === "11") {
|
||||
binaryFolder = path.join(binaryFolder, "win-cuda-11-7");
|
||||
} else {
|
||||
binaryFolder = path.join(binaryFolder, "win-cuda-12-0");
|
||||
}
|
||||
cudaVisibleDevices = nvidiaInfo["gpu_highest_vram"];
|
||||
cudaVisibleDevices = nvidiaInfo["gpus_in_use"].join(",");
|
||||
}
|
||||
binaryName = "nitro.exe";
|
||||
} else if (process.platform === "darwin") {
|
||||
@ -50,12 +50,12 @@ export const executableNitroFile = (): NitroExecutableOptions => {
|
||||
if (nvidiaInfo["run_mode"] === "cpu") {
|
||||
binaryFolder = path.join(binaryFolder, "linux-cpu");
|
||||
} else {
|
||||
if (nvidiaInfo["cuda"].version === "12") {
|
||||
binaryFolder = path.join(binaryFolder, "linux-cuda-12-0");
|
||||
} else {
|
||||
if (nvidiaInfo["cuda"].version === "11") {
|
||||
binaryFolder = path.join(binaryFolder, "linux-cuda-11-7");
|
||||
} else {
|
||||
binaryFolder = path.join(binaryFolder, "linux-cuda-12-0");
|
||||
}
|
||||
cudaVisibleDevices = nvidiaInfo["gpu_highest_vram"];
|
||||
cudaVisibleDevices = nvidiaInfo["gpus_in_use"].join(",");
|
||||
}
|
||||
}
|
||||
return {
|
||||
|
||||
@ -19,6 +19,8 @@ const DEFALT_SETTINGS = {
  },
  gpus: [],
  gpu_highest_vram: "",
  gpus_in_use: [],
  is_initial: true,
};

/**
|
||||
@ -48,11 +50,15 @@ export interface NitroProcessInfo {
|
||||
*/
|
||||
export async function updateNvidiaInfo() {
|
||||
if (process.platform !== "darwin") {
|
||||
await Promise.all([
|
||||
updateNvidiaDriverInfo(),
|
||||
updateCudaExistence(),
|
||||
updateGpuInfo(),
|
||||
]);
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
} catch (error) {
|
||||
data = DEFALT_SETTINGS;
|
||||
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
|
||||
}
|
||||
updateNvidiaDriverInfo();
|
||||
updateGpuInfo();
|
||||
}
|
||||
}
|
||||
|
||||
@ -73,12 +79,7 @@ export async function updateNvidiaDriverInfo(): Promise<void> {
|
||||
exec(
|
||||
"nvidia-smi --query-gpu=driver_version --format=csv,noheader",
|
||||
(error, stdout) => {
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
} catch (error) {
|
||||
data = DEFALT_SETTINGS;
|
||||
}
|
||||
let data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
|
||||
if (!error) {
|
||||
const firstLine = stdout.split("\n")[0].trim();
|
||||
@ -107,7 +108,7 @@ export function checkFileExistenceInPaths(
|
||||
/**
|
||||
* Validate cuda for linux and windows
|
||||
*/
|
||||
export function updateCudaExistence() {
|
||||
export function updateCudaExistence(data: Record<string, any> = DEFALT_SETTINGS): Record<string, any> {
|
||||
let filesCuda12: string[];
|
||||
let filesCuda11: string[];
|
||||
let paths: string[];
|
||||
@ -141,19 +142,14 @@ export function updateCudaExistence() {
|
||||
cudaVersion = "12";
|
||||
}
|
||||
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
} catch (error) {
|
||||
data = DEFALT_SETTINGS;
|
||||
}
|
||||
|
||||
data["cuda"].exist = cudaExists;
|
||||
data["cuda"].version = cudaVersion;
|
||||
if (cudaExists) {
|
||||
console.log(data["is_initial"], data["gpus_in_use"]);
|
||||
if (cudaExists && data["is_initial"] && data["gpus_in_use"].length > 0) {
|
||||
data.run_mode = "gpu";
|
||||
}
|
||||
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
|
||||
data.is_initial = false;
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -161,14 +157,9 @@ export function updateCudaExistence() {
|
||||
*/
|
||||
export async function updateGpuInfo(): Promise<void> {
|
||||
exec(
|
||||
"nvidia-smi --query-gpu=index,memory.total --format=csv,noheader,nounits",
|
||||
"nvidia-smi --query-gpu=index,memory.total,name --format=csv,noheader,nounits",
|
||||
(error, stdout) => {
|
||||
let data;
|
||||
try {
|
||||
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
} catch (error) {
|
||||
data = DEFALT_SETTINGS;
|
||||
}
|
||||
let data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
|
||||
if (!error) {
|
||||
// Get GPU info and gpu has higher memory first
|
||||
@ -178,21 +169,27 @@ export async function updateGpuInfo(): Promise<void> {
|
||||
.trim()
|
||||
.split("\n")
|
||||
.map((line) => {
|
||||
let [id, vram] = line.split(", ");
|
||||
let [id, vram, name] = line.split(", ");
|
||||
vram = vram.replace(/\r/g, "");
|
||||
if (parseFloat(vram) > highestVram) {
|
||||
highestVram = parseFloat(vram);
|
||||
highestVramId = id;
|
||||
}
|
||||
return { id, vram };
|
||||
return { id, vram, name };
|
||||
});
|
||||
|
||||
data["gpus"] = gpus;
|
||||
data["gpu_highest_vram"] = highestVramId;
|
||||
data.gpus = gpus;
|
||||
data.gpu_highest_vram = highestVramId;
|
||||
} else {
|
||||
data["gpus"] = [];
|
||||
data.gpus = [];
|
||||
data.gpu_highest_vram = "";
|
||||
}
|
||||
|
||||
if (!data["gpus_in_use"] || data["gpus_in_use"].length === 0) {
|
||||
data.gpus_in_use = [data["gpu_highest_vram"]];
|
||||
}
|
||||
|
||||
data = updateCudaExistence(data);
|
||||
writeFileSync(NVIDIA_INFO_FILE, JSON.stringify(data, null, 2));
|
||||
Promise.resolve();
|
||||
}
|
||||
|
||||
@ -8,7 +8,7 @@
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"build": "tsc -b . && webpack --config webpack.config.js",
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install"
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install"
|
||||
},
|
||||
"exports": {
|
||||
".": "./dist/index.js",
|
||||
|
||||
@ -8,7 +8,7 @@
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"build": "tsc -b . && webpack --config webpack.config.js",
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install"
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install"
|
||||
},
|
||||
"exports": {
|
||||
".": "./dist/index.js",
|
||||
|
||||
@ -8,7 +8,7 @@
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"build": "tsc -b . && webpack --config webpack.config.js",
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install"
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install"
|
||||
},
|
||||
"devDependencies": {
|
||||
"cpx": "^1.5.0",
|
||||
|
||||
@ -1,3 +1,15 @@
export {}
declare global {
declare const EXTENSION_NAME: string
declare const MODULE_PATH: string
declare const VERSION: stringå
declare const VERSION: string

interface Core {
api: APIFunctions
events: EventEmitter
}
interface Window {
core?: Core | undefined
electronAPI?: any | undefined
}
}
|
||||
|
||||
11
extensions/model-extension/src/helpers/path.ts
Normal file
@ -0,0 +1,11 @@
/**
 * try to retrieve the download file name from the source url
 */

export function extractFileName(url: string, fileExtension: string): string {
  const extractedFileName = url.split('/').pop()
  const fileName = extractedFileName.toLowerCase().endsWith(fileExtension)
    ? extractedFileName
    : extractedFileName + fileExtension
  return fileName
}
|
||||
@ -8,7 +8,13 @@ import {
|
||||
ModelExtension,
|
||||
Model,
|
||||
getJanDataFolderPath,
|
||||
events,
|
||||
DownloadEvent,
|
||||
DownloadRoute,
|
||||
ModelEvent,
|
||||
} from '@janhq/core'
|
||||
import { DownloadState } from '@janhq/core/.'
|
||||
import { extractFileName } from './helpers/path'
|
||||
|
||||
/**
|
||||
* A extension for models
|
||||
@ -29,6 +35,8 @@ export default class JanModelExtension extends ModelExtension {
|
||||
*/
|
||||
async onLoad() {
|
||||
this.copyModelsToHomeDir()
|
||||
// Handle Desktop Events
|
||||
this.handleDesktopEvents()
|
||||
}
|
||||
|
||||
/**
|
||||
@ -61,6 +69,8 @@ export default class JanModelExtension extends ModelExtension {
|
||||
|
||||
// Finished migration
|
||||
localStorage.setItem(`${EXTENSION_NAME}-version`, VERSION)
|
||||
|
||||
events.emit(ModelEvent.OnModelsUpdate, {})
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
}
|
||||
@ -83,31 +93,66 @@ export default class JanModelExtension extends ModelExtension {
|
||||
if (model.sources.length > 1) {
|
||||
// path to model binaries
|
||||
for (const source of model.sources) {
|
||||
let path = this.extractFileName(source.url)
|
||||
let path = extractFileName(
|
||||
source.url,
|
||||
JanModelExtension._supportedModelFormat
|
||||
)
|
||||
if (source.filename) {
|
||||
path = await joinPath([modelDirPath, source.filename])
|
||||
}
|
||||
|
||||
downloadFile(source.url, path, network)
|
||||
}
|
||||
// TODO: handle multiple binaries for web later
|
||||
} else {
|
||||
const fileName = this.extractFileName(model.sources[0]?.url)
|
||||
const fileName = extractFileName(
|
||||
model.sources[0]?.url,
|
||||
JanModelExtension._supportedModelFormat
|
||||
)
|
||||
const path = await joinPath([modelDirPath, fileName])
|
||||
downloadFile(model.sources[0]?.url, path, network)
|
||||
|
||||
if (window && window.core?.api && window.core.api.baseApiUrl) {
|
||||
this.startPollingDownloadProgress(model.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* try to retrieve the download file name from the source url
|
||||
* Specifically for Jan server.
|
||||
*/
|
||||
private extractFileName(url: string): string {
|
||||
const extractedFileName = url.split('/').pop()
|
||||
const fileName = extractedFileName
|
||||
.toLowerCase()
|
||||
.endsWith(JanModelExtension._supportedModelFormat)
|
||||
? extractedFileName
|
||||
: extractedFileName + JanModelExtension._supportedModelFormat
|
||||
return fileName
|
||||
private async startPollingDownloadProgress(modelId: string): Promise<void> {
|
||||
// wait for some seconds before polling
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000))
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const interval = setInterval(async () => {
|
||||
fetch(
|
||||
`${window.core.api.baseApiUrl}/v1/download/${DownloadRoute.getDownloadProgress}/${modelId}`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: { contentType: 'application/json' },
|
||||
}
|
||||
).then(async (res) => {
|
||||
const state: DownloadState = await res.json()
|
||||
if (state.downloadState === 'end') {
|
||||
events.emit(DownloadEvent.onFileDownloadSuccess, state)
|
||||
clearInterval(interval)
|
||||
resolve()
|
||||
return
|
||||
}
|
||||
|
||||
if (state.downloadState === 'error') {
|
||||
events.emit(DownloadEvent.onFileDownloadError, state)
|
||||
clearInterval(interval)
|
||||
resolve()
|
||||
return
|
||||
}
|
||||
|
||||
events.emit(DownloadEvent.onFileDownloadUpdate, state)
|
||||
})
|
||||
}, 1000)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
@ -286,6 +331,7 @@ export default class JanModelExtension extends ModelExtension {
|
||||
* model.json file associated with it.
|
||||
*
|
||||
* This function will create a model.json file for the model.
|
||||
* It works only with single binary file model.
|
||||
*
|
||||
* @param dirName the director which reside in ~/jan/models but does not have model.json file.
|
||||
*/
|
||||
@ -302,9 +348,7 @@ export default class JanModelExtension extends ModelExtension {
|
||||
let binaryFileSize: number | undefined = undefined
|
||||
|
||||
for (const file of files) {
|
||||
if (file.endsWith(JanModelExtension._incompletedModelFileName)) continue
|
||||
if (file.endsWith('.json')) continue
|
||||
|
||||
if (file.endsWith(JanModelExtension._supportedModelFormat)) {
|
||||
const path = await joinPath([JanModelExtension._homeDir, dirName, file])
|
||||
const fileStats = await fs.fileStat(path)
|
||||
if (fileStats.isDirectory) continue
|
||||
@ -312,13 +356,14 @@ export default class JanModelExtension extends ModelExtension {
|
||||
binaryFileName = file
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (!binaryFileName) {
|
||||
console.warn(`Unable to find binary file for model ${dirName}`)
|
||||
return
|
||||
}
|
||||
|
||||
const defaultModel = await this.getDefaultModel()
|
||||
const defaultModel = (await this.getDefaultModel()) as Model
|
||||
if (!defaultModel) {
|
||||
console.error('Unable to find default model')
|
||||
return
|
||||
@ -326,8 +371,19 @@ export default class JanModelExtension extends ModelExtension {
|
||||
|
||||
const model: Model = {
|
||||
...defaultModel,
|
||||
// Overwrite default N/A fields
|
||||
id: dirName,
|
||||
name: dirName,
|
||||
sources: [
|
||||
{
|
||||
url: binaryFileName,
|
||||
filename: binaryFileName,
|
||||
},
|
||||
],
|
||||
settings: {
|
||||
...defaultModel.settings,
|
||||
llama_model_path: binaryFileName,
|
||||
},
|
||||
created: Date.now(),
|
||||
description: `${dirName} - user self import model`,
|
||||
metadata: {
|
||||
@ -371,4 +427,28 @@ export default class JanModelExtension extends ModelExtension {
|
||||
async getConfiguredModels(): Promise<Model[]> {
|
||||
return this.getModelsMetadata()
|
||||
}
|
||||
|
||||
handleDesktopEvents() {
|
||||
if (window && window.electronAPI) {
|
||||
window.electronAPI.onFileDownloadUpdate(
|
||||
async (_event: string, state: any | undefined) => {
|
||||
if (!state) return
|
||||
state.downloadState = 'update'
|
||||
events.emit(DownloadEvent.onFileDownloadUpdate, state)
|
||||
}
|
||||
)
|
||||
window.electronAPI.onFileDownloadError(
|
||||
async (_event: string, state: any) => {
|
||||
state.downloadState = 'error'
|
||||
events.emit(DownloadEvent.onFileDownloadError, state)
|
||||
}
|
||||
)
|
||||
window.electronAPI.onFileDownloadSuccess(
|
||||
async (_event: string, state: any) => {
|
||||
state.downloadState = 'end'
|
||||
events.emit(DownloadEvent.onFileDownloadSuccess, state)
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -8,7 +8,7 @@
    "forceConsistentCasingInFileNames": true,
    "strict": false,
    "skipLibCheck": true,
    "rootDir": "./src"
    "rootDir": "./src",
  },
  "include": ["./src"]
  "include": ["./src"],
}
|
||||
|
||||
@ -19,7 +19,7 @@ module.exports = {
|
||||
new webpack.DefinePlugin({
|
||||
EXTENSION_NAME: JSON.stringify(packageJson.name),
|
||||
MODULE_PATH: JSON.stringify(`${packageJson.name}/${packageJson.module}`),
|
||||
VERSION: JSON.stringify(packageJson.version),
|
||||
VERSION: JSON.stringify(packageJson.version)
|
||||
}),
|
||||
],
|
||||
output: {
|
||||
|
||||
@ -1,6 +1,6 @@
{
  "name": "@janhq/monitoring-extension",
  "version": "1.0.9",
  "version": "1.0.10",
  "description": "This extension provides system health and OS level data",
  "main": "dist/index.js",
  "module": "dist/module.js",
|
||||
@ -8,7 +8,7 @@
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"build": "tsc -b . && webpack --config webpack.config.js",
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install"
|
||||
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../pre-install"
|
||||
},
|
||||
"devDependencies": {
|
||||
"rimraf": "^3.0.2",
|
||||
@ -26,6 +26,7 @@
|
||||
"README.md"
|
||||
],
|
||||
"bundleDependencies": [
|
||||
"node-os-utils"
|
||||
"node-os-utils",
|
||||
"@janhq/core"
|
||||
]
|
||||
}
|
||||
|
||||
@ -1,4 +1,14 @@
|
||||
const nodeOsUtils = require("node-os-utils");
|
||||
const getJanDataFolderPath = require("@janhq/core/node").getJanDataFolderPath;
|
||||
const path = require("path");
|
||||
const { readFileSync } = require("fs");
|
||||
const exec = require("child_process").exec;
|
||||
|
||||
const NVIDIA_INFO_FILE = path.join(
|
||||
getJanDataFolderPath(),
|
||||
"settings",
|
||||
"settings.json"
|
||||
);
|
||||
|
||||
const getResourcesInfo = () =>
|
||||
new Promise((resolve) => {
|
||||
@ -16,14 +26,44 @@ const getResourcesInfo = () =>
|
||||
});
|
||||
|
||||
const getCurrentLoad = () =>
|
||||
new Promise((resolve) => {
|
||||
new Promise((resolve, reject) => {
|
||||
nodeOsUtils.cpu.usage().then((cpuPercentage) => {
|
||||
const response = {
|
||||
cpu: {
|
||||
usage: cpuPercentage,
|
||||
},
|
||||
let data = {
|
||||
run_mode: "cpu",
|
||||
gpus_in_use: [],
|
||||
};
|
||||
resolve(response);
|
||||
if (process.platform !== "darwin") {
|
||||
data = JSON.parse(readFileSync(NVIDIA_INFO_FILE, "utf-8"));
|
||||
}
|
||||
if (data.run_mode === "gpu" && data.gpus_in_use.length > 0) {
|
||||
const gpuIds = data["gpus_in_use"].join(",");
|
||||
if (gpuIds !== "") {
|
||||
exec(
|
||||
`nvidia-smi --query-gpu=index,name,temperature.gpu,utilization.gpu,memory.total,memory.free,utilization.memory --format=csv,noheader,nounits --id=${gpuIds}`,
|
||||
(error, stdout, stderr) => {
|
||||
if (error) {
|
||||
console.error(`exec error: ${error}`);
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
const gpuInfo = stdout.trim().split("\n").map((line) => {
|
||||
const [id, name, temperature, utilization, memoryTotal, memoryFree, memoryUtilization] = line.split(", ").map(item => item.replace(/\r/g, ""));
|
||||
return { id, name, temperature, utilization, memoryTotal, memoryFree, memoryUtilization };
|
||||
});
|
||||
resolve({
|
||||
cpu: { usage: cpuPercentage },
|
||||
gpu: gpuInfo
|
||||
});
|
||||
}
|
||||
);
|
||||
} else {
|
||||
// Handle the case where gpuIds is empty
|
||||
resolve({ cpu: { usage: cpuPercentage }, gpu: [] });
|
||||
}
|
||||
} else {
|
||||
// Handle the case where run_mode is not 'gpu' or no GPUs are in use
|
||||
resolve({ cpu: { usage: cpuPercentage }, gpu: [] });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
13
package.json
@ -21,22 +21,23 @@
|
||||
"lint": "yarn workspace jan lint && yarn workspace jan-web lint",
|
||||
"test:unit": "yarn workspace @janhq/core test",
|
||||
"test": "yarn workspace jan test:e2e",
|
||||
"copy:assets": "cpx \"models/**\" \"electron/models/\" && cpx \"docs/openapi/**\" \"electron/docs/openapi\"",
|
||||
"copy:assets": "cpx \"models/**\" \"electron/models/\" && cpx \"pre-install/*.tgz\" \"electron/pre-install/\" && cpx \"docs/openapi/**\" \"electron/docs/openapi\"",
|
||||
"dev:electron": "yarn copy:assets && yarn workspace jan dev",
|
||||
"dev:web": "yarn workspace jan-web dev",
|
||||
"dev:server": "yarn workspace @janhq/server dev",
|
||||
"dev:server": "yarn copy:assets && yarn workspace @janhq/server dev",
|
||||
"dev": "concurrently --kill-others \"yarn dev:web\" \"wait-on http://localhost:3000 && yarn dev:electron\"",
|
||||
"test-local": "yarn lint && yarn build:test && yarn test",
|
||||
"dev:uikit": "yarn workspace @janhq/uikit install && yarn workspace @janhq/uikit dev",
|
||||
"build:uikit": "yarn workspace @janhq/uikit install && yarn workspace @janhq/uikit build",
|
||||
"build:server": "cd server && yarn install && yarn run build",
|
||||
"build:server": "yarn copy:assets && cd server && yarn install && yarn run build",
|
||||
"build:core": "cd core && yarn install && yarn run build",
|
||||
"build:web": "yarn workspace jan-web build && cpx \"web/out/**\" \"electron/renderer/\"",
|
||||
"build:electron": "yarn copy:assets && yarn workspace jan build",
|
||||
"build:electron:test": "yarn workspace jan build:test",
|
||||
"build:extensions:windows": "rimraf ./electron/pre-install/*.tgz && powershell -command \"$jobs = Get-ChildItem -Path './extensions' -Directory | ForEach-Object { Start-Job -Name ($_.Name) -ScriptBlock { param($_dir); try { Set-Location $_dir; npm install; npm run build:publish; Write-Output 'Build successful in ' + $_dir } catch { Write-Error 'Error in ' + $_dir; throw } } -ArgumentList $_.FullName }; $jobs | Wait-Job; $jobs | ForEach-Object { Receive-Job -Job $_ -Keep } | ForEach-Object { Write-Host $_ }; $failed = $jobs | Where-Object { $_.State -ne 'Completed' -or $_.ChildJobs[0].JobStateInfo.State -ne 'Completed' }; if ($failed) { Exit 1 }\"",
|
||||
"build:extensions:linux": "rimraf ./electron/pre-install/*.tgz && find ./extensions -mindepth 1 -maxdepth 1 -type d -print0 | xargs -0 -n 1 -P 4 -I {} sh -c 'cd {} && npm install && npm run build:publish'",
|
||||
"build:extensions:darwin": "rimraf ./electron/pre-install/*.tgz && find ./extensions -mindepth 1 -maxdepth 1 -type d -print0 | xargs -0 -n 1 -P 4 -I {} sh -c 'cd {} && npm install && npm run build:publish'",
|
||||
"build:extensions:windows": "rimraf ./pre-install/*.tgz && powershell -command \"$jobs = Get-ChildItem -Path './extensions' -Directory | ForEach-Object { Start-Job -Name ($_.Name) -ScriptBlock { param($_dir); try { Set-Location $_dir; npm install; npm run build:publish; Write-Output 'Build successful in ' + $_dir } catch { Write-Error 'Error in ' + $_dir; throw } } -ArgumentList $_.FullName }; $jobs | Wait-Job; $jobs | ForEach-Object { Receive-Job -Job $_ -Keep } | ForEach-Object { Write-Host $_ }; $failed = $jobs | Where-Object { $_.State -ne 'Completed' -or $_.ChildJobs[0].JobStateInfo.State -ne 'Completed' }; if ($failed) { Exit 1 }\"",
|
||||
"build:extensions:linux": "rimraf ./pre-install/*.tgz && find ./extensions -mindepth 1 -maxdepth 1 -type d -print0 | xargs -0 -n 1 -P 4 -I {} sh -c 'cd {} && npm install && npm run build:publish'",
|
||||
"build:extensions:darwin": "rimraf ./pre-install/*.tgz && find ./extensions -mindepth 1 -maxdepth 1 -type d -print0 | xargs -0 -n 1 -P 4 -I {} sh -c 'cd {} && npm install && npm run build:publish'",
|
||||
"build:extensions:server": "yarn workspace build:extensions ",
|
||||
"build:extensions": "run-script-os",
|
||||
"build:test": "yarn copy:assets && yarn build:web && yarn workspace jan build:test",
|
||||
"build": "yarn build:web && yarn build:electron",
|
||||
|
||||
0
pre-install/.gitkeep
Normal file
47
server/helpers/setup.ts
Normal file
@ -0,0 +1,47 @@
import { join, extname } from "path";
import { existsSync, readdirSync, writeFileSync, mkdirSync } from "fs";
import { init, installExtensions } from "@janhq/core/node";

export async function setup() {
  /**
   * Setup Jan Data Directory
   */
  const appDir = process.env.JAN_DATA_DIRECTORY ?? join(__dirname, "..", "jan");

  console.debug(`Create app data directory at ${appDir}...`);
  if (!existsSync(appDir)) mkdirSync(appDir);
  //@ts-ignore
  global.core = {
    // Define appPath function for app to retrieve app path globaly
    appPath: () => appDir,
  };
  init({
    extensionsPath: join(appDir, "extensions"),
  });

  /**
   * Write app configurations. See #1619
   */
  console.debug("Writing config file...");
  writeFileSync(
    join(appDir, "settings.json"),
    JSON.stringify({
      data_folder: appDir,
    }),
    "utf-8"
  );

  /**
   * Install extensions
   */

  console.debug("Installing extensions...");

  const baseExtensionPath = join(__dirname, "../../..", "pre-install");
  const extensions = readdirSync(baseExtensionPath)
    .filter((file) => extname(file) === ".tgz")
    .map((file) => join(baseExtensionPath, file));

  await installExtensions(extensions);
  console.debug("Extensions installed");
}
|
||||
@ -38,6 +38,7 @@ export interface ServerConfig {
|
||||
isVerboseEnabled?: boolean;
|
||||
schemaPath?: string;
|
||||
baseDir?: string;
|
||||
storageAdataper?: any;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -103,9 +104,12 @@ export const startServer = async (configs?: ServerConfig) => {
|
||||
{ prefix: "extensions" }
|
||||
);
|
||||
|
||||
// Register proxy middleware
|
||||
if (configs?.storageAdataper)
|
||||
server.addHook("preHandler", configs.storageAdataper);
|
||||
|
||||
// Register API routes
|
||||
await server.register(v1Router, { prefix: "/v1" });
|
||||
|
||||
// Start listening for requests
|
||||
await server
|
||||
.listen({
|
||||
|
||||
@ -1,3 +1,7 @@
import { startServer } from "./index";

startServer();
import { s3 } from "./middleware/s3";
import { setup } from "./helpers/setup";
import { startServer as start } from "./index";
/**
 * Setup extensions and start the server
 */
setup().then(() => start({ storageAdataper: s3 }));
|
||||
|
||||
70
server/middleware/s3.ts
Normal file
@ -0,0 +1,70 @@
import { join } from "path";

// Middleware to intercept requests and proxy if certain conditions are met
const config = {
  endpoint: process.env.AWS_ENDPOINT,
  region: process.env.AWS_REGION,
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
};

const S3_BUCKET_NAME = process.env.S3_BUCKET_NAME;

const fs = require("@cyclic.sh/s3fs")(S3_BUCKET_NAME, config);
const PROXY_PREFIX = "/v1/fs";
const PROXY_ROUTES = ["/threads", "/messages"];

export const s3 = (req: any, reply: any, done: any) => {
  // Proxy FS requests to S3 using S3FS
  if (req.url.startsWith(PROXY_PREFIX)) {
    const route = req.url.split("/").pop();
    const args = parseRequestArgs(req);

    // Proxy matched requests to the s3fs module
    if (args.length && PROXY_ROUTES.some((route) => args[0].includes(route))) {
      try {
        // Handle customized route
        // S3FS does not handle appendFileSync
        if (route === "appendFileSync") {
          let result = handAppendFileSync(args);

          reply.status(200).send(result);
          return;
        }
        // Reroute the other requests to the s3fs module
        const result = fs[route](...args);
        reply.status(200).send(result);
        return;
      } catch (ex) {
        console.log(ex);
      }
    }
  }
  // Let other requests go through
  done();
};

const parseRequestArgs = (req: Request) => {
  const {
    getJanDataFolderPath,
    normalizeFilePath,
  } = require("@janhq/core/node");

  return JSON.parse(req.body as any).map((arg: any) =>
    typeof arg === "string" &&
    (arg.startsWith(`file:/`) || arg.startsWith(`file:\\`))
      ? join(getJanDataFolderPath(), normalizeFilePath(arg))
      : arg
  );
};

const handAppendFileSync = (args: any[]) => {
  if (fs.existsSync(args[0])) {
    const data = fs.readFileSync(args[0], "utf-8");
    return fs.writeFileSync(args[0], data + args[1]);
  } else {
    return fs.writeFileSync(args[0], args[1]);
  }
};
|
||||
@ -1,5 +0,0 @@
{
  "watch": ["main.ts", "v1"],
  "ext": "ts, json",
  "exec": "tsc && node ./build/main.js"
}
|
||||
@ -13,16 +13,18 @@
|
||||
"scripts": {
|
||||
"lint": "eslint . --ext \".js,.jsx,.ts,.tsx\"",
|
||||
"test:e2e": "playwright test --workers=1",
|
||||
"dev": "tsc --watch & node --watch build/main.js",
|
||||
"build": "tsc"
|
||||
"build:core": "cd node_modules/@janhq/core && yarn install && yarn build",
|
||||
"dev": "yarn build:core && tsc --watch & node --watch build/main.js",
|
||||
"build": "yarn build:core && tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"@alumna/reflect": "^1.1.3",
|
||||
"@cyclic.sh/s3fs": "^1.2.9",
|
||||
"@fastify/cors": "^8.4.2",
|
||||
"@fastify/static": "^6.12.0",
|
||||
"@fastify/swagger": "^8.13.0",
|
||||
"@fastify/swagger-ui": "2.0.1",
|
||||
"@janhq/core": "link:./core",
|
||||
"@janhq/core": "file:../core",
|
||||
"dotenv": "^16.3.1",
|
||||
"fastify": "^4.24.3",
|
||||
"request": "^2.88.2",
|
||||
@ -39,5 +41,8 @@
|
||||
"run-script-os": "^1.1.6",
|
||||
"@types/tcp-port-used": "^1.0.4",
|
||||
"typescript": "^5.2.2"
|
||||
}
|
||||
},
|
||||
"bundleDependencies": [
|
||||
"@janhq/core"
|
||||
]
|
||||
}
|
||||
|
||||
@ -20,5 +20,5 @@
|
||||
// "sourceMap": true,
|
||||
|
||||
"include": ["./**/*.ts"],
|
||||
"exclude": ["core", "build", "dist", "tests", "node_modules"]
|
||||
"exclude": ["core", "build", "dist", "tests", "node_modules", "extensions"]
|
||||
}
|
||||
|
||||
@ -13,22 +13,22 @@ import {
|
||||
import { useAtomValue } from 'jotai'
|
||||
|
||||
import useDownloadModel from '@/hooks/useDownloadModel'
|
||||
import { useDownloadState } from '@/hooks/useDownloadState'
|
||||
import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
|
||||
|
||||
import { formatDownloadPercentage } from '@/utils/converter'
|
||||
|
||||
import { downloadingModelsAtom } from '@/helpers/atoms/Model.atom'
|
||||
import { getDownloadingModelAtom } from '@/helpers/atoms/Model.atom'
|
||||
|
||||
export default function DownloadingState() {
|
||||
const { downloadStates } = useDownloadState()
|
||||
const downloadingModels = useAtomValue(downloadingModelsAtom)
|
||||
const downloadStates = useAtomValue(modelDownloadStateAtom)
|
||||
const downloadingModels = useAtomValue(getDownloadingModelAtom)
|
||||
const { abortModelDownload } = useDownloadModel()
|
||||
|
||||
const totalCurrentProgress = downloadStates
|
||||
const totalCurrentProgress = Object.values(downloadStates)
|
||||
.map((a) => a.size.transferred + a.size.transferred)
|
||||
.reduce((partialSum, a) => partialSum + a, 0)
|
||||
|
||||
const totalSize = downloadStates
|
||||
const totalSize = Object.values(downloadStates)
|
||||
.map((a) => a.size.total + a.size.total)
|
||||
.reduce((partialSum, a) => partialSum + a, 0)
|
||||
|
||||
@ -36,12 +36,14 @@ export default function DownloadingState() {
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
{downloadStates?.length > 0 && (
|
||||
{Object.values(downloadStates)?.length > 0 && (
|
||||
<Modal>
|
||||
<ModalTrigger asChild>
|
||||
<div className="relative block">
|
||||
<Button size="sm" themes="outline">
|
||||
<span>{downloadStates.length} Downloading model</span>
|
||||
<span>
|
||||
{Object.values(downloadStates).length} Downloading model
|
||||
</span>
|
||||
</Button>
|
||||
<span
|
||||
className="absolute left-0 h-full rounded-md rounded-l-md bg-primary/20"
|
||||
@ -55,8 +57,7 @@ export default function DownloadingState() {
|
||||
<ModalHeader>
|
||||
<ModalTitle>Downloading model</ModalTitle>
|
||||
</ModalHeader>
|
||||
{downloadStates.map((item, i) => {
|
||||
return (
|
||||
{Object.values(downloadStates).map((item, i) => (
|
||||
<div className="pt-2" key={i}>
|
||||
<Progress
|
||||
className="mb-2 h-2"
|
||||
@ -87,8 +88,7 @@ export default function DownloadingState() {
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
))}
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)}
|
||||
|
||||
@ -25,12 +25,12 @@ import { MainViewState } from '@/constants/screens'
|
||||
|
||||
import { useActiveModel } from '@/hooks/useActiveModel'
|
||||
|
||||
import { useDownloadState } from '@/hooks/useDownloadState'
|
||||
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'
|
||||
import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
|
||||
import useGetSystemResources from '@/hooks/useGetSystemResources'
|
||||
import { useMainViewState } from '@/hooks/useMainViewState'
|
||||
|
||||
import { serverEnabledAtom } from '@/helpers/atoms/LocalServer.atom'
|
||||
import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'
|
||||
|
||||
const menuLinks = [
|
||||
{
|
||||
@ -47,14 +47,22 @@ const menuLinks = [
|
||||
|
||||
const BottomBar = () => {
|
||||
const { activeModel, stateModel } = useActiveModel()
|
||||
const { ram, cpu } = useGetSystemResources()
|
||||
const { ram, cpu, gpus } = useGetSystemResources()
|
||||
const progress = useAtomValue(appDownloadProgress)
|
||||
const { downloadedModels } = useGetDownloadedModels()
|
||||
const downloadedModels = useAtomValue(downloadedModelsAtom)
|
||||
|
||||
const { setMainViewState } = useMainViewState()
|
||||
const { downloadStates } = useDownloadState()
|
||||
const downloadStates = useAtomValue(modelDownloadStateAtom)
|
||||
const setShowSelectModelModal = useSetAtom(showSelectModelModalAtom)
|
||||
const [serverEnabled] = useAtom(serverEnabledAtom)
|
||||
|
||||
const calculateGpuMemoryUsage = (gpu: Record<string, never>) => {
|
||||
const total = parseInt(gpu.memoryTotal)
|
||||
const free = parseInt(gpu.memoryFree)
|
||||
if (!total || !free) return 0
|
||||
return Math.round(((total - free) / total) * 100)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="fixed bottom-0 left-16 z-20 flex h-12 w-[calc(100%-64px)] items-center justify-between border-t border-border bg-background/80 px-3">
|
||||
<div className="flex flex-shrink-0 items-center gap-x-2">
|
||||
@ -100,7 +108,7 @@ const BottomBar = () => {
|
||||
)}
|
||||
{downloadedModels.length === 0 &&
|
||||
!stateModel.loading &&
|
||||
downloadStates.length === 0 && (
|
||||
Object.values(downloadStates).length === 0 && (
|
||||
<Button
|
||||
size="sm"
|
||||
themes="outline"
|
||||
@ -117,6 +125,17 @@ const BottomBar = () => {
|
||||
<SystemItem name="CPU:" value={`${cpu}%`} />
|
||||
<SystemItem name="Mem:" value={`${ram}%`} />
|
||||
</div>
|
||||
{gpus.length > 0 && (
|
||||
<div className="flex items-center gap-x-2">
|
||||
{gpus.map((gpu, index) => (
|
||||
<SystemItem
|
||||
key={index}
|
||||
name={`GPU ${gpu.id}:`}
|
||||
value={`${gpu.utilization}% Util, ${calculateGpuMemoryUsage(gpu)}% Mem`}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
{/* VERSION is defined by webpack, please see next.config.js */}
|
||||
<span className="text-xs text-muted-foreground">
|
||||
Jan v{VERSION ?? ''}
|
||||
|
||||
@ -11,7 +11,7 @@ import {
|
||||
Badge,
|
||||
} from '@janhq/uikit'
|
||||
|
||||
import { useAtom } from 'jotai'
|
||||
import { useAtom, useAtomValue } from 'jotai'
|
||||
import { DatabaseIcon, CpuIcon } from 'lucide-react'
|
||||
|
||||
import { showSelectModelModalAtom } from '@/containers/Providers/KeyListener'
|
||||
@ -19,14 +19,14 @@ import { showSelectModelModalAtom } from '@/containers/Providers/KeyListener'
|
||||
import { MainViewState } from '@/constants/screens'
|
||||
|
||||
import { useActiveModel } from '@/hooks/useActiveModel'
|
||||
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'
|
||||
import { useMainViewState } from '@/hooks/useMainViewState'
|
||||
|
||||
import { serverEnabledAtom } from '@/helpers/atoms/LocalServer.atom'
|
||||
import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'
|
||||
|
||||
export default function CommandListDownloadedModel() {
|
||||
const { setMainViewState } = useMainViewState()
|
||||
const { downloadedModels } = useGetDownloadedModels()
|
||||
const downloadedModels = useAtomValue(downloadedModelsAtom)
|
||||
const { activeModel, startModel, stopModel } = useActiveModel()
|
||||
const [serverEnabled] = useAtom(serverEnabledAtom)
|
||||
const [showSelectModelModal, setShowSelectModelModal] = useAtom(
|
||||
|
||||
@ -20,7 +20,6 @@ import { MainViewState } from '@/constants/screens'
|
||||
|
||||
import { useClickOutside } from '@/hooks/useClickOutside'
|
||||
import { useCreateNewThread } from '@/hooks/useCreateNewThread'
|
||||
import useGetAssistants, { getAssistants } from '@/hooks/useGetAssistants'
|
||||
import { useMainViewState } from '@/hooks/useMainViewState'
|
||||
|
||||
import { usePath } from '@/hooks/usePath'
|
||||
@ -29,13 +28,14 @@ import { showRightSideBarAtom } from '@/screens/Chat/Sidebar'
|
||||
|
||||
import { openFileTitle } from '@/utils/titleUtils'
|
||||
|
||||
import { assistantsAtom } from '@/helpers/atoms/Assistant.atom'
|
||||
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
|
||||
|
||||
const TopBar = () => {
|
||||
const activeThread = useAtomValue(activeThreadAtom)
|
||||
const { mainViewState } = useMainViewState()
|
||||
const { requestCreateNewThread } = useCreateNewThread()
|
||||
const { assistants } = useGetAssistants()
|
||||
const assistants = useAtomValue(assistantsAtom)
|
||||
const [showRightSideBar, setShowRightSideBar] = useAtom(showRightSideBarAtom)
|
||||
const [showLeftSideBar, setShowLeftSideBar] = useAtom(showLeftSideBarAtom)
|
||||
const showing = useAtomValue(showRightSideBarAtom)
|
||||
@ -61,12 +61,7 @@ const TopBar = () => {
|
||||
|
||||
const onCreateConversationClick = async () => {
|
||||
if (assistants.length === 0) {
|
||||
const res = await getAssistants()
|
||||
if (res.length === 0) {
|
||||
alert('No assistant available')
|
||||
return
|
||||
}
|
||||
requestCreateNewThread(res[0])
|
||||
} else {
|
||||
requestCreateNewThread(assistants[0])
|
||||
}
|
||||
|
||||
@ -17,23 +17,22 @@ import {
|
||||
import { atom, useAtomValue } from 'jotai'
|
||||
|
||||
import useDownloadModel from '@/hooks/useDownloadModel'
|
||||
import { useDownloadState } from '@/hooks/useDownloadState'
|
||||
|
||||
import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
|
||||
|
||||
import { formatDownloadPercentage } from '@/utils/converter'
|
||||
|
||||
import { downloadingModelsAtom } from '@/helpers/atoms/Model.atom'
|
||||
import { getDownloadingModelAtom } from '@/helpers/atoms/Model.atom'
|
||||
|
||||
type Props = {
|
||||
model: Model
|
||||
isFromList?: boolean
|
||||
}
|
||||
|
||||
export default function ModalCancelDownload({ model, isFromList }: Props) {
|
||||
const { modelDownloadStateAtom } = useDownloadState()
|
||||
const downloadingModels = useAtomValue(downloadingModelsAtom)
|
||||
const ModalCancelDownload: React.FC<Props> = ({ model, isFromList }) => {
|
||||
const downloadingModels = useAtomValue(getDownloadingModelAtom)
|
||||
const downloadAtom = useMemo(
|
||||
() => atom((get) => get(modelDownloadStateAtom)[model.id]),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[model.id]
|
||||
)
|
||||
const downloadState = useAtomValue(downloadAtom)
|
||||
@ -98,3 +97,5 @@ export default function ModalCancelDownload({ model, isFromList }: Props) {
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
|
||||
export default ModalCancelDownload
|
||||
|
||||
21
web/containers/Providers/DataLoader.tsx
Normal file
@ -0,0 +1,21 @@
'use client'

import { Fragment, ReactNode } from 'react'

import useAssistants from '@/hooks/useAssistants'
import useModels from '@/hooks/useModels'
import useThreads from '@/hooks/useThreads'

type Props = {
  children: ReactNode
}

const DataLoader: React.FC<Props> = ({ children }) => {
  useModels()
  useThreads()
  useAssistants()

  return <Fragment>{children}</Fragment>
}

export default DataLoader
|
||||
@ -18,7 +18,6 @@ import {
|
||||
loadModelErrorAtom,
|
||||
stateModelAtom,
|
||||
} from '@/hooks/useActiveModel'
|
||||
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'
|
||||
|
||||
import { queuedMessageAtom } from '@/hooks/useSendChatMessage'
|
||||
|
||||
@ -29,16 +28,18 @@ import {
|
||||
addNewMessageAtom,
|
||||
updateMessageAtom,
|
||||
} from '@/helpers/atoms/ChatMessage.atom'
|
||||
import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'
|
||||
import {
|
||||
updateThreadWaitingForResponseAtom,
|
||||
threadsAtom,
|
||||
isGeneratingResponseAtom,
|
||||
updateThreadAtom,
|
||||
} from '@/helpers/atoms/Thread.atom'
|
||||
|
||||
export default function EventHandler({ children }: { children: ReactNode }) {
|
||||
const addNewMessage = useSetAtom(addNewMessageAtom)
|
||||
const updateMessage = useSetAtom(updateMessageAtom)
|
||||
const { downloadedModels } = useGetDownloadedModels()
|
||||
const downloadedModels = useAtomValue(downloadedModelsAtom)
|
||||
const setActiveModel = useSetAtom(activeModelAtom)
|
||||
const setStateModel = useSetAtom(stateModelAtom)
|
||||
const setQueuedMessage = useSetAtom(queuedMessageAtom)
|
||||
@ -49,6 +50,7 @@ export default function EventHandler({ children }: { children: ReactNode }) {
|
||||
const modelsRef = useRef(downloadedModels)
|
||||
const threadsRef = useRef(threads)
|
||||
const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
|
||||
const updateThread = useSetAtom(updateThreadAtom)
|
||||
|
||||
useEffect(() => {
|
||||
threadsRef.current = threads
|
||||
@ -126,11 +128,17 @@ export default function EventHandler({ children }: { children: ReactNode }) {
|
||||
|
||||
const thread = threadsRef.current?.find((e) => e.id == message.thread_id)
|
||||
if (thread) {
|
||||
const messageContent = message.content[0]?.text.value ?? ''
|
||||
const messageContent = message.content[0]?.text?.value
|
||||
const metadata = {
|
||||
...thread.metadata,
|
||||
lastMessage: messageContent,
|
||||
...(messageContent && { lastMessage: messageContent }),
|
||||
}
|
||||
|
||||
updateThread({
|
||||
...thread,
|
||||
metadata,
|
||||
})
|
||||
|
||||
extensionManager
|
||||
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
|
||||
?.saveThread({
|
||||
@ -143,7 +151,7 @@ export default function EventHandler({ children }: { children: ReactNode }) {
|
||||
?.addNewMessage(message)
|
||||
}
|
||||
},
|
||||
[updateMessage, updateThreadWaiting]
|
||||
[updateMessage, updateThreadWaiting, setIsGeneratingResponse, updateThread]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
|
||||
@ -1,91 +1,62 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
import { PropsWithChildren, useCallback, useEffect } from 'react'
|
||||
|
||||
import { PropsWithChildren, useEffect, useRef } from 'react'
|
||||
import React from 'react'
|
||||
|
||||
import { baseName } from '@janhq/core'
|
||||
import { useAtomValue, useSetAtom } from 'jotai'
|
||||
import { DownloadEvent, events } from '@janhq/core'
|
||||
import { useSetAtom } from 'jotai'
|
||||
|
||||
import { useDownloadState } from '@/hooks/useDownloadState'
|
||||
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'
|
||||
|
||||
import { modelBinFileName } from '@/utils/model'
|
||||
import { setDownloadStateAtom } from '@/hooks/useDownloadState'
|
||||
|
||||
import EventHandler from './EventHandler'
|
||||
|
||||
import { appDownloadProgress } from './Jotai'
|
||||
|
||||
import { downloadingModelsAtom } from '@/helpers/atoms/Model.atom'
|
||||
|
||||
export default function EventListenerWrapper({ children }: PropsWithChildren) {
|
||||
const EventListenerWrapper = ({ children }: PropsWithChildren) => {
|
||||
const setDownloadState = useSetAtom(setDownloadStateAtom)
|
||||
const setProgress = useSetAtom(appDownloadProgress)
|
||||
const models = useAtomValue(downloadingModelsAtom)
|
||||
const modelsRef = useRef(models)
|
||||
|
||||
const { setDownloadedModels, downloadedModels } = useGetDownloadedModels()
|
||||
const {
|
||||
setDownloadState,
|
||||
setDownloadStateSuccess,
|
||||
setDownloadStateFailed,
|
||||
setDownloadStateCancelled,
|
||||
} = useDownloadState()
|
||||
const downloadedModelRef = useRef(downloadedModels)
|
||||
const onFileDownloadUpdate = useCallback(
|
||||
async (state: DownloadState) => {
|
||||
console.debug('onFileDownloadUpdate', state)
|
||||
setDownloadState(state)
|
||||
},
|
||||
[setDownloadState]
|
||||
)
|
||||
|
||||
const onFileDownloadError = useCallback(
|
||||
(state: DownloadState) => {
|
||||
console.debug('onFileDownloadError', state)
|
||||
setDownloadState(state)
|
||||
},
|
||||
[setDownloadState]
|
||||
)
|
||||
|
||||
const onFileDownloadSuccess = useCallback(
|
||||
(state: DownloadState) => {
|
||||
console.debug('onFileDownloadSuccess', state)
|
||||
setDownloadState(state)
|
||||
},
|
||||
[setDownloadState]
|
||||
)
|
||||
|
||||
useEffect(() => {
modelsRef.current = models
}, [models])
useEffect(() => {
downloadedModelRef.current = downloadedModels
}, [downloadedModels])
console.log('EventListenerWrapper: registering event listeners...')

events.on(DownloadEvent.onFileDownloadUpdate, onFileDownloadUpdate)
events.on(DownloadEvent.onFileDownloadError, onFileDownloadError)
events.on(DownloadEvent.onFileDownloadSuccess, onFileDownloadSuccess)

return () => {
console.log('EventListenerWrapper: unregistering event listeners...')
events.off(DownloadEvent.onFileDownloadUpdate, onFileDownloadUpdate)
events.off(DownloadEvent.onFileDownloadError, onFileDownloadError)
events.off(DownloadEvent.onFileDownloadSuccess, onFileDownloadSuccess)
}
}, [onFileDownloadUpdate, onFileDownloadError, onFileDownloadSuccess])

useEffect(() => {
if (window && window.electronAPI) {
window.electronAPI.onFileDownloadUpdate(
async (_event: string, state: any | undefined) => {
if (!state) return
const modelName = await baseName(state.fileName)
const model = modelsRef.current.find(
(model) => modelBinFileName(model) === modelName
)
if (model)
setDownloadState({
...state,
modelId: model.id,
})
}
)

window.electronAPI.onFileDownloadError(
async (_event: string, state: any) => {
const modelName = await baseName(state.fileName)
const model = modelsRef.current.find(
(model) => modelBinFileName(model) === modelName
)
if (model) {
if (state.err?.message !== 'aborted') {
console.error('Download error', state)
setDownloadStateFailed(model.id, state.err.message)
} else {
setDownloadStateCancelled(model.id)
}
}
}
)

window.electronAPI.onFileDownloadSuccess(
async (_event: string, state: any) => {
if (state && state.fileName) {
const modelName = await baseName(state.fileName)
const model = modelsRef.current.find(
(model) => modelBinFileName(model) === modelName
)
if (model) {
setDownloadStateSuccess(model.id)
setDownloadedModels([...downloadedModelRef.current, model])
}
}
}
)

window.electronAPI.onAppUpdateDownloadUpdate(
(_event: string, progress: any) => {
setProgress(progress.percent)
@ -105,14 +76,9 @@ export default function EventListenerWrapper({ children }: PropsWithChildren) {
})
}
return () => {}
}, [
setDownloadState,
setDownloadStateCancelled,
setDownloadStateFailed,
setDownloadStateSuccess,
setDownloadedModels,
setProgress,
])
}, [setDownloadState, setProgress])

return <EventHandler>{children}</EventHandler>
}

export default EventListenerWrapper

@ -21,6 +21,10 @@ import {

import Umami from '@/utils/umami'

import Loader from '../Loader'

import DataLoader from './DataLoader'

import KeyListener from './KeyListener'

import { extensionManager } from '@/extension'
@ -30,6 +34,7 @@ const Providers = (props: PropsWithChildren) => {

const [setupCore, setSetupCore] = useState(false)
const [activated, setActivated] = useState(false)
const [settingUp, setSettingUp] = useState(false)

async function setupExtensions() {
// Register all active extensions
@ -37,11 +42,13 @@ const Providers = (props: PropsWithChildren) => {

setTimeout(async () => {
if (!isCoreExtensionInstalled()) {
setupBaseExtensions()
setSettingUp(true)
await setupBaseExtensions()
return
}

extensionManager.load()
setSettingUp(false)
setActivated(true)
}, 500)
}
@ -71,11 +78,14 @@ const Providers = (props: PropsWithChildren) => {
<JotaiWrapper>
<ThemeWrapper>
<Umami />
{settingUp && <Loader description="Preparing Update..." />}
{setupCore && activated && (
<KeyListener>
<FeatureToggleWrapper>
<EventListenerWrapper>
<TooltipProvider delayDuration={0}>{children}</TooltipProvider>
<TooltipProvider delayDuration={0}>
<DataLoader>{children}</DataLoader>
</TooltipProvider>
{!isMac && <GPUDriverPrompt />}
</EventListenerWrapper>
<Toaster />

4
web/helpers/atoms/Assistant.atom.ts
Normal file
@ -0,0 +1,4 @@
import { Assistant } from '@janhq/core/.'
import { atom } from 'jotai'

export const assistantsAtom = atom<Assistant[]>([])
@ -70,6 +70,7 @@ export const addNewMessageAtom = atom(
set(chatMessages, newData)

// Update thread last message
if (newMessage.content.length)
set(
updateThreadStateLastMessageAtom,
newMessage.thread_id,
@ -131,6 +132,7 @@ export const updateMessageAtom = atom(
newData[conversationId] = updatedMessages
set(chatMessages, newData)
// Update thread last message
if (text.length)
set(updateThreadStateLastMessageAtom, conversationId, text)
}
}

@ -4,23 +4,32 @@ import { atom } from 'jotai'
export const stateModel = atom({ state: 'start', loading: false, model: '' })
export const activeAssistantModelAtom = atom<Model | undefined>(undefined)

export const downloadingModelsAtom = atom<Model[]>([])
/**
* Stores the list of models which are being downloaded.
*/
const downloadingModelsAtom = atom<Model[]>([])

export const addNewDownloadingModelAtom = atom(
null,
(get, set, model: Model) => {
const currentModels = get(downloadingModelsAtom)
set(downloadingModelsAtom, [...currentModels, model])
export const getDownloadingModelAtom = atom((get) => get(downloadingModelsAtom))

export const addDownloadingModelAtom = atom(null, (get, set, model: Model) => {
const downloadingModels = get(downloadingModelsAtom)
if (!downloadingModels.find((e) => e.id === model.id)) {
set(downloadingModelsAtom, [...downloadingModels, model])
}
)
})

export const removeDownloadingModelAtom = atom(
null,
(get, set, modelId: string) => {
const currentModels = get(downloadingModelsAtom)
const downloadingModels = get(downloadingModelsAtom)

set(
downloadingModelsAtom,
currentModels.filter((e) => e.id !== modelId)
downloadingModels.filter((e) => e.id !== modelId)
)
}
)

export const downloadedModelsAtom = atom<Model[]>([])

export const configuredModelsAtom = atom<Model[]>([])
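
For context, a minimal usage sketch of the refactored downloading-model atoms above (illustrative only, not part of the diff; the hook name useDownloadTracking is hypothetical, while the atoms and jotai hooks are the ones shown in this commit):

// Sketch only: wraps the exported atoms from Model.atom.ts in a small hook.
import { Model } from '@janhq/core'
import { useAtomValue, useSetAtom } from 'jotai'

import {
  addDownloadingModelAtom,
  getDownloadingModelAtom,
  removeDownloadingModelAtom,
} from '@/helpers/atoms/Model.atom'

export const useDownloadTracking = () => {
  // Write-only setters: start or stop tracking a model download.
  const addDownloadingModel = useSetAtom(addDownloadingModelAtom)
  const removeDownloadingModel = useSetAtom(removeDownloadingModelAtom)
  // Read-only view of every model currently downloading (deduplicated by id).
  const downloadingModels = useAtomValue(getDownloadingModelAtom)

  const track = (model: Model) => addDownloadingModel(model)
  const untrack = (model: Model) => removeDownloadingModel(model.id)

  return { downloadingModels, track, untrack }
}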

@ -5,3 +5,5 @@ export const usedRamAtom = atom<number>(0)
export const availableRamAtom = atom<number>(0)

export const cpuUsageAtom = atom<number>(0)

export const nvidiaTotalVramAtom = atom<number>(0)

@ -3,9 +3,9 @@ import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'

import { toaster } from '@/containers/Toast'

import { useGetDownloadedModels } from './useGetDownloadedModels'
import { LAST_USED_MODEL_ID } from './useRecommendedModel'

import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'

export const activeModelAtom = atom<Model | undefined>(undefined)
@ -21,7 +21,7 @@ export function useActiveModel() {
const [activeModel, setActiveModel] = useAtom(activeModelAtom)
const activeThread = useAtomValue(activeThreadAtom)
const [stateModel, setStateModel] = useAtom(stateModelAtom)
const { downloadedModels } = useGetDownloadedModels()
const downloadedModels = useAtomValue(downloadedModelsAtom)
const setLoadModelError = useSetAtom(loadModelErrorAtom)

const startModel = async (modelId: string) => {

39
web/hooks/useAssistants.ts
Normal file
@ -0,0 +1,39 @@
import { useCallback, useEffect } from 'react'

import {
Assistant,
AssistantEvent,
AssistantExtension,
ExtensionTypeEnum,
events,
} from '@janhq/core'

import { useSetAtom } from 'jotai'

import { extensionManager } from '@/extension'
import { assistantsAtom } from '@/helpers/atoms/Assistant.atom'

const useAssistants = () => {
const setAssistants = useSetAtom(assistantsAtom)

const getData = useCallback(async () => {
const assistants = await getLocalAssistants()
setAssistants(assistants)
}, [setAssistants])

useEffect(() => {
getData()

events.on(AssistantEvent.OnAssistantsUpdate, () => getData())
return () => {
events.off(AssistantEvent.OnAssistantsUpdate, () => getData())
}
}, [getData])
}

const getLocalAssistants = async (): Promise<Assistant[]> =>
extensionManager
.get<AssistantExtension>(ExtensionTypeEnum.Assistant)
?.getAssistants() ?? []

export default useAssistants
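
For context, a minimal consumer sketch (illustrative only, not part of the diff; the AssistantList component and the '@/hooks/useAssistants' path are assumptions, and it assumes Assistant exposes id and name): mount the hook once so it subscribes to AssistantEvent.OnAssistantsUpdate, then read the populated atom anywhere.

// Sketch only: reads assistantsAtom after useAssistants has populated it.
import { useAtomValue } from 'jotai'

import useAssistants from '@/hooks/useAssistants'
import { assistantsAtom } from '@/helpers/atoms/Assistant.atom'

const AssistantList = () => {
  // Fetches assistants on mount and refreshes them on OnAssistantsUpdate events.
  useAssistants()
  const assistants = useAtomValue(assistantsAtom)

  return (
    <ul>
      {assistants.map((assistant) => (
        <li key={assistant.id}>{assistant.name}</li>
      ))}
    </ul>
  )
}

export default AssistantList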

@ -6,8 +6,9 @@ import {
ThreadAssistantInfo,
ThreadState,
Model,
MessageStatus,
} from '@janhq/core'
import { atom, useSetAtom } from 'jotai'
import { atom, useAtomValue, useSetAtom } from 'jotai'

import { selectedModelAtom } from '@/containers/DropdownListSidebar'
import { fileUploadAtom } from '@/containers/Providers/Jotai'
@ -19,6 +20,8 @@ import useRecommendedModel from './useRecommendedModel'
import useSetActiveThread from './useSetActiveThread'

import { extensionManager } from '@/extension'

import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
import {
threadsAtom,
threadStatesAtom,
@ -50,15 +53,25 @@ export const useCreateNewThread = () => {
const setFileUpload = useSetAtom(fileUploadAtom)
const setSelectedModel = useSetAtom(selectedModelAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const messages = useAtomValue(getCurrentChatMessagesAtom)

const { recommendedModel, downloadedModels } = useRecommendedModel()

const threads = useAtomValue(threadsAtom)

const requestCreateNewThread = async (
assistant: Assistant,
model?: Model | undefined
) => {
const defaultModel = model ?? recommendedModel ?? downloadedModels[0]

// check the latest thread's last message; if it is empty, the user cannot create another empty thread
const lastMessage = threads[threads.length - 1]?.metadata?.lastMessage

if (!lastMessage && threads.length && !messages.length) {
return null
}

const createdAt = Date.now()
const assistantInfo: ThreadAssistantInfo = {
assistant_id: assistant.id,

@ -1,13 +1,14 @@
import { ExtensionTypeEnum, ModelExtension, Model } from '@janhq/core'

import { useAtom } from 'jotai'

import { toaster } from '@/containers/Toast'

import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'

import { extensionManager } from '@/extension/ExtensionManager'
import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'

export default function useDeleteModel() {
const { setDownloadedModels, downloadedModels } = useGetDownloadedModels()
const [downloadedModels, setDownloadedModels] = useAtom(downloadedModelsAtom)

const deleteModel = async (model: Model) => {
await extensionManager

@ -1,4 +1,4 @@
import { useContext } from 'react'
import { useCallback, useContext } from 'react'

import {
Model,
@ -15,36 +15,21 @@ import { FeatureToggleContext } from '@/context/FeatureToggle'

import { modelBinFileName } from '@/utils/model'

import { useDownloadState } from './useDownloadState'
import { setDownloadStateAtom } from './useDownloadState'

import { extensionManager } from '@/extension/ExtensionManager'
import { addNewDownloadingModelAtom } from '@/helpers/atoms/Model.atom'
import { addDownloadingModelAtom } from '@/helpers/atoms/Model.atom'

export default function useDownloadModel() {
const { ignoreSSL, proxy } = useContext(FeatureToggleContext)
const { setDownloadState } = useDownloadState()
const addNewDownloadingModel = useSetAtom(addNewDownloadingModelAtom)
const setDownloadState = useSetAtom(setDownloadStateAtom)
const addDownloadingModel = useSetAtom(addDownloadingModelAtom)

const downloadModel = async (model: Model) => {
const childrenDownloadProgress: DownloadState[] = []
model.sources.forEach((source: ModelArtifact) => {
childrenDownloadProgress.push({
modelId: source.filename,
time: {
elapsed: 0,
remaining: 0,
},
speed: 0,
percent: 0,
size: {
total: 0,
transferred: 0,
},
})
})

// set an initial download state
setDownloadState({
const downloadModel = useCallback(
async (model: Model) => {
const childProgresses: DownloadState[] = model.sources.map(
(source: ModelArtifact) => ({
filename: source.filename,
modelId: model.id,
time: {
elapsed: 0,
@ -56,15 +41,34 @@ export default function useDownloadModel() {
total: 0,
transferred: 0,
},
children: childrenDownloadProgress,
downloadState: 'downloading',
})
)

// set an initial download state
setDownloadState({
filename: '',
modelId: model.id,
time: {
elapsed: 0,
remaining: 0,
},
speed: 0,
percent: 0,
size: {
total: 0,
transferred: 0,
},
children: childProgresses,
downloadState: 'downloading',
})

addNewDownloadingModel(model)
addDownloadingModel(model)

await extensionManager
.get<ModelExtension>(ExtensionTypeEnum.Model)
?.downloadModel(model, { ignoreSSL, proxy })
}
await localDownloadModel(model, ignoreSSL, proxy)
},
[ignoreSSL, proxy, addDownloadingModel, setDownloadState]
)

const abortModelDownload = async (model: Model) => {
await abortDownload(
@ -77,3 +81,12 @@ export default function useDownloadModel() {
abortModelDownload,
}
}

const localDownloadModel = async (
model: Model,
ignoreSSL: boolean,
proxy: string
) =>
extensionManager
.get<ModelExtension>(ExtensionTypeEnum.Model)
?.downloadModel(model, { ignoreSSL, proxy })
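
For context, a minimal call-site sketch (illustrative only, not part of the diff; the DownloadButton component and the '@/hooks/useDownloadModel' path are assumptions, and it assumes the hook still returns downloadModel alongside abortModelDownload): because downloadModel is now memoized with useCallback, it can be passed down as a prop or listed in effect dependencies without retriggering.

// Sketch only: wiring the memoized downloadModel callback to a button.
import { Model } from '@janhq/core'

import useDownloadModel from '@/hooks/useDownloadModel'

const DownloadButton = ({ model }: { model: Model }) => {
  const { downloadModel } = useDownloadModel()

  // downloadModel resolves once the ModelExtension has been asked to start the download.
  return <button onClick={() => downloadModel(model)}>Download {model.name}</button>
}

export default DownloadButton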

@ -1,96 +1,64 @@
import { atom, useSetAtom, useAtomValue } from 'jotai'
import { atom } from 'jotai'

import { toaster } from '@/containers/Toast'

import {
configuredModelsAtom,
downloadedModelsAtom,
removeDownloadingModelAtom,
} from '@/helpers/atoms/Model.atom'

// download states
const modelDownloadStateAtom = atom<Record<string, DownloadState>>({})
export const modelDownloadStateAtom = atom<Record<string, DownloadState>>({})

const setDownloadStateAtom = atom(null, (get, set, state: DownloadState) => {
/**
* Used to set the download state for a particular model.
*/
export const setDownloadStateAtom = atom(
null,
(get, set, state: DownloadState) => {
const currentState = { ...get(modelDownloadStateAtom) }
console.debug(
`current download state for ${state.modelId} is ${JSON.stringify(state)}`

if (state.downloadState === 'end') {
// download successfully
delete currentState[state.modelId]
set(removeDownloadingModelAtom, state.modelId)
const model = get(configuredModelsAtom).find(
(e) => e.id === state.modelId
)
currentState[state.modelId] = state
set(modelDownloadStateAtom, currentState)
})

const setDownloadStateSuccessAtom = atom(null, (get, set, modelId: string) => {
const currentState = { ...get(modelDownloadStateAtom) }
const state = currentState[modelId]
if (!state) {
console.debug(`Cannot find download state for ${modelId}`)
return
}
delete currentState[modelId]
set(modelDownloadStateAtom, currentState)
if (model) set(downloadedModelsAtom, (prev) => [...prev, model])
toaster({
title: 'Download Completed',
description: `Download ${modelId} completed`,
description: `Download ${state.modelId} completed`,
type: 'success',
})
} else if (state.downloadState === 'error') {
// download error
delete currentState[state.modelId]
set(removeDownloadingModelAtom, state.modelId)
if (state.error === 'aborted') {
toaster({
title: 'Cancel Download',
description: `Model ${state.modelId} download cancelled`,
type: 'warning',
})

const setDownloadStateFailedAtom = atom(
null,
(get, set, modelId: string, error: string) => {
const currentState = { ...get(modelDownloadStateAtom) }
const state = currentState[modelId]
if (!state) {
console.debug(`Cannot find download state for ${modelId}`)
return
}
if (error.includes('certificate')) {
error += '. To fix enable "Ignore SSL Certificates" in Advanced settings.'
} else {
let error = state.error
if (state.error?.includes('certificate')) {
error +=
'. To fix enable "Ignore SSL Certificates" in Advanced settings.'
}
toaster({
title: 'Download Failed',
description: `Model ${modelId} download failed: ${error}`,
description: `Model ${state.modelId} download failed: ${error}`,
type: 'error',
})
}
} else {
// download in progress
currentState[state.modelId] = state
}

delete currentState[modelId]
set(modelDownloadStateAtom, currentState)
}
)
const setDownloadStateCancelledAtom = atom(
null,
(get, set, modelId: string) => {
const currentState = { ...get(modelDownloadStateAtom) }
const state = currentState[modelId]
if (!state) {
console.debug(`Cannot find download state for ${modelId}`)
toaster({
title: 'Cancel Download',
description: `Model ${modelId} cancel download`,
type: 'warning',
})

return
}
delete currentState[modelId]
set(modelDownloadStateAtom, currentState)
}
)

export function useDownloadState() {
const modelDownloadState = useAtomValue(modelDownloadStateAtom)
const setDownloadState = useSetAtom(setDownloadStateAtom)
const setDownloadStateSuccess = useSetAtom(setDownloadStateSuccessAtom)
const setDownloadStateFailed = useSetAtom(setDownloadStateFailedAtom)
const setDownloadStateCancelled = useSetAtom(setDownloadStateCancelledAtom)

const downloadStates: DownloadState[] = []
for (const [, value] of Object.entries(modelDownloadState)) {
downloadStates.push(value)
}

return {
modelDownloadStateAtom,
modelDownloadState,
setDownloadState,
setDownloadStateSuccess,
setDownloadStateFailed,
setDownloadStateCancelled,
downloadStates,
}
}
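
For context, a minimal sketch of dispatching a state through the exported setDownloadStateAtom (illustrative only, not part of the diff; the hook name and the field values are assumptions, based on the DownloadState fields shown earlier in this commit):

// Sketch only: report a finished download so the atom clears it and shows the success toast.
import { useSetAtom } from 'jotai'

import { setDownloadStateAtom } from '@/hooks/useDownloadState'

export const useReportDownloadEnd = () => {
  const setDownloadState = useSetAtom(setDownloadStateAtom)

  return (modelId: string, filename: string) =>
    setDownloadState({
      modelId,
      filename,
      time: { elapsed: 0, remaining: 0 },
      speed: 0,
      percent: 1,
      size: { total: 0, transferred: 0 },
      children: [],
      downloadState: 'end', // 'end' -> success path, 'error' -> error path, otherwise in progress
    })
}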

@ -1,27 +0,0 @@
import { useEffect, useState } from 'react'

import { Assistant, ExtensionTypeEnum, AssistantExtension } from '@janhq/core'

import { extensionManager } from '@/extension/ExtensionManager'

export const getAssistants = async (): Promise<Assistant[]> =>
extensionManager
.get<AssistantExtension>(ExtensionTypeEnum.Assistant)
?.getAssistants() ?? []

/**
* Hooks for get assistants
*
* @returns assistants
*/
export default function useGetAssistants() {
const [assistants, setAssistants] = useState<Assistant[]>([])

useEffect(() => {
getAssistants()
.then((data) => setAssistants(data))
.catch((err) => console.error(err))
}, [])

return { assistants }
}
@ -1,30 +0,0 @@
import { useCallback, useEffect, useState } from 'react'

import { ExtensionTypeEnum, ModelExtension, Model } from '@janhq/core'

import { extensionManager } from '@/extension/ExtensionManager'

export function useGetConfiguredModels() {
const [loading, setLoading] = useState<boolean>(false)
const [models, setModels] = useState<Model[]>([])

const fetchModels = useCallback(async () => {
setLoading(true)
const models = await getConfiguredModels()
setLoading(false)
setModels(models)
}, [])

useEffect(() => {
fetchModels()
}, [fetchModels])

return { loading, models }
}

const getConfiguredModels = async (): Promise<Model[]> => {
const models = await extensionManager
.get<ModelExtension>(ExtensionTypeEnum.Model)
?.getConfiguredModels()
return models ?? []
}