Merge branch 'dev' into feat/old-mac-support
This commit is contained in:
commit
ea29785a8a
99
.github/workflows/autoqa-template.yml
vendored
99
.github/workflows/autoqa-template.yml
vendored
@ -82,13 +82,13 @@ jobs:
|
|||||||
# Find the exe file in the artifact
|
# Find the exe file in the artifact
|
||||||
$exeFile = Get-ChildItem -Path "${{ runner.temp }}/windows-artifact" -Recurse -Filter "*.exe" | Select-Object -First 1
|
$exeFile = Get-ChildItem -Path "${{ runner.temp }}/windows-artifact" -Recurse -Filter "*.exe" | Select-Object -First 1
|
||||||
if ($exeFile) {
|
if ($exeFile) {
|
||||||
Write-Host "✅ Found local installer: $($exeFile.FullName)"
|
Write-Host "[SUCCESS] Found local installer: $($exeFile.FullName)"
|
||||||
Copy-Item -Path $exeFile.FullName -Destination "$env:TEMP\jan-installer.exe" -Force
|
Copy-Item -Path $exeFile.FullName -Destination "$env:TEMP\jan-installer.exe" -Force
|
||||||
Write-Host "✅ Installer copied to: $env:TEMP\jan-installer.exe"
|
Write-Host "[SUCCESS] Installer copied to: $env:TEMP\jan-installer.exe"
|
||||||
# Don't set JAN_APP_PATH here - let the install script set it to the correct installed app path
|
# Don't set JAN_APP_PATH here - let the install script set it to the correct installed app path
|
||||||
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $env:GITHUB_ENV
|
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $env:GITHUB_ENV
|
||||||
} else {
|
} else {
|
||||||
Write-Error "❌ No .exe file found in artifact"
|
Write-Error "[FAILED] No .exe file found in artifact"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@ -127,6 +127,37 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
.\scripts\run_tests.ps1 -JanAppPath "$env:JAN_APP_PATH" -ProcessName "$env:JAN_PROCESS_NAME" -RpToken "$env:RP_TOKEN"
|
.\scripts\run_tests.ps1 -JanAppPath "$env:JAN_APP_PATH" -ProcessName "$env:JAN_PROCESS_NAME" -RpToken "$env:RP_TOKEN"
|
||||||
|
|
||||||
|
- name: Collect Jan logs for artifact upload
|
||||||
|
if: always()
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
$logDirs = @(
|
||||||
|
"$env:APPDATA\Jan-nightly\data\logs",
|
||||||
|
"$env:APPDATA\Jan\data\logs"
|
||||||
|
)
|
||||||
|
$dest = "autoqa\jan-logs"
|
||||||
|
mkdir $dest -Force | Out-Null
|
||||||
|
foreach ($dir in $logDirs) {
|
||||||
|
if (Test-Path $dir) {
|
||||||
|
Copy-Item "$dir\*.log" $dest -Force -ErrorAction SilentlyContinue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload Jan logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/jan-logs/
|
||||||
|
|
||||||
- name: Cleanup after tests
|
- name: Cleanup after tests
|
||||||
if: always()
|
if: always()
|
||||||
shell: powershell
|
shell: powershell
|
||||||
@ -196,9 +227,9 @@ jobs:
|
|||||||
# Find the deb file in the artifact
|
# Find the deb file in the artifact
|
||||||
DEB_FILE=$(find "${{ runner.temp }}/ubuntu-artifact" -name "*.deb" -type f | head -1)
|
DEB_FILE=$(find "${{ runner.temp }}/ubuntu-artifact" -name "*.deb" -type f | head -1)
|
||||||
if [ -n "$DEB_FILE" ]; then
|
if [ -n "$DEB_FILE" ]; then
|
||||||
echo "✅ Found local installer: $DEB_FILE"
|
echo "[SUCCESS] Found local installer: $DEB_FILE"
|
||||||
cp "$DEB_FILE" "/tmp/jan-installer.deb"
|
cp "$DEB_FILE" "/tmp/jan-installer.deb"
|
||||||
echo "✅ Installer copied to: /tmp/jan-installer.deb"
|
echo "[SUCCESS] Installer copied to: /tmp/jan-installer.deb"
|
||||||
echo "JAN_APP_PATH=/tmp/jan-installer.deb" >> $GITHUB_ENV
|
echo "JAN_APP_PATH=/tmp/jan-installer.deb" >> $GITHUB_ENV
|
||||||
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
|
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
|
||||||
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
||||||
@ -207,7 +238,7 @@ jobs:
|
|||||||
echo "JAN_PROCESS_NAME=Jan" >> $GITHUB_ENV
|
echo "JAN_PROCESS_NAME=Jan" >> $GITHUB_ENV
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo "❌ No .deb file found in artifact"
|
echo "[FAILED] No .deb file found in artifact"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
@ -252,13 +283,35 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
./scripts/run_tests.sh "$JAN_APP_PATH" "$JAN_PROCESS_NAME" "$RP_TOKEN" "ubuntu"
|
./scripts/run_tests.sh "$JAN_APP_PATH" "$JAN_PROCESS_NAME" "$RP_TOKEN" "ubuntu"
|
||||||
|
|
||||||
|
- name: Collect Jan logs for artifact upload
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
mkdir -p autoqa/jan-logs
|
||||||
|
cp ~/.local/share/Jan-nightly/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
cp ~/.local/share/Jan/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload Jan logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/jan-logs/
|
||||||
|
|
||||||
- name: Cleanup after tests
|
- name: Cleanup after tests
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
./autoqa/scripts/ubuntu_post_cleanup.sh "$IS_NIGHTLY"
|
./autoqa/scripts/ubuntu_post_cleanup.sh "$IS_NIGHTLY"
|
||||||
|
|
||||||
macos:
|
macos:
|
||||||
runs-on: macos-selfhosted-15-arm64
|
runs-on: macos-selfhosted-15-arm64-cua
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
|
|
||||||
env:
|
env:
|
||||||
@ -296,9 +349,9 @@ jobs:
|
|||||||
# Find the dmg file in the artifact
|
# Find the dmg file in the artifact
|
||||||
DMG_FILE=$(find "${{ runner.temp }}/macos-artifact" -name "*.dmg" -type f | head -1)
|
DMG_FILE=$(find "${{ runner.temp }}/macos-artifact" -name "*.dmg" -type f | head -1)
|
||||||
if [ -n "$DMG_FILE" ]; then
|
if [ -n "$DMG_FILE" ]; then
|
||||||
echo "✅ Found local installer: $DMG_FILE"
|
echo "[SUCCESS] Found local installer: $DMG_FILE"
|
||||||
cp "$DMG_FILE" "/tmp/jan-installer.dmg"
|
cp "$DMG_FILE" "/tmp/jan-installer.dmg"
|
||||||
echo "✅ Installer copied to: /tmp/jan-installer.dmg"
|
echo "[SUCCESS] Installer copied to: /tmp/jan-installer.dmg"
|
||||||
echo "JAN_APP_PATH=/tmp/jan-installer.dmg" >> $GITHUB_ENV
|
echo "JAN_APP_PATH=/tmp/jan-installer.dmg" >> $GITHUB_ENV
|
||||||
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
|
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
|
||||||
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
||||||
@ -307,7 +360,7 @@ jobs:
|
|||||||
echo "PROCESS_NAME=Jan" >> $GITHUB_ENV
|
echo "PROCESS_NAME=Jan" >> $GITHUB_ENV
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo "❌ No .dmg file found in artifact"
|
echo "[FAILED] No .dmg file found in artifact"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
@ -349,7 +402,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "Homebrew not available, checking if tkinter works..."
|
echo "Homebrew not available, checking if tkinter works..."
|
||||||
python3 -c "import tkinter" || {
|
python3 -c "import tkinter" || {
|
||||||
echo "⚠️ tkinter not available and Homebrew not found"
|
echo "[WARNING] tkinter not available and Homebrew not found"
|
||||||
echo "This may cause issues with mouse control"
|
echo "This may cause issues with mouse control"
|
||||||
}
|
}
|
||||||
fi
|
fi
|
||||||
@ -362,7 +415,7 @@ jobs:
|
|||||||
echo "Installing Python dependencies..."
|
echo "Installing Python dependencies..."
|
||||||
pip install --upgrade pip
|
pip install --upgrade pip
|
||||||
pip install -r requirements.txt
|
pip install -r requirements.txt
|
||||||
echo "✅ Python dependencies installed"
|
echo "[SUCCESS] Python dependencies installed"
|
||||||
|
|
||||||
- name: Setup ReportPortal environment
|
- name: Setup ReportPortal environment
|
||||||
run: |
|
run: |
|
||||||
@ -390,6 +443,28 @@ jobs:
|
|||||||
|
|
||||||
./scripts/run_tests.sh "$JAN_APP_PATH" "$PROCESS_NAME" "$RP_TOKEN" "macos"
|
./scripts/run_tests.sh "$JAN_APP_PATH" "$PROCESS_NAME" "$RP_TOKEN" "macos"
|
||||||
|
|
||||||
|
- name: Collect Jan logs for artifact upload
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
mkdir -p autoqa/jan-logs
|
||||||
|
cp ~/Library/Application\ Support/Jan-nightly/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
cp ~/Library/Application\ Support/Jan/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload Jan logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/jan-logs/
|
||||||
|
|
||||||
- name: Cleanup after tests
|
- name: Cleanup after tests
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
2
.github/workflows/jan-docs-new-release.yaml
vendored
2
.github/workflows/jan-docs-new-release.yaml
vendored
@ -23,7 +23,7 @@ jobs:
|
|||||||
ref: dev
|
ref: dev
|
||||||
- uses: actions/setup-node@v3
|
- uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 18
|
node-version: 20
|
||||||
|
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|||||||
2
.github/workflows/jan-docs.yml
vendored
2
.github/workflows/jan-docs.yml
vendored
@ -29,7 +29,7 @@ jobs:
|
|||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-node@v3
|
- uses: actions/setup-node@v3
|
||||||
with:
|
with:
|
||||||
node-version: 18
|
node-version: 20
|
||||||
|
|
||||||
- name: Install jq
|
- name: Install jq
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|||||||
6
.github/workflows/jan-linter-and-test.yml
vendored
6
.github/workflows/jan-linter-and-test.yml
vendored
@ -68,10 +68,10 @@ jobs:
|
|||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: ref-lcov.info
|
name: ref-lcov.info
|
||||||
path: coverage/merged/lcov.info
|
path: coverage/lcov.info
|
||||||
|
|
||||||
test-on-macos:
|
test-on-macos:
|
||||||
runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'macos-latest' || 'macos-selfhosted-12-arm64' }}
|
runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'macos-latest' || 'macos-selfhosted-15-arm64' }}
|
||||||
if: github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
||||||
steps:
|
steps:
|
||||||
- name: Getting the repo
|
- name: Getting the repo
|
||||||
@ -251,7 +251,7 @@ jobs:
|
|||||||
uses: barecheck/code-coverage-action@v1
|
uses: barecheck/code-coverage-action@v1
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
lcov-file: './coverage/merged/lcov.info'
|
lcov-file: './coverage/lcov.info'
|
||||||
base-lcov-file: './lcov.info'
|
base-lcov-file: './lcov.info'
|
||||||
send-summary-comment: true
|
send-summary-comment: true
|
||||||
show-annotations: 'warning'
|
show-annotations: 'warning'
|
||||||
|
|||||||
@ -15,6 +15,7 @@ on:
|
|||||||
pull_request:
|
pull_request:
|
||||||
branches:
|
branches:
|
||||||
- release/**
|
- release/**
|
||||||
|
- dev
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
set-public-provider:
|
set-public-provider:
|
||||||
|
|||||||
@ -106,9 +106,7 @@ jobs:
|
|||||||
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
if [ "${{ inputs.channel }}" != "stable" ]; then
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun",
|
||||||
"usr/lib/Jan-${{ inputs.channel }}/binaries": "binaries/deps",
|
"usr/lib/Jan-${{ inputs.channel }}/resources/lib/libvulkan.so": "resources/lib/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||||
"usr/lib/Jan-${{ inputs.channel }}/binaries/engines": "binaries/engines",
|
|
||||||
"usr/lib/Jan-${{ inputs.channel }}/binaries/libvulkan.so": "binaries/libvulkan.so"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
|
||||||
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||||
fi
|
fi
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
|||||||
@ -178,9 +178,6 @@ jobs:
|
|||||||
- name: Build app
|
- name: Build app
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
curl -L -o ./src-tauri/binaries/vcomp140.dll https://catalog.jan.ai/vcomp140.dll
|
|
||||||
curl -L -o ./src-tauri/binaries/msvcp140_codecvt_ids.dll https://catalog.jan.ai/msvcp140_codecvt_ids.dll
|
|
||||||
ls ./src-tauri/binaries
|
|
||||||
make build
|
make build
|
||||||
env:
|
env:
|
||||||
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
||||||
|
|||||||
21
.gitignore
vendored
21
.gitignore
vendored
@ -56,3 +56,24 @@ archive/
|
|||||||
autoqa/trajectories
|
autoqa/trajectories
|
||||||
autoqa/recordings
|
autoqa/recordings
|
||||||
autoqa/__pycache__
|
autoqa/__pycache__
|
||||||
|
|
||||||
|
# Astro / Starlight specific
|
||||||
|
website/dist/
|
||||||
|
website/.astro/
|
||||||
|
website/src/content/config.ts.timestamp-*
|
||||||
|
|
||||||
|
# Nextra specific
|
||||||
|
docs/out/
|
||||||
|
docs/.next/
|
||||||
|
|
||||||
|
# General Node.js
|
||||||
|
**/node_modules
|
||||||
|
**/.env
|
||||||
|
**/.env.*
|
||||||
|
**/npm-debug.log*
|
||||||
|
**/yarn-debug.log*
|
||||||
|
**/yarn-error.log*
|
||||||
|
**/pnpm-debug.log*
|
||||||
|
|
||||||
|
# Combined output for local testing
|
||||||
|
combined-output/
|
||||||
|
|||||||
37
Makefile
37
Makefile
@ -30,9 +30,8 @@ endif
|
|||||||
yarn build:extensions
|
yarn build:extensions
|
||||||
|
|
||||||
dev: install-and-build
|
dev: install-and-build
|
||||||
yarn install:cortex
|
|
||||||
yarn download:bin
|
yarn download:bin
|
||||||
yarn copy:lib
|
yarn download:lib
|
||||||
yarn dev
|
yarn dev
|
||||||
|
|
||||||
# Linting
|
# Linting
|
||||||
@ -41,6 +40,8 @@ lint: install-and-build
|
|||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
test: lint
|
test: lint
|
||||||
|
yarn download:bin
|
||||||
|
yarn download:lib
|
||||||
yarn test
|
yarn test
|
||||||
|
|
||||||
# Builds and publishes the app
|
# Builds and publishes the app
|
||||||
@ -49,7 +50,7 @@ build-and-publish: install-and-build
|
|||||||
|
|
||||||
# Build
|
# Build
|
||||||
build: install-and-build
|
build: install-and-build
|
||||||
yarn copy:lib
|
yarn download:lib
|
||||||
yarn build
|
yarn build
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
@ -81,19 +82,19 @@ else ifeq ($(shell uname -s),Linux)
|
|||||||
rm -rf "~/.cache/jan*"
|
rm -rf "~/.cache/jan*"
|
||||||
rm -rf "./.cache"
|
rm -rf "./.cache"
|
||||||
else
|
else
|
||||||
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
find . -name "node_modules" -type d -prune -exec rm -rfv '{}' +
|
||||||
find . -name ".next" -type d -exec rm -rf '{}' +
|
find . -name ".next" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "dist" -type d -exec rm -rf '{}' +
|
find . -name "dist" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "build" -type d -exec rm -rf '{}' +
|
find . -name "build" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "out" -type d -exec rm -rf '{}' +
|
find . -name "out" -type d -exec rm -rfv '{}' +
|
||||||
find . -name ".turbo" -type d -exec rm -rf '{}' +
|
find . -name ".turbo" -type d -exec rm -rfv '{}' +
|
||||||
find . -name ".yarn" -type d -exec rm -rf '{}' +
|
find . -name ".yarn" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "package-lock.json" -type f -exec rm -rf '{}' +
|
find . -name "package-lock.json" -type f -exec rm -rfv '{}' +
|
||||||
rm -rf ./pre-install/*.tgz
|
rm -rfv ./pre-install/*.tgz
|
||||||
rm -rf ./extensions/*/*.tgz
|
rm -rfv ./extensions/*/*.tgz
|
||||||
rm -rf ./electron/pre-install/*.tgz
|
rm -rfv ./electron/pre-install/*.tgz
|
||||||
rm -rf ./src-tauri/resources
|
rm -rfv ./src-tauri/resources
|
||||||
rm -rf ./src-tauri/target
|
rm -rfv ./src-tauri/target
|
||||||
rm -rf ~/jan/extensions
|
rm -rfv ~/jan/extensions
|
||||||
rm -rf ~/Library/Caches/jan*
|
rm -rfv ~/Library/Caches/jan*
|
||||||
endif
|
endif
|
||||||
|
|||||||
@ -449,17 +449,17 @@ async def main():
|
|||||||
# Update counters and log result
|
# Update counters and log result
|
||||||
if test_passed:
|
if test_passed:
|
||||||
test_results["passed"] += 1
|
test_results["passed"] += 1
|
||||||
logger.info(f"✅ Test {i} PASSED: {test_data['path']}")
|
logger.info(f"[SUCCESS] Test {i} PASSED: {test_data['path']}")
|
||||||
else:
|
else:
|
||||||
test_results["failed"] += 1
|
test_results["failed"] += 1
|
||||||
logger.error(f"❌ Test {i} FAILED: {test_data['path']}")
|
logger.error(f"[FAILED] Test {i} FAILED: {test_data['path']}")
|
||||||
|
|
||||||
# Debug log for troubleshooting
|
# Debug log for troubleshooting
|
||||||
logger.info(f"🔍 Debug - Test result: type={type(test_result)}, value={test_result}, success_field={test_result.get('success', 'N/A') if isinstance(test_result, dict) else 'N/A'}, final_passed={test_passed}")
|
logger.info(f"[INFO] Debug - Test result: type={type(test_result)}, value={test_result}, success_field={test_result.get('success', 'N/A') if isinstance(test_result, dict) else 'N/A'}, final_passed={test_passed}")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
test_results["failed"] += 1
|
test_results["failed"] += 1
|
||||||
logger.error(f"❌ Test {i} FAILED with exception: {test_data['path']} - {e}")
|
logger.error(f"[FAILED] Test {i} FAILED with exception: {test_data['path']} - {e}")
|
||||||
|
|
||||||
# Add delay between tests
|
# Add delay between tests
|
||||||
if i < len(test_files):
|
if i < len(test_files):
|
||||||
@ -477,10 +477,10 @@ async def main():
|
|||||||
logger.info("=" * 50)
|
logger.info("=" * 50)
|
||||||
|
|
||||||
if test_results["failed"] > 0:
|
if test_results["failed"] > 0:
|
||||||
logger.error(f"❌ Test execution completed with {test_results['failed']} failures!")
|
logger.error(f"[FAILED] Test execution completed with {test_results['failed']} failures!")
|
||||||
final_exit_code = 1
|
final_exit_code = 1
|
||||||
else:
|
else:
|
||||||
logger.info("✅ All tests completed successfully!")
|
logger.info("[SUCCESS] All tests completed successfully!")
|
||||||
final_exit_code = 0
|
final_exit_code = 0
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
|
|||||||
@ -3,6 +3,8 @@ import json
|
|||||||
import mimetypes
|
import mimetypes
|
||||||
import re
|
import re
|
||||||
import logging
|
import logging
|
||||||
|
import glob
|
||||||
|
import platform
|
||||||
from reportportal_client.helpers import timestamp
|
from reportportal_client.helpers import timestamp
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@ -160,7 +162,133 @@ def extract_test_result_from_trajectory(trajectory_dir):
|
|||||||
logger.error(f"Error extracting test result: {e}")
|
logger.error(f"Error extracting test result: {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, force_stopped=False, video_path=None):
|
def get_jan_log_paths(is_nightly=False):
|
||||||
|
"""
|
||||||
|
Get Jan application log file paths based on OS and version (nightly vs regular)
|
||||||
|
Returns list of glob patterns for log files
|
||||||
|
"""
|
||||||
|
system = platform.system().lower()
|
||||||
|
app_name = "Jan-nightly" if is_nightly else "Jan"
|
||||||
|
|
||||||
|
if system == "windows":
|
||||||
|
# Windows: %APPDATA%\Jan(-nightly)\data\logs\*.log
|
||||||
|
appdata = os.path.expandvars("%APPDATA%")
|
||||||
|
return [f"{appdata}\\{app_name}\\data\\logs\\*.log"]
|
||||||
|
|
||||||
|
elif system == "darwin": # macOS
|
||||||
|
# macOS: ~/Library/Application Support/Jan(-nightly)/data/logs/*.log
|
||||||
|
home_dir = os.path.expanduser("~")
|
||||||
|
return [f"{home_dir}/Library/Application Support/{app_name}/data/logs/*.log"]
|
||||||
|
|
||||||
|
elif system == "linux":
|
||||||
|
# Linux: ~/.local/share/Jan(-nightly)/data/logs/*.log
|
||||||
|
home_dir = os.path.expanduser("~")
|
||||||
|
return [f"{home_dir}/.local/share/{app_name}/data/logs/*.log"]
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.warning(f"Unsupported OS: {system}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
def upload_jan_logs(client, test_item_id, is_nightly=False, max_log_files=5):
|
||||||
|
"""
|
||||||
|
Upload Jan application log files to ReportPortal
|
||||||
|
"""
|
||||||
|
log_patterns = get_jan_log_paths(is_nightly)
|
||||||
|
app_type = "nightly" if is_nightly else "regular"
|
||||||
|
|
||||||
|
logger.info(f"Looking for Jan {app_type} logs...")
|
||||||
|
|
||||||
|
all_log_files = []
|
||||||
|
for pattern in log_patterns:
|
||||||
|
try:
|
||||||
|
log_files = glob.glob(pattern)
|
||||||
|
all_log_files.extend(log_files)
|
||||||
|
logger.info(f"Found {len(log_files)} log files matching pattern: {pattern}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error searching for logs with pattern {pattern}: {e}")
|
||||||
|
|
||||||
|
if not all_log_files:
|
||||||
|
logger.warning(f"No Jan {app_type} log files found")
|
||||||
|
client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="WARNING",
|
||||||
|
message=f"[INFO] No Jan {app_type} application logs found",
|
||||||
|
item_id=test_item_id
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Sort by modification time (newest first) and limit to max_log_files
|
||||||
|
try:
|
||||||
|
all_log_files.sort(key=lambda x: os.path.getmtime(x), reverse=True)
|
||||||
|
log_files_to_upload = all_log_files[:max_log_files]
|
||||||
|
|
||||||
|
logger.info(f"Uploading {len(log_files_to_upload)} most recent Jan {app_type} log files")
|
||||||
|
|
||||||
|
for i, log_file in enumerate(log_files_to_upload, 1):
|
||||||
|
try:
|
||||||
|
file_size = os.path.getsize(log_file)
|
||||||
|
file_name = os.path.basename(log_file)
|
||||||
|
|
||||||
|
# Check file size limit (50MB = 50 * 1024 * 1024 bytes)
|
||||||
|
max_file_size = 50 * 1024 * 1024 # 50MB
|
||||||
|
if file_size > max_file_size:
|
||||||
|
logger.warning(f"Log file {file_name} is too large ({file_size} bytes > {max_file_size} bytes), skipping upload")
|
||||||
|
client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="WARNING",
|
||||||
|
message=f"[INFO] Log file {file_name} skipped (size: {file_size} bytes > 50MB limit)",
|
||||||
|
item_id=test_item_id
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
logger.info(f"Uploading log file {i}/{len(log_files_to_upload)}: {file_name} ({file_size} bytes)")
|
||||||
|
|
||||||
|
# Read log file content (safe to read since we checked size)
|
||||||
|
with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
|
||||||
|
log_content = f.read()
|
||||||
|
|
||||||
|
# Upload as text attachment
|
||||||
|
client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="INFO",
|
||||||
|
message=f"[INFO] Jan {app_type} application log: {file_name}",
|
||||||
|
item_id=test_item_id,
|
||||||
|
attachment={
|
||||||
|
"name": f"jan_{app_type}_log_{i}_{file_name}",
|
||||||
|
"data": log_content.encode('utf-8'),
|
||||||
|
"mime": "text/plain"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Successfully uploaded log: {file_name}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error uploading log file {log_file}: {e}")
|
||||||
|
client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="ERROR",
|
||||||
|
message=f"Failed to upload log file {os.path.basename(log_file)}: {str(e)}",
|
||||||
|
item_id=test_item_id
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add summary log
|
||||||
|
client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="INFO",
|
||||||
|
message=f"[INFO] Uploaded {len(log_files_to_upload)} Jan {app_type} log files (total available: {len(all_log_files)})",
|
||||||
|
item_id=test_item_id
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error processing Jan logs: {e}")
|
||||||
|
client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="ERROR",
|
||||||
|
message=f"Error processing Jan {app_type} logs: {str(e)}",
|
||||||
|
item_id=test_item_id
|
||||||
|
)
|
||||||
|
|
||||||
|
def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, force_stopped=False, video_path=None, is_nightly=False):
|
||||||
"""
|
"""
|
||||||
Upload test results to ReportPortal with proper status based on test result
|
Upload test results to ReportPortal with proper status based on test result
|
||||||
"""
|
"""
|
||||||
@ -177,7 +305,7 @@ def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, forc
|
|||||||
client.log(
|
client.log(
|
||||||
time=timestamp(),
|
time=timestamp(),
|
||||||
level="ERROR",
|
level="ERROR",
|
||||||
message="❌ TEST FAILED ❌\nNo trajectory directory found",
|
message="[FAILED] TEST FAILED [FAILED]\nNo trajectory directory found",
|
||||||
item_id=test_item_id
|
item_id=test_item_id
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -236,7 +364,7 @@ def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, forc
|
|||||||
if os.path.isdir(os.path.join(trajectory_dir, f)) and f.startswith("turn_")]
|
if os.path.isdir(os.path.join(trajectory_dir, f)) and f.startswith("turn_")]
|
||||||
|
|
||||||
# Add clear status log
|
# Add clear status log
|
||||||
status_emoji = "✅" if final_status == "PASSED" else "❌"
|
status_emoji = "[SUCCESS]" if final_status == "PASSED" else "[FAILED]"
|
||||||
client.log(
|
client.log(
|
||||||
time=timestamp(),
|
time=timestamp(),
|
||||||
level="INFO" if final_status == "PASSED" else "ERROR",
|
level="INFO" if final_status == "PASSED" else "ERROR",
|
||||||
@ -255,7 +383,7 @@ def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, forc
|
|||||||
client.log(
|
client.log(
|
||||||
time=timestamp(),
|
time=timestamp(),
|
||||||
level="INFO",
|
level="INFO",
|
||||||
message="🎥 Screen recording of test execution",
|
message="[INFO] Screen recording of test execution",
|
||||||
item_id=test_item_id,
|
item_id=test_item_id,
|
||||||
attachment={
|
attachment={
|
||||||
"name": f"test_recording_{formatted_test_path}.mp4",
|
"name": f"test_recording_{formatted_test_path}.mp4",
|
||||||
@ -281,6 +409,10 @@ def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, forc
|
|||||||
item_id=test_item_id
|
item_id=test_item_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Upload Jan application logs
|
||||||
|
logger.info("Uploading Jan application logs...")
|
||||||
|
upload_jan_logs(client, test_item_id, is_nightly=is_nightly, max_log_files=5)
|
||||||
|
|
||||||
# Upload all turn data with appropriate status
|
# Upload all turn data with appropriate status
|
||||||
# If test failed, mark all turns as failed
|
# If test failed, mark all turns as failed
|
||||||
force_fail_turns = (final_status == "FAILED")
|
force_fail_turns = (final_status == "FAILED")
|
||||||
|
|||||||
@ -1,18 +1,18 @@
|
|||||||
# Core dependencies
|
# Core dependencies
|
||||||
cua-computer[all]>=0.3.5
|
cua-computer[all]~=0.3.5
|
||||||
cua-agent[all]>=0.3.0
|
cua-agent[all]~=0.3.0
|
||||||
cua-agent @ git+https://github.com/menloresearch/cua.git@compute-agent-0.3.0-patch#subdirectory=libs/python/agent
|
cua-agent @ git+https://github.com/menloresearch/cua.git@compute-agent-0.3.0-patch#subdirectory=libs/python/agent
|
||||||
|
|
||||||
# ReportPortal integration
|
# ReportPortal integration
|
||||||
reportportal-client>=5.6.5
|
reportportal-client~=5.6.5
|
||||||
|
|
||||||
# Screen recording and automation
|
# Screen recording and automation
|
||||||
opencv-python>=4.12.0
|
opencv-python~=4.10.0
|
||||||
numpy>=2.2.6
|
numpy~=2.2.6
|
||||||
PyAutoGUI>=0.9.54
|
PyAutoGUI~=0.9.54
|
||||||
|
|
||||||
# System utilities
|
# System utilities
|
||||||
psutil>=7.0.0
|
psutil~=7.0.0
|
||||||
|
|
||||||
# Server component
|
# Server component
|
||||||
cua-computer-server>=0.1.19
|
cua-computer-server~=0.1.19
|
||||||
@ -41,9 +41,9 @@ echo "Downloading Jan app from: $JAN_APP_URL"
|
|||||||
curl -L -o "/tmp/jan-installer.dmg" "$JAN_APP_URL"
|
curl -L -o "/tmp/jan-installer.dmg" "$JAN_APP_URL"
|
||||||
|
|
||||||
if [ ! -f "/tmp/jan-installer.dmg" ]; then
|
if [ ! -f "/tmp/jan-installer.dmg" ]; then
|
||||||
echo "❌ Failed to download Jan app"
|
echo "[FAILED] Failed to download Jan app"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "✅ Successfully downloaded Jan app"
|
echo "[SUCCESS] Successfully downloaded Jan app"
|
||||||
ls -la "/tmp/jan-installer.dmg"
|
ls -la "/tmp/jan-installer.dmg"
|
||||||
|
|||||||
@ -10,7 +10,7 @@ hdiutil attach "/tmp/jan-installer.dmg" -mountpoint "/tmp/jan-mount"
|
|||||||
APP_FILE=$(find "/tmp/jan-mount" -name "*.app" -type d | head -1)
|
APP_FILE=$(find "/tmp/jan-mount" -name "*.app" -type d | head -1)
|
||||||
|
|
||||||
if [ -z "$APP_FILE" ]; then
|
if [ -z "$APP_FILE" ]; then
|
||||||
echo "❌ No .app file found in DMG"
|
echo "[Failed] No .app file found in DMG"
|
||||||
hdiutil detach "/tmp/jan-mount" || true
|
hdiutil detach "/tmp/jan-mount" || true
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
@ -61,7 +61,7 @@ if [ -z "$APP_PATH" ]; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -z "$APP_PATH" ]; then
|
if [ -z "$APP_PATH" ]; then
|
||||||
echo "❌ No executable found in MacOS folder"
|
echo "[FAILED] No executable found in MacOS folder"
|
||||||
ls -la "/Applications/$APP_NAME/Contents/MacOS/"
|
ls -la "/Applications/$APP_NAME/Contents/MacOS/"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
@ -76,11 +76,16 @@ echo "Process name: $PROCESS_NAME"
|
|||||||
echo "JAN_APP_PATH=$APP_PATH" >> $GITHUB_ENV
|
echo "JAN_APP_PATH=$APP_PATH" >> $GITHUB_ENV
|
||||||
echo "PROCESS_NAME=$PROCESS_NAME" >> $GITHUB_ENV
|
echo "PROCESS_NAME=$PROCESS_NAME" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
echo "[INFO] Waiting for Jan app first initialization (120 seconds)..."
|
||||||
|
echo "This allows Jan to complete its initial setup and configuration"
|
||||||
|
sleep 120
|
||||||
|
echo "[SUCCESS] Initialization wait completed"
|
||||||
|
|
||||||
# Verify installation
|
# Verify installation
|
||||||
if [ -f "$APP_PATH" ]; then
|
if [ -f "$APP_PATH" ]; then
|
||||||
echo "✅ Jan app installed successfully"
|
echo "[SUCCESS] Jan app installed successfully"
|
||||||
ls -la "/Applications/$APP_NAME"
|
ls -la "/Applications/$APP_NAME"
|
||||||
else
|
else
|
||||||
echo "❌ Jan app installation failed - executable not found"
|
echo "[FAILED] Jan app installation failed - executable not found"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|||||||
@ -9,7 +9,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|||||||
# Make all shell scripts executable
|
# Make all shell scripts executable
|
||||||
chmod +x "$SCRIPT_DIR"/*.sh
|
chmod +x "$SCRIPT_DIR"/*.sh
|
||||||
|
|
||||||
echo "✅ All shell scripts are now executable:"
|
echo "[SUCCESS] All shell scripts are now executable:"
|
||||||
ls -la "$SCRIPT_DIR"/*.sh
|
ls -la "$SCRIPT_DIR"/*.sh
|
||||||
|
|
||||||
echo "✅ Permission setup completed"
|
echo "[SUCCESS] Permission setup completed"
|
||||||
|
|||||||
@ -15,6 +15,11 @@ sudo apt-get install -f -y
|
|||||||
# Wait for installation to complete
|
# Wait for installation to complete
|
||||||
sleep 10
|
sleep 10
|
||||||
|
|
||||||
|
echo "[INFO] Waiting for Jan app first initialization (120 seconds)..."
|
||||||
|
echo "This allows Jan to complete its initial setup and configuration"
|
||||||
|
sleep 120
|
||||||
|
echo "[SUCCESS] Initialization wait completed"
|
||||||
|
|
||||||
# Verify installation based on nightly flag
|
# Verify installation based on nightly flag
|
||||||
if [ "$IS_NIGHTLY" = "true" ]; then
|
if [ "$IS_NIGHTLY" = "true" ]; then
|
||||||
DEFAULT_JAN_PATH="/usr/bin/Jan-nightly"
|
DEFAULT_JAN_PATH="/usr/bin/Jan-nightly"
|
||||||
|
|||||||
@ -24,6 +24,11 @@ catch {
|
|||||||
# Wait a bit for installation to complete
|
# Wait a bit for installation to complete
|
||||||
Start-Sleep -Seconds 10
|
Start-Sleep -Seconds 10
|
||||||
|
|
||||||
|
Write-Host "[INFO] Waiting for Jan app first initialization (120 seconds)..."
|
||||||
|
Write-Host "This allows Jan to complete its initial setup and configuration"
|
||||||
|
Start-Sleep -Seconds 120
|
||||||
|
Write-Host "[SUCCESS] Initialization wait completed"
|
||||||
|
|
||||||
# Verify installation based on nightly flag
|
# Verify installation based on nightly flag
|
||||||
if ($isNightly) {
|
if ($isNightly) {
|
||||||
$defaultJanPath = "$env:LOCALAPPDATA\Programs\jan-nightly\Jan-nightly.exe"
|
$defaultJanPath = "$env:LOCALAPPDATA\Programs\jan-nightly\Jan-nightly.exe"
|
||||||
|
|||||||
@ -25,6 +25,9 @@ async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id
|
|||||||
path = test_data['path']
|
path = test_data['path']
|
||||||
prompt = test_data['prompt']
|
prompt = test_data['prompt']
|
||||||
|
|
||||||
|
# Detect if using nightly version based on process name
|
||||||
|
is_nightly = "nightly" in jan_process_name.lower() if jan_process_name else False
|
||||||
|
|
||||||
# Default agent config if not provided
|
# Default agent config if not provided
|
||||||
if agent_config is None:
|
if agent_config is None:
|
||||||
agent_config = {
|
agent_config = {
|
||||||
@ -210,7 +213,7 @@ async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id
|
|||||||
logger.info(f"Video exists: {os.path.exists(video_path)}")
|
logger.info(f"Video exists: {os.path.exists(video_path)}")
|
||||||
if os.path.exists(video_path):
|
if os.path.exists(video_path):
|
||||||
logger.info(f"Video file size: {os.path.getsize(video_path)} bytes")
|
logger.info(f"Video file size: {os.path.getsize(video_path)} bytes")
|
||||||
upload_test_results_to_rp(rp_client, launch_id, path, trajectory_dir, force_stopped_due_to_turns, video_path)
|
upload_test_results_to_rp(rp_client, launch_id, path, trajectory_dir, force_stopped_due_to_turns, video_path, is_nightly)
|
||||||
else:
|
else:
|
||||||
logger.warning(f"Test completed but no trajectory found for: {path}")
|
logger.warning(f"Test completed but no trajectory found for: {path}")
|
||||||
# Handle case where test completed but no trajectory found
|
# Handle case where test completed but no trajectory found
|
||||||
@ -235,7 +238,7 @@ async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id
|
|||||||
rp_client.log(
|
rp_client.log(
|
||||||
time=timestamp(),
|
time=timestamp(),
|
||||||
level="INFO",
|
level="INFO",
|
||||||
message="🎥 Screen recording of failed test",
|
message="[INFO] Screen recording of failed test",
|
||||||
item_id=test_item_id,
|
item_id=test_item_id,
|
||||||
attachment={
|
attachment={
|
||||||
"name": f"failed_test_recording_{formatted_test_path}.mp4",
|
"name": f"failed_test_recording_{formatted_test_path}.mp4",
|
||||||
@ -295,9 +298,9 @@ async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id
|
|||||||
|
|
||||||
if not enable_reportportal:
|
if not enable_reportportal:
|
||||||
# Local development mode - log results
|
# Local development mode - log results
|
||||||
logger.info(f"🏠 LOCAL RESULT: {path} - {final_status} ({status_message})")
|
logger.info(f"[INFO] LOCAL RESULT: {path} - {final_status} ({status_message})")
|
||||||
logger.info(f"📹 Video saved: {video_path}")
|
logger.info(f"[INFO] Video saved: {video_path}")
|
||||||
logger.info(f"📁 Trajectory: {trajectory_dir}")
|
logger.info(f"[INFO] Trajectory: {trajectory_dir}")
|
||||||
else:
|
else:
|
||||||
final_status = "FAILED"
|
final_status = "FAILED"
|
||||||
status_message = "no trajectory found"
|
status_message = "no trajectory found"
|
||||||
@ -309,7 +312,7 @@ async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id
|
|||||||
})
|
})
|
||||||
|
|
||||||
if not enable_reportportal:
|
if not enable_reportportal:
|
||||||
logger.warning(f"🏠 LOCAL RESULT: {path} - {final_status} ({status_message})")
|
logger.warning(f"[INFO] LOCAL RESULT: {path} - {final_status} ({status_message})")
|
||||||
|
|
||||||
# Step 9: Always force close Jan app after test completion
|
# Step 9: Always force close Jan app after test completion
|
||||||
logger.info(f"Cleaning up after test: {path}")
|
logger.info(f"Cleaning up after test: {path}")
|
||||||
|
|||||||
@ -1,15 +1,17 @@
|
|||||||
prompt = """
|
prompt = """
|
||||||
You are going to test the Jan application by downloading and chatting with a model (qwen2.5).
|
You are going to test the Jan application by downloading and chatting with a model (bitcpm4).
|
||||||
|
|
||||||
Step-by-step instructions:
|
Step-by-step instructions:
|
||||||
1. Given the Jan application is already opened.
|
1. Given the Jan application is already opened.
|
||||||
2. In the **bottom-left corner**, click the **“Hub”** menu item.
|
2. In the **bottom-left corner**, click the **Hub** menu item.
|
||||||
3. Scroll through the model list or use the search bar to find **qwen2.5**.
|
3. Scroll through the model list or use the search bar to find **qwen3-0.6B**.
|
||||||
4. Click **“Use”** on the qwen2.5 model.
|
4. Click **Use** on the qwen3-0.6B model.
|
||||||
5. Wait for the model to finish downloading and become ready.
|
5. Wait for the model to finish downloading and become ready.
|
||||||
6. Once redirected to the chat screen, type any message into the input box (e.g. `Hello qwen2.5`).
|
6. Once redirected to the chat screen, type any message into the input box (e.g. `Hello World`).
|
||||||
7. Press **Enter** to send the message.
|
7. Press **Enter** to send the message.
|
||||||
8. Wait for the model’s response.
|
8. Wait for the model’s response.
|
||||||
|
|
||||||
If the model responds correctly, return: {"result": True}, otherwise return: {"result": False}.
|
If the model responds correctly, return: {"result": True}, otherwise return: {"result": False}.
|
||||||
|
|
||||||
|
In all your responses, use only plain ASCII characters. Do NOT use Unicode symbols
|
||||||
"""
|
"""
|
||||||
|
|||||||
@ -279,7 +279,7 @@ def start_jan_app(jan_app_path=None):
|
|||||||
|
|
||||||
# Wait a bit more after maximizing
|
# Wait a bit more after maximizing
|
||||||
time.sleep(10)
|
time.sleep(10)
|
||||||
logger.info("Jan application should be ready")
|
logger.info("Jan application should be ready, waiting for additional setup...")
|
||||||
time.sleep(10) # Additional wait to ensure everything is ready
|
time.sleep(10) # Additional wait to ensure everything is ready
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|||||||
@ -9,9 +9,6 @@
|
|||||||
```js
|
```js
|
||||||
// Web / extension runtime
|
// Web / extension runtime
|
||||||
import * as core from '@janhq/core'
|
import * as core from '@janhq/core'
|
||||||
|
|
||||||
// Node runtime
|
|
||||||
import * as node from '@janhq/core/node'
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Build an Extension
|
## Build an Extension
|
||||||
|
|||||||
@ -1,17 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
preset: 'ts-jest',
|
|
||||||
testEnvironment: 'node',
|
|
||||||
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
|
|
||||||
moduleNameMapper: {
|
|
||||||
'@/(.*)': '<rootDir>/src/$1',
|
|
||||||
},
|
|
||||||
runner: './testRunner.js',
|
|
||||||
transform: {
|
|
||||||
'^.+\\.tsx?$': [
|
|
||||||
'ts-jest',
|
|
||||||
{
|
|
||||||
diagnostics: false,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@ -17,30 +17,28 @@
|
|||||||
"author": "Jan <service@jan.ai>",
|
"author": "Jan <service@jan.ai>",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
|
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
|
||||||
"test": "jest",
|
"test": "vitest run",
|
||||||
|
"test:watch": "vitest",
|
||||||
|
"test:ui": "vitest --ui",
|
||||||
|
"test:coverage": "vitest run --coverage",
|
||||||
"prebuild": "rimraf dist",
|
"prebuild": "rimraf dist",
|
||||||
"build": "tsc -p . && rolldown -c rolldown.config.mjs"
|
"build": "tsc -p . && rolldown -c rolldown.config.mjs"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@npmcli/arborist": "^7.1.0",
|
"@npmcli/arborist": "^7.1.0",
|
||||||
"@types/jest": "^30.0.0",
|
|
||||||
"@types/node": "^22.10.0",
|
"@types/node": "^22.10.0",
|
||||||
"@types/pacote": "^11.1.7",
|
"@vitest/coverage-v8": "^2.1.8",
|
||||||
"@types/request": "^2.48.12",
|
"@vitest/ui": "^2.1.8",
|
||||||
"electron": "33.2.1",
|
|
||||||
"eslint": "8.57.0",
|
"eslint": "8.57.0",
|
||||||
"eslint-plugin-jest": "^27.9.0",
|
"happy-dom": "^15.11.6",
|
||||||
"jest": "^30.0.3",
|
|
||||||
"jest-junit": "^16.0.0",
|
|
||||||
"jest-runner": "^30.0.3",
|
|
||||||
"pacote": "^21.0.0",
|
"pacote": "^21.0.0",
|
||||||
"request": "^2.88.2",
|
"request": "^2.88.2",
|
||||||
"request-progress": "^3.0.0",
|
"request-progress": "^3.0.0",
|
||||||
"rimraf": "^6.0.1",
|
"rimraf": "^6.0.1",
|
||||||
"rolldown": "1.0.0-beta.1",
|
"rolldown": "1.0.0-beta.1",
|
||||||
"ts-jest": "^29.2.5",
|
|
||||||
"tslib": "^2.6.2",
|
"tslib": "^2.6.2",
|
||||||
"typescript": "^5.8.3"
|
"typescript": "^5.8.3",
|
||||||
|
"vitest": "^2.1.8"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"rxjs": "^7.8.1",
|
"rxjs": "^7.8.1",
|
||||||
|
|||||||
@ -15,36 +15,5 @@ export default defineConfig([
|
|||||||
NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
|
NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
|
||||||
VERSION: JSON.stringify(pkgJson.version),
|
VERSION: JSON.stringify(pkgJson.version),
|
||||||
},
|
},
|
||||||
},
|
}
|
||||||
{
|
|
||||||
input: 'src/node/index.ts',
|
|
||||||
external: [
|
|
||||||
'fs/promises',
|
|
||||||
'path',
|
|
||||||
'pacote',
|
|
||||||
'@types/pacote',
|
|
||||||
'@npmcli/arborist',
|
|
||||||
'ulidx',
|
|
||||||
'fs',
|
|
||||||
'request',
|
|
||||||
'crypto',
|
|
||||||
'url',
|
|
||||||
'http',
|
|
||||||
'os',
|
|
||||||
'util',
|
|
||||||
'child_process',
|
|
||||||
'electron',
|
|
||||||
'request-progress',
|
|
||||||
],
|
|
||||||
output: {
|
|
||||||
format: 'cjs',
|
|
||||||
file: 'dist/node/index.cjs.js',
|
|
||||||
sourcemap: true,
|
|
||||||
inlineDynamicImports: true,
|
|
||||||
},
|
|
||||||
resolve: {
|
|
||||||
extensions: ['.js', '.ts'],
|
|
||||||
},
|
|
||||||
platform: 'node',
|
|
||||||
},
|
|
||||||
])
|
])
|
||||||
|
|||||||
@ -1,6 +1,4 @@
|
|||||||
/**
|
import { describe, it, expect, vi } from 'vitest'
|
||||||
* @jest-environment jsdom
|
|
||||||
*/
|
|
||||||
import { openExternalUrl } from './core'
|
import { openExternalUrl } from './core'
|
||||||
import { joinPath } from './core'
|
import { joinPath } from './core'
|
||||||
import { openFileExplorer } from './core'
|
import { openFileExplorer } from './core'
|
||||||
@ -12,7 +10,7 @@ describe('test core apis', () => {
|
|||||||
const url = 'http://example.com'
|
const url = 'http://example.com'
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
api: {
|
api: {
|
||||||
openExternalUrl: jest.fn().mockResolvedValue('opened'),
|
openExternalUrl: vi.fn().mockResolvedValue('opened'),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const result = await openExternalUrl(url)
|
const result = await openExternalUrl(url)
|
||||||
@ -24,7 +22,7 @@ describe('test core apis', () => {
|
|||||||
const paths = ['/path/one', '/path/two']
|
const paths = ['/path/one', '/path/two']
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
api: {
|
api: {
|
||||||
joinPath: jest.fn().mockResolvedValue('/path/one/path/two'),
|
joinPath: vi.fn().mockResolvedValue('/path/one/path/two'),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const result = await joinPath(paths)
|
const result = await joinPath(paths)
|
||||||
@ -36,7 +34,7 @@ describe('test core apis', () => {
|
|||||||
const path = '/path/to/open'
|
const path = '/path/to/open'
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
api: {
|
api: {
|
||||||
openFileExplorer: jest.fn().mockResolvedValue('opened'),
|
openFileExplorer: vi.fn().mockResolvedValue('opened'),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const result = await openFileExplorer(path)
|
const result = await openFileExplorer(path)
|
||||||
@ -47,7 +45,7 @@ describe('test core apis', () => {
|
|||||||
it('should get jan data folder path', async () => {
|
it('should get jan data folder path', async () => {
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
api: {
|
api: {
|
||||||
getJanDataFolderPath: jest.fn().mockResolvedValue('/path/to/jan/data'),
|
getJanDataFolderPath: vi.fn().mockResolvedValue('/path/to/jan/data'),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
const result = await getJanDataFolderPath()
|
const result = await getJanDataFolderPath()
|
||||||
@ -58,7 +56,7 @@ describe('test core apis', () => {
|
|||||||
|
|
||||||
describe('dirName - just a pass thru api', () => {
|
describe('dirName - just a pass thru api', () => {
|
||||||
it('should retrieve the directory name from a file path', async () => {
|
it('should retrieve the directory name from a file path', async () => {
|
||||||
const mockDirName = jest.fn()
|
const mockDirName = vi.fn()
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
api: {
|
api: {
|
||||||
dirName: mockDirName.mockResolvedValue('/path/to'),
|
dirName: mockDirName.mockResolvedValue('/path/to'),
|
||||||
|
|||||||
@ -1,24 +1,5 @@
|
|||||||
import { SystemInformation } from '../types'
|
import { SystemInformation } from '../types'
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute a extension module function in main process
|
|
||||||
*
|
|
||||||
* @param extension extension name to import
|
|
||||||
* @param method function name to execute
|
|
||||||
* @param args arguments to pass to the function
|
|
||||||
* @returns Promise<any>
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
const executeOnMain: (extension: string, method: string, ...args: any[]) => Promise<any> = (
|
|
||||||
extension,
|
|
||||||
method,
|
|
||||||
...args
|
|
||||||
) => {
|
|
||||||
if ('electronAPI' in window && window.electronAPI)
|
|
||||||
return globalThis.core?.api?.invokeExtensionFunc(extension, method, ...args)
|
|
||||||
return () => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets Jan's data folder path.
|
* Gets Jan's data folder path.
|
||||||
*
|
*
|
||||||
@ -97,13 +78,6 @@ const log: (message: string, fileName?: string) => void = (message, fileName) =>
|
|||||||
const isSubdirectory: (from: string, to: string) => Promise<boolean> = (from: string, to: string) =>
|
const isSubdirectory: (from: string, to: string) => Promise<boolean> = (from: string, to: string) =>
|
||||||
globalThis.core.api?.isSubdirectory(from, to)
|
globalThis.core.api?.isSubdirectory(from, to)
|
||||||
|
|
||||||
/**
|
|
||||||
* Get system information
|
|
||||||
* @returns {Promise<any>} - A promise that resolves with the system information.
|
|
||||||
*/
|
|
||||||
const systemInformation: () => Promise<SystemInformation> = () =>
|
|
||||||
globalThis.core.api?.systemInformation()
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Show toast message from browser processes.
|
* Show toast message from browser processes.
|
||||||
* @param title
|
* @param title
|
||||||
@ -127,7 +101,6 @@ export type RegisterExtensionPoint = (
|
|||||||
* Functions exports
|
* Functions exports
|
||||||
*/
|
*/
|
||||||
export {
|
export {
|
||||||
executeOnMain,
|
|
||||||
getJanDataFolderPath,
|
getJanDataFolderPath,
|
||||||
openFileExplorer,
|
openFileExplorer,
|
||||||
getResourcePath,
|
getResourcePath,
|
||||||
@ -137,7 +110,6 @@ export {
|
|||||||
log,
|
log,
|
||||||
isSubdirectory,
|
isSubdirectory,
|
||||||
getUserHomePath,
|
getUserHomePath,
|
||||||
systemInformation,
|
|
||||||
showToast,
|
showToast,
|
||||||
dirName,
|
dirName,
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,11 +1,11 @@
|
|||||||
|
import { it, expect, vi } from 'vitest'
|
||||||
import { events } from './events';
|
import { events } from './events';
|
||||||
import { jest } from '@jest/globals';
|
|
||||||
|
|
||||||
it('should emit an event', () => {
|
it('should emit an event', () => {
|
||||||
const mockObject = { key: 'value' };
|
const mockObject = { key: 'value' };
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
events: {
|
events: {
|
||||||
emit: jest.fn()
|
emit: vi.fn()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
events.emit('testEvent', mockObject);
|
events.emit('testEvent', mockObject);
|
||||||
@ -14,10 +14,10 @@ it('should emit an event', () => {
|
|||||||
|
|
||||||
|
|
||||||
it('should remove an observer for an event', () => {
|
it('should remove an observer for an event', () => {
|
||||||
const mockHandler = jest.fn();
|
const mockHandler = vi.fn();
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
events: {
|
events: {
|
||||||
off: jest.fn()
|
off: vi.fn()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
events.off('testEvent', mockHandler);
|
events.off('testEvent', mockHandler);
|
||||||
@ -26,10 +26,10 @@ it('should remove an observer for an event', () => {
|
|||||||
|
|
||||||
|
|
||||||
it('should add an observer for an event', () => {
|
it('should add an observer for an event', () => {
|
||||||
const mockHandler = jest.fn();
|
const mockHandler = vi.fn();
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
events: {
|
events: {
|
||||||
on: jest.fn()
|
on: vi.fn()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
events.on('testEvent', mockHandler);
|
events.on('testEvent', mockHandler);
|
||||||
|
|||||||
@ -1,7 +1,8 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
|
||||||
import { BaseExtension } from './extension'
|
import { BaseExtension } from './extension'
|
||||||
import { SettingComponentProps } from '../types'
|
import { SettingComponentProps } from '../types'
|
||||||
jest.mock('./core')
|
vi.mock('./core')
|
||||||
jest.mock('./fs')
|
vi.mock('./fs')
|
||||||
|
|
||||||
class TestBaseExtension extends BaseExtension {
|
class TestBaseExtension extends BaseExtension {
|
||||||
onLoad(): void {}
|
onLoad(): void {}
|
||||||
@ -16,7 +17,7 @@ describe('BaseExtension', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
jest.resetAllMocks()
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should have the correct properties', () => {
|
it('should have the correct properties', () => {
|
||||||
@ -56,7 +57,7 @@ describe('BaseExtension', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
jest.resetAllMocks()
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should have the correct properties', () => {
|
it('should have the correct properties', () => {
|
||||||
@ -108,7 +109,7 @@ describe('BaseExtension', () => {
|
|||||||
Object.defineProperty(global, 'localStorage', {
|
Object.defineProperty(global, 'localStorage', {
|
||||||
value: localStorageMock,
|
value: localStorageMock,
|
||||||
})
|
})
|
||||||
const mock = jest.spyOn(localStorage, 'setItem')
|
const mock = vi.spyOn(localStorage, 'setItem')
|
||||||
await baseExtension.registerSettings(settings)
|
await baseExtension.registerSettings(settings)
|
||||||
|
|
||||||
expect(mock).toHaveBeenCalledWith(
|
expect(mock).toHaveBeenCalledWith(
|
||||||
@ -122,7 +123,7 @@ describe('BaseExtension', () => {
|
|||||||
{ key: 'setting1', controllerProps: { value: 'value1' } } as any,
|
{ key: 'setting1', controllerProps: { value: 'value1' } } as any,
|
||||||
]
|
]
|
||||||
|
|
||||||
jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
|
vi.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
|
||||||
|
|
||||||
const value = await baseExtension.getSetting('setting1', 'defaultValue')
|
const value = await baseExtension.getSetting('setting1', 'defaultValue')
|
||||||
expect(value).toBe('value1')
|
expect(value).toBe('value1')
|
||||||
@ -136,8 +137,8 @@ describe('BaseExtension', () => {
|
|||||||
{ key: 'setting1', controllerProps: { value: 'value1' } } as any,
|
{ key: 'setting1', controllerProps: { value: 'value1' } } as any,
|
||||||
]
|
]
|
||||||
|
|
||||||
jest.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
|
vi.spyOn(baseExtension, 'getSettings').mockResolvedValue(settings)
|
||||||
const mockSetItem = jest.spyOn(localStorage, 'setItem')
|
const mockSetItem = vi.spyOn(localStorage, 'setItem')
|
||||||
|
|
||||||
await baseExtension.updateSettings([
|
await baseExtension.updateSettings([
|
||||||
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any,
|
{ key: 'setting1', controllerProps: { value: 'newValue' } } as any,
|
||||||
|
|||||||
@ -128,6 +128,10 @@ export abstract class BaseExtension implements ExtensionType {
|
|||||||
setting.controllerProps.value = oldSettings.find(
|
setting.controllerProps.value = oldSettings.find(
|
||||||
(e: any) => e.key === setting.key
|
(e: any) => e.key === setting.key
|
||||||
)?.controllerProps?.value
|
)?.controllerProps?.value
|
||||||
|
if ('options' in setting.controllerProps)
|
||||||
|
setting.controllerProps.options = setting.controllerProps.options?.length
|
||||||
|
? setting.controllerProps.options
|
||||||
|
: oldSettings.find((e: any) => e.key === setting.key)?.controllerProps?.options
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
localStorage.setItem(this.name, JSON.stringify(settings))
|
localStorage.setItem(this.name, JSON.stringify(settings))
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
|
|
||||||
|
import { it, expect } from 'vitest'
|
||||||
import { AssistantExtension } from './assistant';
|
import { AssistantExtension } from './assistant';
|
||||||
import { ExtensionTypeEnum } from '../extension';
|
import { ExtensionTypeEnum } from '../extension';
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,4 @@
|
|||||||
|
import { describe, it, test, expect, beforeEach } from 'vitest'
|
||||||
import { ConversationalExtension } from './conversational'
|
import { ConversationalExtension } from './conversational'
|
||||||
import { ExtensionTypeEnum } from '../extension'
|
import { ExtensionTypeEnum } from '../extension'
|
||||||
import { Thread, ThreadAssistantInfo, ThreadMessage } from '../../types'
|
import { Thread, ThreadAssistantInfo, ThreadMessage } from '../../types'
|
||||||
|
|||||||
@ -1,10 +1,11 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from 'vitest'
|
||||||
import { AIEngine } from './AIEngine'
|
import { AIEngine } from './AIEngine'
|
||||||
import { events } from '../../events'
|
import { events } from '../../events'
|
||||||
import { ModelEvent, Model } from '../../../types'
|
import { ModelEvent, Model } from '../../../types'
|
||||||
|
|
||||||
jest.mock('../../events')
|
vi.mock('../../events')
|
||||||
jest.mock('./EngineManager')
|
vi.mock('./EngineManager')
|
||||||
jest.mock('../../fs')
|
vi.mock('../../fs')
|
||||||
|
|
||||||
class TestAIEngine extends AIEngine {
|
class TestAIEngine extends AIEngine {
|
||||||
onUnload(): void {}
|
onUnload(): void {}
|
||||||
@ -13,6 +14,38 @@ class TestAIEngine extends AIEngine {
|
|||||||
inference(data: any) {}
|
inference(data: any) {}
|
||||||
|
|
||||||
stopInference() {}
|
stopInference() {}
|
||||||
|
|
||||||
|
async list(): Promise<any[]> {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
async load(modelId: string): Promise<any> {
|
||||||
|
return { pid: 1, port: 8080, model_id: modelId, model_path: '', api_key: '' }
|
||||||
|
}
|
||||||
|
|
||||||
|
async unload(sessionId: string): Promise<any> {
|
||||||
|
return { success: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
async chat(opts: any): Promise<any> {
|
||||||
|
return { id: 'test', object: 'chat.completion', created: Date.now(), model: 'test', choices: [] }
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(modelId: string): Promise<void> {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
async import(modelId: string, opts: any): Promise<void> {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
async abortImport(modelId: string): Promise<void> {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
async getLoadedModels(): Promise<string[]> {
|
||||||
|
return []
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('AIEngine', () => {
|
describe('AIEngine', () => {
|
||||||
@ -20,38 +53,34 @@ describe('AIEngine', () => {
|
|||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
engine = new TestAIEngine('', '')
|
engine = new TestAIEngine('', '')
|
||||||
jest.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should load model if provider matches', async () => {
|
it('should load model successfully', async () => {
|
||||||
const model: any = { id: 'model1', engine: 'test-provider' } as any
|
const modelId = 'model1'
|
||||||
|
|
||||||
await engine.loadModel(model)
|
const result = await engine.load(modelId)
|
||||||
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelReady, model)
|
expect(result).toEqual({ pid: 1, port: 8080, model_id: modelId, model_path: '', api_key: '' })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should not load model if provider does not match', async () => {
|
it('should unload model successfully', async () => {
|
||||||
const model: any = { id: 'model1', engine: 'other-provider' } as any
|
const sessionId = 'session1'
|
||||||
|
|
||||||
await engine.loadModel(model)
|
const result = await engine.unload(sessionId)
|
||||||
|
|
||||||
expect(events.emit).not.toHaveBeenCalledWith(ModelEvent.OnModelReady, model)
|
expect(result).toEqual({ success: true })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should unload model if provider matches', async () => {
|
it('should list models', async () => {
|
||||||
const model: Model = { id: 'model1', version: '1.0', engine: 'test-provider' } as any
|
const result = await engine.list()
|
||||||
|
|
||||||
await engine.unloadModel(model)
|
expect(result).toEqual([])
|
||||||
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelStopped, model)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should not unload model if provider does not match', async () => {
|
it('should get loaded models', async () => {
|
||||||
const model: Model = { id: 'model1', version: '1.0', engine: 'other-provider' } as any
|
const result = await engine.getLoadedModels()
|
||||||
|
|
||||||
await engine.unloadModel(model)
|
expect(result).toEqual([])
|
||||||
|
|
||||||
expect(events.emit).not.toHaveBeenCalledWith(ModelEvent.OnModelStopped, model)
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,24 +1,219 @@
|
|||||||
import { events } from '../../events'
|
|
||||||
import { BaseExtension } from '../../extension'
|
import { BaseExtension } from '../../extension'
|
||||||
import { MessageRequest, Model, ModelEvent } from '../../../types'
|
|
||||||
import { EngineManager } from './EngineManager'
|
import { EngineManager } from './EngineManager'
|
||||||
|
|
||||||
|
/* AIEngine class types */
|
||||||
|
|
||||||
|
export interface chatCompletionRequestMessage {
|
||||||
|
role: 'system' | 'user' | 'assistant' | 'tool'
|
||||||
|
content: string | null | Content[] // Content can be a string OR an array of content parts
|
||||||
|
name?: string
|
||||||
|
tool_calls?: any[] // Simplified tool_call_id?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Content {
|
||||||
|
type: 'text' | 'input_image' | 'input_audio'
|
||||||
|
text?: string
|
||||||
|
image_url?: string
|
||||||
|
input_audio?: InputAudio
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface InputAudio {
|
||||||
|
data: string // Base64 encoded audio data
|
||||||
|
format: 'mp3' | 'wav' | 'ogg' | 'flac' // Add more formats as needed/llama-server seems to support mp3
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ToolFunction {
|
||||||
|
name: string // Required: a-z, A-Z, 0-9, _, -, max length 64
|
||||||
|
description?: string
|
||||||
|
parameters?: Record<string, unknown> // JSON Schema object
|
||||||
|
strict?: boolean | null // Defaults to false
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Tool {
|
||||||
|
type: 'function' // Currently, only 'function' is supported
|
||||||
|
function: ToolFunction
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ToolCallOptions {
|
||||||
|
tools?: Tool[]
|
||||||
|
}
|
||||||
|
|
||||||
|
// A specific tool choice to force the model to call
|
||||||
|
export interface ToolCallSpec {
|
||||||
|
type: 'function'
|
||||||
|
function: {
|
||||||
|
name: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// tool_choice may be one of several modes or a specific call
|
||||||
|
export type ToolChoice = 'none' | 'auto' | 'required' | ToolCallSpec
|
||||||
|
|
||||||
|
export interface chatCompletionRequest {
|
||||||
|
model: string // Model ID, though for local it might be implicit via sessionInfo
|
||||||
|
messages: chatCompletionRequestMessage[]
|
||||||
|
tools?: Tool[]
|
||||||
|
tool_choice?: ToolChoice
|
||||||
|
// Core sampling parameters
|
||||||
|
temperature?: number | null
|
||||||
|
dynatemp_range?: number | null
|
||||||
|
dynatemp_exponent?: number | null
|
||||||
|
top_k?: number | null
|
||||||
|
top_p?: number | null
|
||||||
|
min_p?: number | null
|
||||||
|
typical_p?: number | null
|
||||||
|
repeat_penalty?: number | null
|
||||||
|
repeat_last_n?: number | null
|
||||||
|
presence_penalty?: number | null
|
||||||
|
frequency_penalty?: number | null
|
||||||
|
dry_multiplier?: number | null
|
||||||
|
dry_base?: number | null
|
||||||
|
dry_allowed_length?: number | null
|
||||||
|
dry_penalty_last_n?: number | null
|
||||||
|
dry_sequence_breakers?: string[] | null
|
||||||
|
xtc_probability?: number | null
|
||||||
|
xtc_threshold?: number | null
|
||||||
|
mirostat?: number | null // 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0
|
||||||
|
mirostat_tau?: number | null
|
||||||
|
mirostat_eta?: number | null
|
||||||
|
|
||||||
|
n_predict?: number | null
|
||||||
|
n_indent?: number | null
|
||||||
|
n_keep?: number | null
|
||||||
|
stream?: boolean | null
|
||||||
|
stop?: string | string[] | null
|
||||||
|
seed?: number | null // RNG seed
|
||||||
|
|
||||||
|
// Advanced sampling
|
||||||
|
logit_bias?: { [key: string]: number } | null
|
||||||
|
n_probs?: number | null
|
||||||
|
min_keep?: number | null
|
||||||
|
t_max_predict_ms?: number | null
|
||||||
|
image_data?: Array<{ data: string; id: number }> | null
|
||||||
|
|
||||||
|
// Internal/optimization parameters
|
||||||
|
id_slot?: number | null
|
||||||
|
cache_prompt?: boolean | null
|
||||||
|
return_tokens?: boolean | null
|
||||||
|
samplers?: string[] | null
|
||||||
|
timings_per_token?: boolean | null
|
||||||
|
post_sampling_probs?: boolean | null
|
||||||
|
chat_template_kwargs?: chat_template_kdict | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface chat_template_kdict {
|
||||||
|
enable_thinking: false
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface chatCompletionChunkChoiceDelta {
|
||||||
|
content?: string | null
|
||||||
|
role?: 'system' | 'user' | 'assistant' | 'tool'
|
||||||
|
tool_calls?: any[] // Simplified
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface chatCompletionChunkChoice {
|
||||||
|
index: number
|
||||||
|
delta: chatCompletionChunkChoiceDelta
|
||||||
|
finish_reason?: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call' | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface chatCompletionChunk {
|
||||||
|
id: string
|
||||||
|
object: 'chat.completion.chunk'
|
||||||
|
created: number
|
||||||
|
model: string
|
||||||
|
choices: chatCompletionChunkChoice[]
|
||||||
|
system_fingerprint?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface chatCompletionChoice {
|
||||||
|
index: number
|
||||||
|
message: chatCompletionRequestMessage // Response message
|
||||||
|
finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call'
|
||||||
|
logprobs?: any // Simplified
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface chatCompletion {
|
||||||
|
id: string
|
||||||
|
object: 'chat.completion'
|
||||||
|
created: number
|
||||||
|
model: string // Model ID used
|
||||||
|
choices: chatCompletionChoice[]
|
||||||
|
usage?: {
|
||||||
|
prompt_tokens: number
|
||||||
|
completion_tokens: number
|
||||||
|
total_tokens: number
|
||||||
|
}
|
||||||
|
system_fingerprint?: string
|
||||||
|
}
|
||||||
|
// --- End OpenAI types ---
|
||||||
|
|
||||||
|
// Shared model metadata
|
||||||
|
export interface modelInfo {
|
||||||
|
id: string // e.g. "qwen3-4B" or "org/model/quant"
|
||||||
|
name: string // human‑readable, e.g., "Qwen3 4B Q4_0"
|
||||||
|
quant_type?: string // q4_0 (optional as it might be part of ID or name)
|
||||||
|
providerId: string // e.g. "llama.cpp"
|
||||||
|
port: number
|
||||||
|
sizeBytes: number
|
||||||
|
tags?: string[]
|
||||||
|
path?: string // Absolute path to the model file, if applicable
|
||||||
|
// Additional provider-specific metadata can be added here
|
||||||
|
[key: string]: any
|
||||||
|
}
|
||||||
|
|
||||||
|
// 1. /list
|
||||||
|
export type listResult = modelInfo[]
|
||||||
|
|
||||||
|
export interface SessionInfo {
|
||||||
|
pid: number // opaque handle for unload/chat
|
||||||
|
port: number // llama-server output port (corrected from portid)
|
||||||
|
model_id: string //name of the model
|
||||||
|
model_path: string // path of the loaded model
|
||||||
|
api_key: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UnloadResult {
|
||||||
|
success: boolean
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. /chat
|
||||||
|
export interface chatOptions {
|
||||||
|
providerId: string
|
||||||
|
sessionId: string
|
||||||
|
/** Full OpenAI ChatCompletionRequest payload */
|
||||||
|
payload: chatCompletionRequest
|
||||||
|
}
|
||||||
|
// Output for /chat will be Promise<ChatCompletion> for non-streaming
|
||||||
|
// or Promise<AsyncIterable<ChatCompletionChunk>> for streaming
|
||||||
|
|
||||||
|
// 7. /import
|
||||||
|
export interface ImportOptions {
|
||||||
|
modelPath: string
|
||||||
|
mmprojPath?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface importResult {
|
||||||
|
success: boolean
|
||||||
|
modelInfo?: modelInfo
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Base AIEngine
|
* Base AIEngine
|
||||||
* Applicable to all AI Engines
|
* Applicable to all AI Engines
|
||||||
*/
|
*/
|
||||||
|
|
||||||
export abstract class AIEngine extends BaseExtension {
|
export abstract class AIEngine extends BaseExtension {
|
||||||
// The inference engine
|
// The inference engine ID, implementing the readonly providerId from interface
|
||||||
abstract provider: string
|
abstract readonly provider: string
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* On extension load, subscribe to events.
|
* On extension load, subscribe to events.
|
||||||
*/
|
*/
|
||||||
override onLoad() {
|
override onLoad() {
|
||||||
this.registerEngine()
|
this.registerEngine()
|
||||||
|
|
||||||
events.on(ModelEvent.OnModelInit, (model: Model) => this.loadModel(model))
|
|
||||||
events.on(ModelEvent.OnModelStop, (model: Model) => this.unloadModel(model))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -29,29 +224,50 @@ export abstract class AIEngine extends BaseExtension {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Loads the model.
|
* Lists available models
|
||||||
*/
|
*/
|
||||||
async loadModel(model: Partial<Model>, abortController?: AbortController): Promise<any> {
|
abstract list(): Promise<modelInfo[]>
|
||||||
if (model?.engine?.toString() !== this.provider) return Promise.resolve()
|
|
||||||
events.emit(ModelEvent.OnModelReady, model)
|
|
||||||
return Promise.resolve()
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Stops the model.
|
|
||||||
*/
|
|
||||||
async unloadModel(model?: Partial<Model>): Promise<any> {
|
|
||||||
if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
|
|
||||||
events.emit(ModelEvent.OnModelStopped, model ?? {})
|
|
||||||
return Promise.resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Inference request
|
* Loads a model into memory
|
||||||
*/
|
*/
|
||||||
inference(data: MessageRequest) {}
|
abstract load(modelId: string, settings?: any): Promise<SessionInfo>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stop inference
|
* Unloads a model from memory
|
||||||
*/
|
*/
|
||||||
stopInference() {}
|
abstract unload(sessionId: string): Promise<UnloadResult>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sends a chat request to the model
|
||||||
|
*/
|
||||||
|
abstract chat(
|
||||||
|
opts: chatCompletionRequest,
|
||||||
|
abortController?: AbortController
|
||||||
|
): Promise<chatCompletion | AsyncIterable<chatCompletionChunk>>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes a model
|
||||||
|
*/
|
||||||
|
abstract delete(modelId: string): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Imports a model
|
||||||
|
*/
|
||||||
|
abstract import(modelId: string, opts: ImportOptions): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aborts an ongoing model import
|
||||||
|
*/
|
||||||
|
abstract abortImport(modelId: string): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get currently loaded models
|
||||||
|
*/
|
||||||
|
abstract getLoadedModels(): Promise<string[]>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optional method to get the underlying chat client
|
||||||
|
*/
|
||||||
|
getChatClient?(sessionId: string): any
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,6 +1,4 @@
|
|||||||
/**
|
import { describe, it, test, expect, beforeEach } from 'vitest'
|
||||||
* @jest-environment jsdom
|
|
||||||
*/
|
|
||||||
import { EngineManager } from './EngineManager'
|
import { EngineManager } from './EngineManager'
|
||||||
import { AIEngine } from './AIEngine'
|
import { AIEngine } from './AIEngine'
|
||||||
import { InferenceEngine } from '../../../types'
|
import { InferenceEngine } from '../../../types'
|
||||||
|
|||||||
@ -1,98 +1,134 @@
|
|||||||
/**
|
import { describe, it, expect, beforeEach, vi, type Mock } from 'vitest'
|
||||||
* @jest-environment jsdom
|
|
||||||
*/
|
|
||||||
import { LocalOAIEngine } from './LocalOAIEngine'
|
import { LocalOAIEngine } from './LocalOAIEngine'
|
||||||
import { events } from '../../events'
|
import { events } from '../../events'
|
||||||
import { ModelEvent, Model } from '../../../types'
|
import { Model, ModelEvent } from '../../../types'
|
||||||
import { executeOnMain, systemInformation, dirName } from '../../core'
|
|
||||||
|
|
||||||
jest.mock('../../core', () => ({
|
vi.mock('../../events')
|
||||||
executeOnMain: jest.fn(),
|
|
||||||
systemInformation: jest.fn(),
|
|
||||||
dirName: jest.fn(),
|
|
||||||
}))
|
|
||||||
|
|
||||||
jest.mock('../../events', () => ({
|
|
||||||
events: {
|
|
||||||
on: jest.fn(),
|
|
||||||
emit: jest.fn(),
|
|
||||||
},
|
|
||||||
}))
|
|
||||||
|
|
||||||
class TestLocalOAIEngine extends LocalOAIEngine {
|
class TestLocalOAIEngine extends LocalOAIEngine {
|
||||||
inferenceUrl = ''
|
inferenceUrl = 'http://test-local-inference-url'
|
||||||
nodeModule = 'testNodeModule'
|
provider = 'test-local-provider'
|
||||||
provider = 'testProvider'
|
nodeModule = 'test-node-module'
|
||||||
|
|
||||||
|
async headers() {
|
||||||
|
return { Authorization: 'Bearer test-token' }
|
||||||
|
}
|
||||||
|
|
||||||
|
async loadModel(model: Model & { file_path?: string }): Promise<void> {
|
||||||
|
this.loadedModel = model
|
||||||
|
}
|
||||||
|
|
||||||
|
async unloadModel(model?: Model) {
|
||||||
|
this.loadedModel = undefined
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('LocalOAIEngine', () => {
|
describe('LocalOAIEngine', () => {
|
||||||
let engine: TestLocalOAIEngine
|
let engine: TestLocalOAIEngine
|
||||||
|
const mockModel: Model & { file_path?: string } = {
|
||||||
|
object: 'model',
|
||||||
|
version: '1.0.0',
|
||||||
|
format: 'gguf',
|
||||||
|
sources: [],
|
||||||
|
id: 'test-model',
|
||||||
|
name: 'Test Model',
|
||||||
|
description: 'A test model',
|
||||||
|
settings: {},
|
||||||
|
parameters: {},
|
||||||
|
metadata: {},
|
||||||
|
file_path: '/path/to/model.gguf'
|
||||||
|
}
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
engine = new TestLocalOAIEngine('', '')
|
engine = new TestLocalOAIEngine('', '')
|
||||||
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
afterEach(() => {
|
describe('onLoad', () => {
|
||||||
jest.clearAllMocks()
|
it('should call super.onLoad and subscribe to model events', () => {
|
||||||
|
const superOnLoadSpy = vi.spyOn(Object.getPrototypeOf(Object.getPrototypeOf(engine)), 'onLoad')
|
||||||
|
|
||||||
|
engine.onLoad()
|
||||||
|
|
||||||
|
expect(superOnLoadSpy).toHaveBeenCalled()
|
||||||
|
expect(events.on).toHaveBeenCalledWith(
|
||||||
|
ModelEvent.OnModelInit,
|
||||||
|
expect.any(Function)
|
||||||
|
)
|
||||||
|
expect(events.on).toHaveBeenCalledWith(
|
||||||
|
ModelEvent.OnModelStop,
|
||||||
|
expect.any(Function)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should load model when OnModelInit event is triggered', () => {
|
||||||
|
const loadModelSpy = vi.spyOn(engine, 'loadModel')
|
||||||
|
engine.onLoad()
|
||||||
|
|
||||||
|
// Get the event handler for OnModelInit
|
||||||
|
const onModelInitCall = (events.on as Mock).mock.calls.find(
|
||||||
|
call => call[0] === ModelEvent.OnModelInit
|
||||||
|
)
|
||||||
|
const onModelInitHandler = onModelInitCall[1]
|
||||||
|
|
||||||
|
// Trigger the event handler
|
||||||
|
onModelInitHandler(mockModel)
|
||||||
|
|
||||||
|
expect(loadModelSpy).toHaveBeenCalledWith(mockModel)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should unload model when OnModelStop event is triggered', () => {
|
||||||
|
const unloadModelSpy = vi.spyOn(engine, 'unloadModel')
|
||||||
|
engine.onLoad()
|
||||||
|
|
||||||
|
// Get the event handler for OnModelStop
|
||||||
|
const onModelStopCall = (events.on as Mock).mock.calls.find(
|
||||||
|
call => call[0] === ModelEvent.OnModelStop
|
||||||
|
)
|
||||||
|
const onModelStopHandler = onModelStopCall[1]
|
||||||
|
|
||||||
|
// Trigger the event handler
|
||||||
|
onModelStopHandler(mockModel)
|
||||||
|
|
||||||
|
expect(unloadModelSpy).toHaveBeenCalledWith(mockModel)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should subscribe to events on load', () => {
|
describe('properties', () => {
|
||||||
engine.onLoad()
|
it('should have correct default function names', () => {
|
||||||
expect(events.on).toHaveBeenCalledWith(ModelEvent.OnModelInit, expect.any(Function))
|
expect(engine.loadModelFunctionName).toBe('loadModel')
|
||||||
expect(events.on).toHaveBeenCalledWith(ModelEvent.OnModelStop, expect.any(Function))
|
expect(engine.unloadModelFunctionName).toBe('unloadModel')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should have abstract nodeModule property implemented', () => {
|
||||||
|
expect(engine.nodeModule).toBe('test-node-module')
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should load model correctly', async () => {
|
describe('loadModel', () => {
|
||||||
const model: any = { engine: 'testProvider', file_path: 'path/to/model' } as any
|
it('should load the model and set loadedModel', async () => {
|
||||||
const modelFolder = 'path/to'
|
await engine.loadModel(mockModel)
|
||||||
const systemInfo = { os: 'testOS' }
|
expect(engine.loadedModel).toBe(mockModel)
|
||||||
const res = { error: null }
|
})
|
||||||
|
|
||||||
;(dirName as jest.Mock).mockResolvedValue(modelFolder)
|
it('should handle model with file_path', async () => {
|
||||||
;(systemInformation as jest.Mock).mockResolvedValue(systemInfo)
|
const modelWithPath = { ...mockModel, file_path: '/custom/path/model.gguf' }
|
||||||
;(executeOnMain as jest.Mock).mockResolvedValue(res)
|
await engine.loadModel(modelWithPath)
|
||||||
|
expect(engine.loadedModel).toBe(modelWithPath)
|
||||||
await engine.loadModel(model)
|
})
|
||||||
|
|
||||||
expect(dirName).toHaveBeenCalledWith(model.file_path)
|
|
||||||
expect(systemInformation).toHaveBeenCalled()
|
|
||||||
expect(executeOnMain).toHaveBeenCalledWith(
|
|
||||||
engine.nodeModule,
|
|
||||||
engine.loadModelFunctionName,
|
|
||||||
{ modelFolder, model },
|
|
||||||
systemInfo
|
|
||||||
)
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelReady, model)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should handle load model error', async () => {
|
describe('unloadModel', () => {
|
||||||
const model: any = { engine: 'testProvider', file_path: 'path/to/model' } as any
|
it('should unload the model and clear loadedModel', async () => {
|
||||||
const modelFolder = 'path/to'
|
engine.loadedModel = mockModel
|
||||||
const systemInfo = { os: 'testOS' }
|
await engine.unloadModel(mockModel)
|
||||||
const res = { error: 'load error' }
|
expect(engine.loadedModel).toBeUndefined()
|
||||||
|
})
|
||||||
|
|
||||||
;(dirName as jest.Mock).mockResolvedValue(modelFolder)
|
it('should handle unload without passing a model', async () => {
|
||||||
;(systemInformation as jest.Mock).mockResolvedValue(systemInfo)
|
engine.loadedModel = mockModel
|
||||||
;(executeOnMain as jest.Mock).mockResolvedValue(res)
|
await engine.unloadModel()
|
||||||
|
expect(engine.loadedModel).toBeUndefined()
|
||||||
await expect(engine.loadModel(model)).rejects.toEqual('load error')
|
})
|
||||||
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelFail, { error: res.error })
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should unload model correctly', async () => {
|
|
||||||
const model: Model = { engine: 'testProvider' } as any
|
|
||||||
|
|
||||||
await engine.unloadModel(model)
|
|
||||||
|
|
||||||
expect(executeOnMain).toHaveBeenCalledWith(engine.nodeModule, engine.unloadModelFunctionName)
|
|
||||||
expect(events.emit).toHaveBeenCalledWith(ModelEvent.OnModelStopped, {})
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should not unload model if engine does not match', async () => {
|
|
||||||
const model: Model = { engine: 'otherProvider' } as any
|
|
||||||
await engine.unloadModel(model)
|
|
||||||
expect(executeOnMain).not.toHaveBeenCalled()
|
|
||||||
expect(events.emit).not.toHaveBeenCalledWith(ModelEvent.OnModelStopped, {})
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@ -1,4 +1,3 @@
|
|||||||
import { executeOnMain, systemInformation, dirName, joinPath, getJanDataFolderPath } from '../../core'
|
|
||||||
import { events } from '../../events'
|
import { events } from '../../events'
|
||||||
import { Model, ModelEvent } from '../../../types'
|
import { Model, ModelEvent } from '../../../types'
|
||||||
import { OAIEngine } from './OAIEngine'
|
import { OAIEngine } from './OAIEngine'
|
||||||
@ -29,46 +28,14 @@ export abstract class LocalOAIEngine extends OAIEngine {
|
|||||||
/**
|
/**
|
||||||
* Load the model.
|
* Load the model.
|
||||||
*/
|
*/
|
||||||
override async loadModel(model: Model & { file_path?: string }, abortController?: AbortController): Promise<void> {
|
async loadModel(model: Model & { file_path?: string }): Promise<void> {
|
||||||
if (model.engine.toString() !== this.provider) return
|
// Implementation of loading the model
|
||||||
const modelFolder = 'file_path' in model && model.file_path ? await dirName(model.file_path) : await this.getModelFilePath(model.id)
|
|
||||||
const systemInfo = await systemInformation()
|
|
||||||
const res = await executeOnMain(
|
|
||||||
this.nodeModule,
|
|
||||||
this.loadModelFunctionName,
|
|
||||||
{
|
|
||||||
modelFolder,
|
|
||||||
model,
|
|
||||||
},
|
|
||||||
systemInfo
|
|
||||||
)
|
|
||||||
|
|
||||||
if (res?.error) {
|
|
||||||
events.emit(ModelEvent.OnModelFail, { error: res.error })
|
|
||||||
return Promise.reject(res.error)
|
|
||||||
} else {
|
|
||||||
this.loadedModel = model
|
|
||||||
events.emit(ModelEvent.OnModelReady, model)
|
|
||||||
return Promise.resolve()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stops the model.
|
* Stops the model.
|
||||||
*/
|
*/
|
||||||
override async unloadModel(model?: Model) {
|
async unloadModel(model?: Model) {
|
||||||
if (model?.engine && model.engine?.toString() !== this.provider) return Promise.resolve()
|
// Implementation of unloading the model
|
||||||
|
|
||||||
this.loadedModel = undefined
|
|
||||||
await executeOnMain(this.nodeModule, this.unloadModelFunctionName).then(() => {
|
|
||||||
events.emit(ModelEvent.OnModelStopped, {})
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Legacy
|
|
||||||
private getModelFilePath = async (
|
|
||||||
id: string,
|
|
||||||
): Promise<string> => {
|
|
||||||
return joinPath([await getJanDataFolderPath(), 'models', id])
|
|
||||||
}
|
|
||||||
///
|
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,6 +1,4 @@
|
|||||||
/**
|
import { describe, it, expect, beforeEach, vi } from 'vitest'
|
||||||
* @jest-environment jsdom
|
|
||||||
*/
|
|
||||||
import { OAIEngine } from './OAIEngine'
|
import { OAIEngine } from './OAIEngine'
|
||||||
import { events } from '../../events'
|
import { events } from '../../events'
|
||||||
import {
|
import {
|
||||||
@ -13,7 +11,7 @@ import {
|
|||||||
ContentType,
|
ContentType,
|
||||||
} from '../../../types'
|
} from '../../../types'
|
||||||
|
|
||||||
jest.mock('../../events')
|
vi.mock('../../events')
|
||||||
|
|
||||||
class TestOAIEngine extends OAIEngine {
|
class TestOAIEngine extends OAIEngine {
|
||||||
inferenceUrl = 'http://test-inference-url'
|
inferenceUrl = 'http://test-inference-url'
|
||||||
@ -29,7 +27,7 @@ describe('OAIEngine', () => {
|
|||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
engine = new TestOAIEngine('', '')
|
engine = new TestOAIEngine('', '')
|
||||||
jest.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should subscribe to events on load', () => {
|
it('should subscribe to events on load', () => {
|
||||||
|
|||||||
@ -44,10 +44,12 @@ export abstract class OAIEngine extends AIEngine {
|
|||||||
*/
|
*/
|
||||||
override onUnload(): void {}
|
override onUnload(): void {}
|
||||||
|
|
||||||
|
inference(data: MessageRequest) {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stops the inference.
|
* Stops the inference.
|
||||||
*/
|
*/
|
||||||
override stopInference() {
|
stopInference() {
|
||||||
this.isCancelled = true
|
this.isCancelled = true
|
||||||
this.controller?.abort()
|
this.controller?.abort()
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,6 +1,4 @@
|
|||||||
/**
|
import { describe, test, expect, beforeEach, vi } from 'vitest'
|
||||||
* @jest-environment jsdom
|
|
||||||
*/
|
|
||||||
import { RemoteOAIEngine } from './'
|
import { RemoteOAIEngine } from './'
|
||||||
|
|
||||||
class TestRemoteOAIEngine extends RemoteOAIEngine {
|
class TestRemoteOAIEngine extends RemoteOAIEngine {
|
||||||
@ -16,8 +14,8 @@ describe('RemoteOAIEngine', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
test('should call onLoad and super.onLoad', () => {
|
test('should call onLoad and super.onLoad', () => {
|
||||||
const onLoadSpy = jest.spyOn(engine, 'onLoad')
|
const onLoadSpy = vi.spyOn(engine, 'onLoad')
|
||||||
const superOnLoadSpy = jest.spyOn(Object.getPrototypeOf(RemoteOAIEngine.prototype), 'onLoad')
|
const superOnLoadSpy = vi.spyOn(Object.getPrototypeOf(RemoteOAIEngine.prototype), 'onLoad')
|
||||||
engine.onLoad()
|
engine.onLoad()
|
||||||
|
|
||||||
expect(onLoadSpy).toHaveBeenCalled()
|
expect(onLoadSpy).toHaveBeenCalled()
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
|
import { it, expect } from 'vitest'
|
||||||
import { expect } from '@jest/globals';
|
import * as engines from './index'
|
||||||
|
|
||||||
it('should re-export all exports from ./AIEngine', () => {
|
it('should re-export all exports from ./AIEngine', () => {
|
||||||
expect(require('./index')).toHaveProperty('AIEngine');
|
expect(engines).toHaveProperty('AIEngine')
|
||||||
});
|
})
|
||||||
|
|||||||
@ -1,566 +0,0 @@
|
|||||||
import { EngineManagementExtension } from './enginesManagement'
|
|
||||||
import { ExtensionTypeEnum } from '../extension'
|
|
||||||
import {
|
|
||||||
EngineConfig,
|
|
||||||
EngineReleased,
|
|
||||||
EngineVariant,
|
|
||||||
Engines,
|
|
||||||
InferenceEngine,
|
|
||||||
DefaultEngineVariant,
|
|
||||||
Model
|
|
||||||
} from '../../types'
|
|
||||||
|
|
||||||
// Mock implementation of EngineManagementExtension
|
|
||||||
class MockEngineManagementExtension extends EngineManagementExtension {
|
|
||||||
private mockEngines: Engines = {
|
|
||||||
llama: {
|
|
||||||
name: 'llama',
|
|
||||||
variants: [
|
|
||||||
{
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '1.0.0',
|
|
||||||
path: '/engines/llama/cpu/1.0.0',
|
|
||||||
installed: true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
variant: 'cuda',
|
|
||||||
version: '1.0.0',
|
|
||||||
path: '/engines/llama/cuda/1.0.0',
|
|
||||||
installed: false
|
|
||||||
}
|
|
||||||
],
|
|
||||||
default: {
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '1.0.0'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
gpt4all: {
|
|
||||||
name: 'gpt4all',
|
|
||||||
variants: [
|
|
||||||
{
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '2.0.0',
|
|
||||||
path: '/engines/gpt4all/cpu/2.0.0',
|
|
||||||
installed: true
|
|
||||||
}
|
|
||||||
],
|
|
||||||
default: {
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '2.0.0'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private mockReleases: { [key: string]: EngineReleased[] } = {
|
|
||||||
'llama-1.0.0': [
|
|
||||||
{
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '1.0.0',
|
|
||||||
os: ['macos', 'linux', 'windows'],
|
|
||||||
url: 'https://example.com/llama/1.0.0/cpu'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
variant: 'cuda',
|
|
||||||
version: '1.0.0',
|
|
||||||
os: ['linux', 'windows'],
|
|
||||||
url: 'https://example.com/llama/1.0.0/cuda'
|
|
||||||
}
|
|
||||||
],
|
|
||||||
'llama-1.1.0': [
|
|
||||||
{
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '1.1.0',
|
|
||||||
os: ['macos', 'linux', 'windows'],
|
|
||||||
url: 'https://example.com/llama/1.1.0/cpu'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
variant: 'cuda',
|
|
||||||
version: '1.1.0',
|
|
||||||
os: ['linux', 'windows'],
|
|
||||||
url: 'https://example.com/llama/1.1.0/cuda'
|
|
||||||
}
|
|
||||||
],
|
|
||||||
'gpt4all-2.0.0': [
|
|
||||||
{
|
|
||||||
variant: 'cpu',
|
|
||||||
version: '2.0.0',
|
|
||||||
os: ['macos', 'linux', 'windows'],
|
|
||||||
url: 'https://example.com/gpt4all/2.0.0/cpu'
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
private remoteModels: { [engine: string]: Model[] } = {
|
|
||||||
'llama': [],
|
|
||||||
'gpt4all': []
|
|
||||||
}
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
super('http://mock-url.com', 'mock-engine-extension', 'Mock Engine Extension', true, 'A mock engine extension', '1.0.0')
|
|
||||||
}
|
|
||||||
|
|
||||||
onLoad(): void {
|
|
||||||
// Mock implementation
|
|
||||||
}
|
|
||||||
|
|
||||||
onUnload(): void {
|
|
||||||
// Mock implementation
|
|
||||||
}
|
|
||||||
|
|
||||||
async getEngines(): Promise<Engines> {
|
|
||||||
return JSON.parse(JSON.stringify(this.mockEngines))
|
|
||||||
}
|
|
||||||
|
|
||||||
async getInstalledEngines(name: InferenceEngine): Promise<EngineVariant[]> {
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.mockEngines[name].variants.filter(variant => variant.installed)
|
|
||||||
}
|
|
||||||
|
|
||||||
async getReleasedEnginesByVersion(
|
|
||||||
name: InferenceEngine,
|
|
||||||
version: string,
|
|
||||||
platform?: string
|
|
||||||
): Promise<EngineReleased[]> {
|
|
||||||
const key = `${name}-${version}`
|
|
||||||
let releases = this.mockReleases[key] || []
|
|
||||||
|
|
||||||
if (platform) {
|
|
||||||
releases = releases.filter(release => release.os.includes(platform))
|
|
||||||
}
|
|
||||||
|
|
||||||
return releases
|
|
||||||
}
|
|
||||||
|
|
||||||
async getLatestReleasedEngine(
|
|
||||||
name: InferenceEngine,
|
|
||||||
platform?: string
|
|
||||||
): Promise<EngineReleased[]> {
|
|
||||||
// For mock, let's assume latest versions are 1.1.0 for llama and 2.0.0 for gpt4all
|
|
||||||
const latestVersions = {
|
|
||||||
'llama': '1.1.0',
|
|
||||||
'gpt4all': '2.0.0'
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!latestVersions[name]) {
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.getReleasedEnginesByVersion(name, latestVersions[name], platform)
|
|
||||||
}
|
|
||||||
|
|
||||||
async installEngine(
|
|
||||||
name: string,
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }> {
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
this.mockEngines[name] = {
|
|
||||||
name,
|
|
||||||
variants: [],
|
|
||||||
default: {
|
|
||||||
variant: engineConfig.variant,
|
|
||||||
version: engineConfig.version
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if variant already exists
|
|
||||||
const existingVariantIndex = this.mockEngines[name].variants.findIndex(
|
|
||||||
v => v.variant === engineConfig.variant && v.version === engineConfig.version
|
|
||||||
)
|
|
||||||
|
|
||||||
if (existingVariantIndex >= 0) {
|
|
||||||
this.mockEngines[name].variants[existingVariantIndex].installed = true
|
|
||||||
} else {
|
|
||||||
this.mockEngines[name].variants.push({
|
|
||||||
variant: engineConfig.variant,
|
|
||||||
version: engineConfig.version,
|
|
||||||
path: `/engines/${name}/${engineConfig.variant}/${engineConfig.version}`,
|
|
||||||
installed: true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return { messages: `Successfully installed ${name} ${engineConfig.variant} ${engineConfig.version}` }
|
|
||||||
}
|
|
||||||
|
|
||||||
async addRemoteEngine(
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }> {
|
|
||||||
const name = engineConfig.name || 'remote-engine'
|
|
||||||
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
this.mockEngines[name] = {
|
|
||||||
name,
|
|
||||||
variants: [],
|
|
||||||
default: {
|
|
||||||
variant: engineConfig.variant,
|
|
||||||
version: engineConfig.version
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.mockEngines[name].variants.push({
|
|
||||||
variant: engineConfig.variant,
|
|
||||||
version: engineConfig.version,
|
|
||||||
path: engineConfig.path || `/engines/${name}/${engineConfig.variant}/${engineConfig.version}`,
|
|
||||||
installed: true,
|
|
||||||
url: engineConfig.url
|
|
||||||
})
|
|
||||||
|
|
||||||
return { messages: `Successfully added remote engine ${name}` }
|
|
||||||
}
|
|
||||||
|
|
||||||
async uninstallEngine(
|
|
||||||
name: InferenceEngine,
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }> {
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
return { messages: `Engine ${name} not found` }
|
|
||||||
}
|
|
||||||
|
|
||||||
const variantIndex = this.mockEngines[name].variants.findIndex(
|
|
||||||
v => v.variant === engineConfig.variant && v.version === engineConfig.version
|
|
||||||
)
|
|
||||||
|
|
||||||
if (variantIndex >= 0) {
|
|
||||||
this.mockEngines[name].variants[variantIndex].installed = false
|
|
||||||
|
|
||||||
// If this was the default variant, reset default
|
|
||||||
if (
|
|
||||||
this.mockEngines[name].default.variant === engineConfig.variant &&
|
|
||||||
this.mockEngines[name].default.version === engineConfig.version
|
|
||||||
) {
|
|
||||||
// Find another installed variant to set as default
|
|
||||||
const installedVariant = this.mockEngines[name].variants.find(v => v.installed)
|
|
||||||
if (installedVariant) {
|
|
||||||
this.mockEngines[name].default = {
|
|
||||||
variant: installedVariant.variant,
|
|
||||||
version: installedVariant.version
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// No installed variants remain, clear default
|
|
||||||
this.mockEngines[name].default = { variant: '', version: '' }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { messages: `Successfully uninstalled ${name} ${engineConfig.variant} ${engineConfig.version}` }
|
|
||||||
} else {
|
|
||||||
return { messages: `Variant ${engineConfig.variant} ${engineConfig.version} not found for engine ${name}` }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async getDefaultEngineVariant(
|
|
||||||
name: InferenceEngine
|
|
||||||
): Promise<DefaultEngineVariant> {
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
return { variant: '', version: '' }
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.mockEngines[name].default
|
|
||||||
}
|
|
||||||
|
|
||||||
async setDefaultEngineVariant(
|
|
||||||
name: InferenceEngine,
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }> {
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
return { messages: `Engine ${name} not found` }
|
|
||||||
}
|
|
||||||
|
|
||||||
const variantExists = this.mockEngines[name].variants.some(
|
|
||||||
v => v.variant === engineConfig.variant && v.version === engineConfig.version && v.installed
|
|
||||||
)
|
|
||||||
|
|
||||||
if (!variantExists) {
|
|
||||||
return { messages: `Variant ${engineConfig.variant} ${engineConfig.version} not found or not installed` }
|
|
||||||
}
|
|
||||||
|
|
||||||
this.mockEngines[name].default = {
|
|
||||||
variant: engineConfig.variant,
|
|
||||||
version: engineConfig.version
|
|
||||||
}
|
|
||||||
|
|
||||||
return { messages: `Successfully set ${engineConfig.variant} ${engineConfig.version} as default for ${name}` }
|
|
||||||
}
|
|
||||||
|
|
||||||
async updateEngine(
|
|
||||||
name: InferenceEngine,
|
|
||||||
engineConfig?: EngineConfig
|
|
||||||
): Promise<{ messages: string }> {
|
|
||||||
if (!this.mockEngines[name]) {
|
|
||||||
return { messages: `Engine ${name} not found` }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!engineConfig) {
|
|
||||||
// Assume we're updating to the latest version
|
|
||||||
return { messages: `Successfully updated ${name} to the latest version` }
|
|
||||||
}
|
|
||||||
|
|
||||||
const variantIndex = this.mockEngines[name].variants.findIndex(
|
|
||||||
v => v.variant === engineConfig.variant && v.installed
|
|
||||||
)
|
|
||||||
|
|
||||||
if (variantIndex >= 0) {
|
|
||||||
// Update the version
|
|
||||||
this.mockEngines[name].variants[variantIndex].version = engineConfig.version
|
|
||||||
|
|
||||||
// If this was the default variant, update default version too
|
|
||||||
if (this.mockEngines[name].default.variant === engineConfig.variant) {
|
|
||||||
this.mockEngines[name].default.version = engineConfig.version
|
|
||||||
}
|
|
||||||
|
|
||||||
return { messages: `Successfully updated ${name} ${engineConfig.variant} to version ${engineConfig.version}` }
|
|
||||||
} else {
|
|
||||||
return { messages: `Installed variant ${engineConfig.variant} not found for engine ${name}` }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async addRemoteModel(model: Model): Promise<void> {
|
|
||||||
const engine = model.engine as string
|
|
||||||
|
|
||||||
if (!this.remoteModels[engine]) {
|
|
||||||
this.remoteModels[engine] = []
|
|
||||||
}
|
|
||||||
|
|
||||||
this.remoteModels[engine].push(model)
|
|
||||||
}
|
|
||||||
|
|
||||||
async getRemoteModels(name: InferenceEngine | string): Promise<Model[]> {
|
|
||||||
return this.remoteModels[name] || []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('EngineManagementExtension', () => {
|
|
||||||
let extension: MockEngineManagementExtension
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
extension = new MockEngineManagementExtension()
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should return the correct extension type', () => {
|
|
||||||
expect(extension.type()).toBe(ExtensionTypeEnum.Engine)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should get all engines', async () => {
|
|
||||||
const engines = await extension.getEngines()
|
|
||||||
|
|
||||||
expect(engines).toBeDefined()
|
|
||||||
expect(engines.llama).toBeDefined()
|
|
||||||
expect(engines.gpt4all).toBeDefined()
|
|
||||||
expect(engines.llama.variants).toHaveLength(2)
|
|
||||||
expect(engines.gpt4all.variants).toHaveLength(1)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should get installed engines', async () => {
|
|
||||||
const llamaEngines = await extension.getInstalledEngines('llama')
|
|
||||||
|
|
||||||
expect(llamaEngines).toHaveLength(1)
|
|
||||||
expect(llamaEngines[0].variant).toBe('cpu')
|
|
||||||
expect(llamaEngines[0].installed).toBe(true)
|
|
||||||
|
|
||||||
const gpt4allEngines = await extension.getInstalledEngines('gpt4all')
|
|
||||||
|
|
||||||
expect(gpt4allEngines).toHaveLength(1)
|
|
||||||
expect(gpt4allEngines[0].variant).toBe('cpu')
|
|
||||||
expect(gpt4allEngines[0].installed).toBe(true)
|
|
||||||
|
|
||||||
// Test non-existent engine
|
|
||||||
const nonExistentEngines = await extension.getInstalledEngines('non-existent' as InferenceEngine)
|
|
||||||
expect(nonExistentEngines).toHaveLength(0)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should get released engines by version', async () => {
|
|
||||||
const llamaReleases = await extension.getReleasedEnginesByVersion('llama', '1.0.0')
|
|
||||||
|
|
||||||
expect(llamaReleases).toHaveLength(2)
|
|
||||||
expect(llamaReleases[0].variant).toBe('cpu')
|
|
||||||
expect(llamaReleases[1].variant).toBe('cuda')
|
|
||||||
|
|
||||||
// Test with platform filter
|
|
||||||
const llamaLinuxReleases = await extension.getReleasedEnginesByVersion('llama', '1.0.0', 'linux')
|
|
||||||
|
|
||||||
expect(llamaLinuxReleases).toHaveLength(2)
|
|
||||||
|
|
||||||
const llamaMacReleases = await extension.getReleasedEnginesByVersion('llama', '1.0.0', 'macos')
|
|
||||||
|
|
||||||
expect(llamaMacReleases).toHaveLength(1)
|
|
||||||
expect(llamaMacReleases[0].variant).toBe('cpu')
|
|
||||||
|
|
||||||
// Test non-existent version
|
|
||||||
const nonExistentReleases = await extension.getReleasedEnginesByVersion('llama', '9.9.9')
|
|
||||||
expect(nonExistentReleases).toHaveLength(0)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should get latest released engines', async () => {
|
|
||||||
const latestLlamaReleases = await extension.getLatestReleasedEngine('llama')
|
|
||||||
|
|
||||||
expect(latestLlamaReleases).toHaveLength(2)
|
|
||||||
expect(latestLlamaReleases[0].version).toBe('1.1.0')
|
|
||||||
|
|
||||||
// Test with platform filter
|
|
||||||
const latestLlamaMacReleases = await extension.getLatestReleasedEngine('llama', 'macos')
|
|
||||||
|
|
||||||
expect(latestLlamaMacReleases).toHaveLength(1)
|
|
||||||
expect(latestLlamaMacReleases[0].variant).toBe('cpu')
|
|
||||||
expect(latestLlamaMacReleases[0].version).toBe('1.1.0')
|
|
||||||
|
|
||||||
// Test non-existent engine
|
|
||||||
const nonExistentReleases = await extension.getLatestReleasedEngine('non-existent' as InferenceEngine)
|
|
||||||
expect(nonExistentReleases).toHaveLength(0)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should install engine', async () => {
|
|
||||||
// Install existing engine variant that is not installed
|
|
||||||
const result = await extension.installEngine('llama', { variant: 'cuda', version: '1.0.0' })
|
|
||||||
|
|
||||||
expect(result.messages).toContain('Successfully installed')
|
|
||||||
|
|
||||||
const installedEngines = await extension.getInstalledEngines('llama')
|
|
||||||
expect(installedEngines).toHaveLength(2)
|
|
||||||
expect(installedEngines.some(e => e.variant === 'cuda')).toBe(true)
|
|
||||||
|
|
||||||
// Install non-existent engine
|
|
||||||
const newEngineResult = await extension.installEngine('new-engine', { variant: 'cpu', version: '1.0.0' })
|
|
||||||
|
|
||||||
expect(newEngineResult.messages).toContain('Successfully installed')
|
|
||||||
|
|
||||||
const engines = await extension.getEngines()
|
|
||||||
expect(engines['new-engine']).toBeDefined()
|
|
||||||
expect(engines['new-engine'].variants).toHaveLength(1)
|
|
||||||
expect(engines['new-engine'].variants[0].installed).toBe(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should add remote engine', async () => {
|
|
||||||
const result = await extension.addRemoteEngine({
|
|
||||||
name: 'remote-llm',
|
|
||||||
variant: 'remote',
|
|
||||||
version: '1.0.0',
|
|
||||||
url: 'https://example.com/remote-llm-api'
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(result.messages).toContain('Successfully added remote engine')
|
|
||||||
|
|
||||||
const engines = await extension.getEngines()
|
|
||||||
expect(engines['remote-llm']).toBeDefined()
|
|
||||||
expect(engines['remote-llm'].variants).toHaveLength(1)
|
|
||||||
expect(engines['remote-llm'].variants[0].url).toBe('https://example.com/remote-llm-api')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should uninstall engine', async () => {
|
|
||||||
const result = await extension.uninstallEngine('llama', { variant: 'cpu', version: '1.0.0' })
|
|
||||||
|
|
||||||
expect(result.messages).toContain('Successfully uninstalled')
|
|
||||||
|
|
||||||
const installedEngines = await extension.getInstalledEngines('llama')
|
|
||||||
expect(installedEngines).toHaveLength(0)
|
|
||||||
|
|
||||||
// Test uninstalling non-existent variant
|
|
||||||
const nonExistentResult = await extension.uninstallEngine('llama', { variant: 'non-existent', version: '1.0.0' })
|
|
||||||
|
|
||||||
expect(nonExistentResult.messages).toContain('not found')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should handle default variant when uninstalling', async () => {
|
|
||||||
// First install cuda variant
|
|
||||||
await extension.installEngine('llama', { variant: 'cuda', version: '1.0.0' })
|
|
||||||
|
|
||||||
// Set cuda as default
|
|
||||||
await extension.setDefaultEngineVariant('llama', { variant: 'cuda', version: '1.0.0' })
|
|
||||||
|
|
||||||
// Check that cuda is now default
|
|
||||||
let defaultVariant = await extension.getDefaultEngineVariant('llama')
|
|
||||||
expect(defaultVariant.variant).toBe('cuda')
|
|
||||||
|
|
||||||
// Uninstall cuda
|
|
||||||
await extension.uninstallEngine('llama', { variant: 'cuda', version: '1.0.0' })
|
|
||||||
|
|
||||||
// Check that default has changed to another installed variant
|
|
||||||
defaultVariant = await extension.getDefaultEngineVariant('llama')
|
|
||||||
expect(defaultVariant.variant).toBe('cpu')
|
|
||||||
|
|
||||||
// Uninstall all variants
|
|
||||||
await extension.uninstallEngine('llama', { variant: 'cpu', version: '1.0.0' })
|
|
||||||
|
|
||||||
// Check that default is now empty
|
|
||||||
defaultVariant = await extension.getDefaultEngineVariant('llama')
|
|
||||||
expect(defaultVariant.variant).toBe('')
|
|
||||||
expect(defaultVariant.version).toBe('')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should get default engine variant', async () => {
|
|
||||||
const llamaDefault = await extension.getDefaultEngineVariant('llama')
|
|
||||||
|
|
||||||
expect(llamaDefault.variant).toBe('cpu')
|
|
||||||
expect(llamaDefault.version).toBe('1.0.0')
|
|
||||||
|
|
||||||
// Test non-existent engine
|
|
||||||
const nonExistentDefault = await extension.getDefaultEngineVariant('non-existent' as InferenceEngine)
|
|
||||||
expect(nonExistentDefault.variant).toBe('')
|
|
||||||
expect(nonExistentDefault.version).toBe('')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should set default engine variant', async () => {
|
|
||||||
// Install cuda variant
|
|
||||||
await extension.installEngine('llama', { variant: 'cuda', version: '1.0.0' })
|
|
||||||
|
|
||||||
const result = await extension.setDefaultEngineVariant('llama', { variant: 'cuda', version: '1.0.0' })
|
|
||||||
|
|
||||||
expect(result.messages).toContain('Successfully set')
|
|
||||||
|
|
||||||
const defaultVariant = await extension.getDefaultEngineVariant('llama')
|
|
||||||
expect(defaultVariant.variant).toBe('cuda')
|
|
||||||
expect(defaultVariant.version).toBe('1.0.0')
|
|
||||||
|
|
||||||
// Test setting non-existent variant as default
|
|
||||||
const nonExistentResult = await extension.setDefaultEngineVariant('llama', { variant: 'non-existent', version: '1.0.0' })
|
|
||||||
|
|
||||||
expect(nonExistentResult.messages).toContain('not found')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should update engine', async () => {
|
|
||||||
const result = await extension.updateEngine('llama', { variant: 'cpu', version: '1.1.0' })
|
|
||||||
|
|
||||||
expect(result.messages).toContain('Successfully updated')
|
|
||||||
|
|
||||||
const engines = await extension.getEngines()
|
|
||||||
const cpuVariant = engines.llama.variants.find(v => v.variant === 'cpu')
|
|
||||||
expect(cpuVariant).toBeDefined()
|
|
||||||
expect(cpuVariant?.version).toBe('1.1.0')
|
|
||||||
|
|
||||||
// Default should also be updated since cpu was default
|
|
||||||
expect(engines.llama.default.version).toBe('1.1.0')
|
|
||||||
|
|
||||||
// Test updating non-existent variant
|
|
||||||
const nonExistentResult = await extension.updateEngine('llama', { variant: 'non-existent', version: '1.1.0' })
|
|
||||||
|
|
||||||
expect(nonExistentResult.messages).toContain('not found')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should add and get remote models', async () => {
|
|
||||||
const model: Model = {
|
|
||||||
id: 'remote-model-1',
|
|
||||||
name: 'Remote Model 1',
|
|
||||||
path: '/path/to/remote-model',
|
|
||||||
engine: 'llama',
|
|
||||||
format: 'gguf',
|
|
||||||
modelFormat: 'gguf',
|
|
||||||
source: 'remote',
|
|
||||||
status: 'ready',
|
|
||||||
contextLength: 4096,
|
|
||||||
sizeInGB: 4,
|
|
||||||
created: new Date().toISOString()
|
|
||||||
}
|
|
||||||
|
|
||||||
await extension.addRemoteModel(model)
|
|
||||||
|
|
||||||
const llamaModels = await extension.getRemoteModels('llama')
|
|
||||||
expect(llamaModels).toHaveLength(1)
|
|
||||||
expect(llamaModels[0].id).toBe('remote-model-1')
|
|
||||||
|
|
||||||
// Test non-existent engine
|
|
||||||
const nonExistentModels = await extension.getRemoteModels('non-existent')
|
|
||||||
expect(nonExistentModels).toHaveLength(0)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@ -1,115 +0,0 @@
|
|||||||
import {
|
|
||||||
Engines,
|
|
||||||
EngineVariant,
|
|
||||||
EngineReleased,
|
|
||||||
EngineConfig,
|
|
||||||
DefaultEngineVariant,
|
|
||||||
Model,
|
|
||||||
} from '../../types'
|
|
||||||
import { BaseExtension, ExtensionTypeEnum } from '../extension'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Engine management extension. Persists and retrieves engine management.
|
|
||||||
* @abstract
|
|
||||||
* @extends BaseExtension
|
|
||||||
*/
|
|
||||||
export abstract class EngineManagementExtension extends BaseExtension {
|
|
||||||
type(): ExtensionTypeEnum | undefined {
|
|
||||||
return ExtensionTypeEnum.Engine
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns A Promise that resolves to an object of list engines.
|
|
||||||
*/
|
|
||||||
abstract getEngines(): Promise<Engines>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name - Inference engine name.
|
|
||||||
* @returns A Promise that resolves to an array of installed engine.
|
|
||||||
*/
|
|
||||||
abstract getInstalledEngines(name: string): Promise<EngineVariant[]>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name - Inference engine name.
|
|
||||||
* @param version - Version of the engine.
|
|
||||||
* @param platform - Optional to sort by operating system. macOS, linux, windows.
|
|
||||||
* @returns A Promise that resolves to an array of latest released engine by version.
|
|
||||||
*/
|
|
||||||
abstract getReleasedEnginesByVersion(
|
|
||||||
name: string,
|
|
||||||
version: string,
|
|
||||||
platform?: string
|
|
||||||
): Promise<EngineReleased[]>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name - Inference engine name.
|
|
||||||
* @param platform - Optional to sort by operating system. macOS, linux, windows.
|
|
||||||
* @returns A Promise that resolves to an array of latest released engine.
|
|
||||||
*/
|
|
||||||
abstract getLatestReleasedEngine(
|
|
||||||
name: string,
|
|
||||||
platform?: string
|
|
||||||
): Promise<EngineReleased[]>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name - Inference engine name.
|
|
||||||
* @returns A Promise that resolves to intall of engine.
|
|
||||||
*/
|
|
||||||
abstract installEngine(
|
|
||||||
name: string,
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add a new remote engine
|
|
||||||
* @returns A Promise that resolves to intall of engine.
|
|
||||||
*/
|
|
||||||
abstract addRemoteEngine(
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name - Inference engine name.
|
|
||||||
* @returns A Promise that resolves to unintall of engine.
|
|
||||||
*/
|
|
||||||
abstract uninstallEngine(
|
|
||||||
name: string,
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param name - Inference engine name.
|
|
||||||
* @returns A Promise that resolves to an object of default engine.
|
|
||||||
*/
|
|
||||||
abstract getDefaultEngineVariant(
|
|
||||||
name: string
|
|
||||||
): Promise<DefaultEngineVariant>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @body variant - string
|
|
||||||
* @body version - string
|
|
||||||
* @returns A Promise that resolves to set default engine.
|
|
||||||
*/
|
|
||||||
abstract setDefaultEngineVariant(
|
|
||||||
name: string,
|
|
||||||
engineConfig: EngineConfig
|
|
||||||
): Promise<{ messages: string }>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns A Promise that resolves to update engine.
|
|
||||||
*/
|
|
||||||
abstract updateEngine(
|
|
||||||
name: string,
|
|
||||||
engineConfig?: EngineConfig
|
|
||||||
): Promise<{ messages: string }>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add a new remote model for a specific engine
|
|
||||||
*/
|
|
||||||
abstract addRemoteModel(model: Model): Promise<void>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns A Promise that resolves to an object of remote models list .
|
|
||||||
*/
|
|
||||||
abstract getRemoteModels(name: string): Promise<any>
|
|
||||||
}
|
|
||||||
@ -1,146 +0,0 @@
|
|||||||
import { HardwareManagementExtension } from './hardwareManagement'
|
|
||||||
import { ExtensionTypeEnum } from '../extension'
|
|
||||||
import { HardwareInformation } from '../../types'
|
|
||||||
|
|
||||||
// Mock implementation of HardwareManagementExtension
|
|
||||||
class MockHardwareManagementExtension extends HardwareManagementExtension {
|
|
||||||
private activeGpus: number[] = [0]
|
|
||||||
private mockHardwareInfo: HardwareInformation = {
|
|
||||||
cpu: {
|
|
||||||
manufacturer: 'Mock CPU Manufacturer',
|
|
||||||
brand: 'Mock CPU',
|
|
||||||
cores: 8,
|
|
||||||
physicalCores: 4,
|
|
||||||
speed: 3.5,
|
|
||||||
},
|
|
||||||
memory: {
|
|
||||||
total: 16 * 1024 * 1024 * 1024, // 16GB in bytes
|
|
||||||
free: 8 * 1024 * 1024 * 1024, // 8GB in bytes
|
|
||||||
},
|
|
||||||
gpus: [
|
|
||||||
{
|
|
||||||
id: 0,
|
|
||||||
vendor: 'Mock GPU Vendor',
|
|
||||||
model: 'Mock GPU Model 1',
|
|
||||||
memory: 8 * 1024 * 1024 * 1024, // 8GB in bytes
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 1,
|
|
||||||
vendor: 'Mock GPU Vendor',
|
|
||||||
model: 'Mock GPU Model 2',
|
|
||||||
memory: 4 * 1024 * 1024 * 1024, // 4GB in bytes
|
|
||||||
}
|
|
||||||
],
|
|
||||||
active_gpus: [0],
|
|
||||||
}
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
super('http://mock-url.com', 'mock-hardware-extension', 'Mock Hardware Extension', true, 'A mock hardware extension', '1.0.0')
|
|
||||||
}
|
|
||||||
|
|
||||||
onLoad(): void {
|
|
||||||
// Mock implementation
|
|
||||||
}
|
|
||||||
|
|
||||||
onUnload(): void {
|
|
||||||
// Mock implementation
|
|
||||||
}
|
|
||||||
|
|
||||||
async getHardware(): Promise<HardwareInformation> {
|
|
||||||
// Return a copy to prevent test side effects
|
|
||||||
return JSON.parse(JSON.stringify(this.mockHardwareInfo))
|
|
||||||
}
|
|
||||||
|
|
||||||
async setAvtiveGpu(data: { gpus: number[] }): Promise<{
|
|
||||||
message: string
|
|
||||||
activated_gpus: number[]
|
|
||||||
}> {
|
|
||||||
// Validate GPUs exist
|
|
||||||
const validGpus = data.gpus.filter(gpuId =>
|
|
||||||
this.mockHardwareInfo.gpus.some(gpu => gpu.id === gpuId)
|
|
||||||
)
|
|
||||||
|
|
||||||
if (validGpus.length === 0) {
|
|
||||||
throw new Error('No valid GPUs selected')
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update active GPUs
|
|
||||||
this.activeGpus = validGpus
|
|
||||||
this.mockHardwareInfo.active_gpus = validGpus
|
|
||||||
|
|
||||||
return {
|
|
||||||
message: 'GPU activation successful',
|
|
||||||
activated_gpus: validGpus
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('HardwareManagementExtension', () => {
|
|
||||||
let extension: MockHardwareManagementExtension
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
extension = new MockHardwareManagementExtension()
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should return the correct extension type', () => {
|
|
||||||
expect(extension.type()).toBe(ExtensionTypeEnum.Hardware)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should get hardware information', async () => {
|
|
||||||
const hardwareInfo = await extension.getHardware()
|
|
||||||
|
|
||||||
// Check CPU info
|
|
||||||
expect(hardwareInfo.cpu).toBeDefined()
|
|
||||||
expect(hardwareInfo.cpu.manufacturer).toBe('Mock CPU Manufacturer')
|
|
||||||
expect(hardwareInfo.cpu.cores).toBe(8)
|
|
||||||
|
|
||||||
// Check memory info
|
|
||||||
expect(hardwareInfo.memory).toBeDefined()
|
|
||||||
expect(hardwareInfo.memory.total).toBe(16 * 1024 * 1024 * 1024)
|
|
||||||
|
|
||||||
// Check GPU info
|
|
||||||
expect(hardwareInfo.gpus).toHaveLength(2)
|
|
||||||
expect(hardwareInfo.gpus[0].model).toBe('Mock GPU Model 1')
|
|
||||||
expect(hardwareInfo.gpus[1].model).toBe('Mock GPU Model 2')
|
|
||||||
|
|
||||||
// Check active GPUs
|
|
||||||
expect(hardwareInfo.active_gpus).toEqual([0])
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should set active GPUs', async () => {
|
|
||||||
const result = await extension.setAvtiveGpu({ gpus: [1] })
|
|
||||||
|
|
||||||
expect(result.message).toBe('GPU activation successful')
|
|
||||||
expect(result.activated_gpus).toEqual([1])
|
|
||||||
|
|
||||||
// Verify the change in hardware info
|
|
||||||
const hardwareInfo = await extension.getHardware()
|
|
||||||
expect(hardwareInfo.active_gpus).toEqual([1])
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should set multiple active GPUs', async () => {
|
|
||||||
const result = await extension.setAvtiveGpu({ gpus: [0, 1] })
|
|
||||||
|
|
||||||
expect(result.message).toBe('GPU activation successful')
|
|
||||||
expect(result.activated_gpus).toEqual([0, 1])
|
|
||||||
|
|
||||||
// Verify the change in hardware info
|
|
||||||
const hardwareInfo = await extension.getHardware()
|
|
||||||
expect(hardwareInfo.active_gpus).toEqual([0, 1])
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should throw error for invalid GPU ids', async () => {
|
|
||||||
await expect(extension.setAvtiveGpu({ gpus: [999] })).rejects.toThrow('No valid GPUs selected')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should handle mix of valid and invalid GPU ids', async () => {
|
|
||||||
const result = await extension.setAvtiveGpu({ gpus: [0, 999] })
|
|
||||||
|
|
||||||
// Should only activate valid GPUs
|
|
||||||
expect(result.activated_gpus).toEqual([0])
|
|
||||||
|
|
||||||
// Verify the change in hardware info
|
|
||||||
const hardwareInfo = await extension.getHardware()
|
|
||||||
expect(hardwareInfo.active_gpus).toEqual([0])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@ -1,26 +0,0 @@
|
|||||||
import { HardwareInformation } from '../../types'
|
|
||||||
import { BaseExtension, ExtensionTypeEnum } from '../extension'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Engine management extension. Persists and retrieves engine management.
|
|
||||||
* @abstract
|
|
||||||
* @extends BaseExtension
|
|
||||||
*/
|
|
||||||
export abstract class HardwareManagementExtension extends BaseExtension {
|
|
||||||
type(): ExtensionTypeEnum | undefined {
|
|
||||||
return ExtensionTypeEnum.Hardware
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns A Promise that resolves to an object of list hardware.
|
|
||||||
*/
|
|
||||||
abstract getHardware(): Promise<HardwareInformation>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns A Promise that resolves to an object of set active gpus.
|
|
||||||
*/
|
|
||||||
abstract setActiveGpu(data: { gpus: number[] }): Promise<{
|
|
||||||
message: string
|
|
||||||
activated_gpus: number[]
|
|
||||||
}>
|
|
||||||
}
|
|
||||||
@ -1,7 +1,7 @@
|
|||||||
|
import { describe, test, expect } from 'vitest'
|
||||||
import { ConversationalExtension } from './index';
|
import { ConversationalExtension } from './index';
|
||||||
import { InferenceExtension } from './index';
|
import { InferenceExtension } from './index';
|
||||||
import { AssistantExtension } from './index';
|
import { AssistantExtension } from './index';
|
||||||
import { ModelExtension } from './index';
|
|
||||||
import * as Engines from './index';
|
import * as Engines from './index';
|
||||||
|
|
||||||
describe('index.ts exports', () => {
|
describe('index.ts exports', () => {
|
||||||
@ -17,9 +17,6 @@ describe('index.ts exports', () => {
|
|||||||
expect(AssistantExtension).toBeDefined();
|
expect(AssistantExtension).toBeDefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
test('should export ModelExtension', () => {
|
|
||||||
expect(ModelExtension).toBeDefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
test('should export Engines', () => {
|
test('should export Engines', () => {
|
||||||
expect(Engines).toBeDefined();
|
expect(Engines).toBeDefined();
|
||||||
|
|||||||
@ -9,29 +9,12 @@ export { ConversationalExtension } from './conversational'
|
|||||||
*/
|
*/
|
||||||
export { InferenceExtension } from './inference'
|
export { InferenceExtension } from './inference'
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Assistant extension for managing assistants.
|
* Assistant extension for managing assistants.
|
||||||
*/
|
*/
|
||||||
export { AssistantExtension } from './assistant'
|
export { AssistantExtension } from './assistant'
|
||||||
|
|
||||||
/**
|
|
||||||
* Model extension for managing models.
|
|
||||||
*/
|
|
||||||
export { ModelExtension } from './model'
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Base AI Engines.
|
* Base AI Engines.
|
||||||
*/
|
*/
|
||||||
export * from './engines'
|
export * from './engines'
|
||||||
|
|
||||||
/**
|
|
||||||
* Engines Management
|
|
||||||
*/
|
|
||||||
export * from './enginesManagement'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Hardware Management
|
|
||||||
*/
|
|
||||||
export * from './hardwareManagement'
|
|
||||||
|
|||||||
@ -1,3 +1,4 @@
|
|||||||
|
import { describe, it, expect, beforeEach } from 'vitest'
|
||||||
import { MessageRequest, ThreadMessage } from '../../types'
|
import { MessageRequest, ThreadMessage } from '../../types'
|
||||||
import { BaseExtension, ExtensionTypeEnum } from '../extension'
|
import { BaseExtension, ExtensionTypeEnum } from '../extension'
|
||||||
import { InferenceExtension } from './'
|
import { InferenceExtension } from './'
|
||||||
|
|||||||
@ -1,286 +0,0 @@
|
|||||||
import { ModelExtension } from './model'
|
|
||||||
import { ExtensionTypeEnum } from '../extension'
|
|
||||||
import { Model, OptionType, ModelSource } from '../../types'
|
|
||||||
|
|
||||||
// Mock implementation of ModelExtension
|
|
||||||
class MockModelExtension extends ModelExtension {
|
|
||||||
private models: Model[] = []
|
|
||||||
private sources: ModelSource[] = []
|
|
||||||
private loadedModels: Set<string> = new Set()
|
|
||||||
private modelsPulling: Set<string> = new Set()
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
super('http://mock-url.com', 'mock-model-extension', 'Mock Model Extension', true, 'A mock model extension', '1.0.0')
|
|
||||||
}
|
|
||||||
|
|
||||||
onLoad(): void {
|
|
||||||
// Mock implementation
|
|
||||||
}
|
|
||||||
|
|
||||||
onUnload(): void {
|
|
||||||
// Mock implementation
|
|
||||||
}
|
|
||||||
|
|
||||||
async configurePullOptions(configs: { [key: string]: any }): Promise<any> {
|
|
||||||
return configs
|
|
||||||
}
|
|
||||||
|
|
||||||
async getModels(): Promise<Model[]> {
|
|
||||||
return this.models
|
|
||||||
}
|
|
||||||
|
|
||||||
async pullModel(model: string, id?: string, name?: string): Promise<void> {
|
|
||||||
const modelId = id || `model-${Date.now()}`
|
|
||||||
this.modelsPulling.add(modelId)
|
|
||||||
|
|
||||||
// Simulate model pull by adding it to the model list
|
|
||||||
const newModel: Model = {
|
|
||||||
id: modelId,
|
|
||||||
path: `/models/${model}`,
|
|
||||||
name: name || model,
|
|
||||||
source: 'mock-source',
|
|
||||||
modelFormat: 'mock-format',
|
|
||||||
engine: 'mock-engine',
|
|
||||||
format: 'mock-format',
|
|
||||||
status: 'ready',
|
|
||||||
contextLength: 2048,
|
|
||||||
sizeInGB: 2,
|
|
||||||
created: new Date().toISOString(),
|
|
||||||
pullProgress: {
|
|
||||||
percent: 100,
|
|
||||||
transferred: 0,
|
|
||||||
total: 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
this.models.push(newModel)
|
|
||||||
this.loadedModels.add(modelId)
|
|
||||||
this.modelsPulling.delete(modelId)
|
|
||||||
}
|
|
||||||
|
|
||||||
async cancelModelPull(modelId: string): Promise<void> {
|
|
||||||
this.modelsPulling.delete(modelId)
|
|
||||||
// Remove the model if it's in the pulling state
|
|
||||||
this.models = this.models.filter(m => m.id !== modelId)
|
|
||||||
}
|
|
||||||
|
|
||||||
async importModel(
|
|
||||||
model: string,
|
|
||||||
modelPath: string,
|
|
||||||
name?: string,
|
|
||||||
optionType?: OptionType
|
|
||||||
): Promise<void> {
|
|
||||||
const newModel: Model = {
|
|
||||||
id: `model-${Date.now()}`,
|
|
||||||
path: modelPath,
|
|
||||||
name: name || model,
|
|
||||||
source: 'local',
|
|
||||||
modelFormat: optionType?.format || 'mock-format',
|
|
||||||
engine: optionType?.engine || 'mock-engine',
|
|
||||||
format: optionType?.format || 'mock-format',
|
|
||||||
status: 'ready',
|
|
||||||
contextLength: optionType?.contextLength || 2048,
|
|
||||||
sizeInGB: 2,
|
|
||||||
created: new Date().toISOString(),
|
|
||||||
}
|
|
||||||
|
|
||||||
this.models.push(newModel)
|
|
||||||
this.loadedModels.add(newModel.id)
|
|
||||||
}
|
|
||||||
|
|
||||||
async updateModel(modelInfo: Partial<Model>): Promise<Model> {
|
|
||||||
if (!modelInfo.id) throw new Error('Model ID is required')
|
|
||||||
|
|
||||||
const index = this.models.findIndex(m => m.id === modelInfo.id)
|
|
||||||
if (index === -1) throw new Error('Model not found')
|
|
||||||
|
|
||||||
this.models[index] = { ...this.models[index], ...modelInfo }
|
|
||||||
return this.models[index]
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteModel(modelId: string): Promise<void> {
|
|
||||||
this.models = this.models.filter(m => m.id !== modelId)
|
|
||||||
this.loadedModels.delete(modelId)
|
|
||||||
}
|
|
||||||
|
|
||||||
async isModelLoaded(modelId: string): Promise<boolean> {
|
|
||||||
return this.loadedModels.has(modelId)
|
|
||||||
}
|
|
||||||
|
|
||||||
async getSources(): Promise<ModelSource[]> {
|
|
||||||
return this.sources
|
|
||||||
}
|
|
||||||
|
|
||||||
async addSource(source: string): Promise<void> {
|
|
||||||
const newSource: ModelSource = {
|
|
||||||
id: `source-${Date.now()}`,
|
|
||||||
url: source,
|
|
||||||
name: `Source ${this.sources.length + 1}`,
|
|
||||||
type: 'mock-type'
|
|
||||||
}
|
|
||||||
|
|
||||||
this.sources.push(newSource)
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteSource(sourceId: string): Promise<void> {
|
|
||||||
this.sources = this.sources.filter(s => s.id !== sourceId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('ModelExtension', () => {
|
|
||||||
let extension: MockModelExtension
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
extension = new MockModelExtension()
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should return the correct extension type', () => {
|
|
||||||
expect(extension.type()).toBe(ExtensionTypeEnum.Model)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should configure pull options', async () => {
|
|
||||||
const configs = { apiKey: 'test-key', baseUrl: 'https://test-url.com' }
|
|
||||||
const result = await extension.configurePullOptions(configs)
|
|
||||||
expect(result).toEqual(configs)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should add and get models', async () => {
|
|
||||||
await extension.pullModel('test-model', 'test-id', 'Test Model')
|
|
||||||
|
|
||||||
const models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(1)
|
|
||||||
expect(models[0].id).toBe('test-id')
|
|
||||||
expect(models[0].name).toBe('Test Model')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should pull model with default id and name', async () => {
|
|
||||||
await extension.pullModel('test-model')
|
|
||||||
|
|
||||||
const models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(1)
|
|
||||||
expect(models[0].name).toBe('test-model')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should cancel model pull', async () => {
|
|
||||||
await extension.pullModel('test-model', 'test-id')
|
|
||||||
|
|
||||||
// Verify model exists
|
|
||||||
let models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(1)
|
|
||||||
|
|
||||||
// Cancel the pull
|
|
||||||
await extension.cancelModelPull('test-id')
|
|
||||||
|
|
||||||
// Verify model was removed
|
|
||||||
models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(0)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should import model', async () => {
|
|
||||||
const optionType: OptionType = {
|
|
||||||
engine: 'test-engine',
|
|
||||||
format: 'test-format',
|
|
||||||
contextLength: 4096
|
|
||||||
}
|
|
||||||
|
|
||||||
await extension.importModel('test-model', '/path/to/model', 'Imported Model', optionType)
|
|
||||||
|
|
||||||
const models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(1)
|
|
||||||
expect(models[0].name).toBe('Imported Model')
|
|
||||||
expect(models[0].engine).toBe('test-engine')
|
|
||||||
expect(models[0].format).toBe('test-format')
|
|
||||||
expect(models[0].contextLength).toBe(4096)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should import model with default values', async () => {
|
|
||||||
await extension.importModel('test-model', '/path/to/model')
|
|
||||||
|
|
||||||
const models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(1)
|
|
||||||
expect(models[0].name).toBe('test-model')
|
|
||||||
expect(models[0].engine).toBe('mock-engine')
|
|
||||||
expect(models[0].format).toBe('mock-format')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should update model', async () => {
|
|
||||||
await extension.pullModel('test-model', 'test-id', 'Test Model')
|
|
||||||
|
|
||||||
const updatedModel = await extension.updateModel({
|
|
||||||
id: 'test-id',
|
|
||||||
name: 'Updated Model',
|
|
||||||
contextLength: 8192
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(updatedModel.name).toBe('Updated Model')
|
|
||||||
expect(updatedModel.contextLength).toBe(8192)
|
|
||||||
|
|
||||||
// Verify changes persisted
|
|
||||||
const models = await extension.getModels()
|
|
||||||
expect(models[0].name).toBe('Updated Model')
|
|
||||||
expect(models[0].contextLength).toBe(8192)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should throw error when updating non-existent model', async () => {
|
|
||||||
await expect(extension.updateModel({
|
|
||||||
id: 'non-existent',
|
|
||||||
name: 'Updated Model'
|
|
||||||
})).rejects.toThrow('Model not found')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should throw error when updating model without ID', async () => {
|
|
||||||
await expect(extension.updateModel({
|
|
||||||
name: 'Updated Model'
|
|
||||||
})).rejects.toThrow('Model ID is required')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should delete model', async () => {
|
|
||||||
await extension.pullModel('test-model', 'test-id')
|
|
||||||
|
|
||||||
// Verify model exists
|
|
||||||
let models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(1)
|
|
||||||
|
|
||||||
// Delete the model
|
|
||||||
await extension.deleteModel('test-id')
|
|
||||||
|
|
||||||
// Verify model was removed
|
|
||||||
models = await extension.getModels()
|
|
||||||
expect(models).toHaveLength(0)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should check if model is loaded', async () => {
|
|
||||||
await extension.pullModel('test-model', 'test-id')
|
|
||||||
|
|
||||||
// Check if model is loaded
|
|
||||||
const isLoaded = await extension.isModelLoaded('test-id')
|
|
||||||
expect(isLoaded).toBe(true)
|
|
||||||
|
|
||||||
// Check if non-existent model is loaded
|
|
||||||
const nonExistentLoaded = await extension.isModelLoaded('non-existent')
|
|
||||||
expect(nonExistentLoaded).toBe(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should add and get sources', async () => {
|
|
||||||
await extension.addSource('https://test-source.com')
|
|
||||||
|
|
||||||
const sources = await extension.getSources()
|
|
||||||
expect(sources).toHaveLength(1)
|
|
||||||
expect(sources[0].url).toBe('https://test-source.com')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('should delete source', async () => {
|
|
||||||
await extension.addSource('https://test-source.com')
|
|
||||||
|
|
||||||
// Get the source ID
|
|
||||||
const sources = await extension.getSources()
|
|
||||||
const sourceId = sources[0].id
|
|
||||||
|
|
||||||
// Delete the source
|
|
||||||
await extension.deleteSource(sourceId)
|
|
||||||
|
|
||||||
// Verify source was removed
|
|
||||||
const updatedSources = await extension.getSources()
|
|
||||||
expect(updatedSources).toHaveLength(0)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@ -1,48 +0,0 @@
|
|||||||
import { BaseExtension, ExtensionTypeEnum } from '../extension'
|
|
||||||
import { Model, ModelInterface, ModelSource, OptionType } from '../../types'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Model extension for managing models.
|
|
||||||
*/
|
|
||||||
export abstract class ModelExtension
|
|
||||||
extends BaseExtension
|
|
||||||
implements ModelInterface
|
|
||||||
{
|
|
||||||
/**
|
|
||||||
* Model extension type.
|
|
||||||
*/
|
|
||||||
type(): ExtensionTypeEnum | undefined {
|
|
||||||
return ExtensionTypeEnum.Model
|
|
||||||
}
|
|
||||||
|
|
||||||
abstract configurePullOptions(configs: { [key: string]: any }): Promise<any>
|
|
||||||
abstract getModels(): Promise<Model[]>
|
|
||||||
abstract pullModel(model: string, id?: string, name?: string): Promise<void>
|
|
||||||
abstract cancelModelPull(modelId: string): Promise<void>
|
|
||||||
abstract importModel(
|
|
||||||
model: string,
|
|
||||||
modePath: string,
|
|
||||||
name?: string,
|
|
||||||
optionType?: OptionType
|
|
||||||
): Promise<void>
|
|
||||||
abstract updateModel(modelInfo: Partial<Model>): Promise<Model>
|
|
||||||
abstract deleteModel(model: string): Promise<void>
|
|
||||||
abstract isModelLoaded(model: string): Promise<boolean>
|
|
||||||
/**
|
|
||||||
* Get model sources
|
|
||||||
*/
|
|
||||||
abstract getSources(): Promise<ModelSource[]>
|
|
||||||
/**
|
|
||||||
* Add a model source
|
|
||||||
*/
|
|
||||||
abstract addSource(source: string): Promise<void>
|
|
||||||
/**
|
|
||||||
* Delete a model source
|
|
||||||
*/
|
|
||||||
abstract deleteSource(source: string): Promise<void>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Fetch models hub
|
|
||||||
*/
|
|
||||||
abstract fetchModelsHub(): Promise<void>
|
|
||||||
}
|
|
||||||
@ -1,21 +1,22 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from 'vitest'
|
||||||
import { fs } from './fs'
|
import { fs } from './fs'
|
||||||
|
|
||||||
describe('fs module', () => {
|
describe('fs module', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
globalThis.core = {
|
globalThis.core = {
|
||||||
api: {
|
api: {
|
||||||
writeFileSync: jest.fn(),
|
writeFileSync: vi.fn(),
|
||||||
writeBlob: jest.fn(),
|
writeBlob: vi.fn(),
|
||||||
readFileSync: jest.fn(),
|
readFileSync: vi.fn(),
|
||||||
existsSync: jest.fn(),
|
existsSync: vi.fn(),
|
||||||
readdirSync: jest.fn(),
|
readdirSync: vi.fn(),
|
||||||
mkdir: jest.fn(),
|
mkdir: vi.fn(),
|
||||||
rm: jest.fn(),
|
rm: vi.fn(),
|
||||||
unlinkSync: jest.fn(),
|
unlinkSync: vi.fn(),
|
||||||
appendFileSync: jest.fn(),
|
appendFileSync: vi.fn(),
|
||||||
copyFile: jest.fn(),
|
copyFile: vi.fn(),
|
||||||
getGgufFiles: jest.fn(),
|
getGgufFiles: vi.fn(),
|
||||||
fileStat: jest.fn(),
|
fileStat: vi.fn(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|||||||
@ -1,3 +1,4 @@
|
|||||||
|
import { describe, it, expect } from 'vitest'
|
||||||
import * as Core from './core'
|
import * as Core from './core'
|
||||||
import * as Events from './events'
|
import * as Events from './events'
|
||||||
import * as FileSystem from './fs'
|
import * as FileSystem from './fs'
|
||||||
|
|||||||
@ -1,10 +1,11 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from 'vitest'
|
||||||
import { ModelManager } from './manager'
|
import { ModelManager } from './manager'
|
||||||
import { Model, ModelEvent } from '../../types'
|
import { Model, ModelEvent } from '../../types'
|
||||||
import { events } from '../events'
|
import { events } from '../events'
|
||||||
|
|
||||||
jest.mock('../events', () => ({
|
vi.mock('../events', () => ({
|
||||||
events: {
|
events: {
|
||||||
emit: jest.fn(),
|
emit: vi.fn(),
|
||||||
},
|
},
|
||||||
}))
|
}))
|
||||||
|
|
||||||
@ -20,7 +21,7 @@ describe('ModelManager', () => {
|
|||||||
let mockModel: Model
|
let mockModel: Model
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
jest.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
;(global.window as any).core = {}
|
;(global.window as any).core = {}
|
||||||
modelManager = new ModelManager()
|
modelManager = new ModelManager()
|
||||||
mockModel = {
|
mockModel = {
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
// web/utils/modelParam.test.ts
|
// web/utils/modelParam.test.ts
|
||||||
|
import { describe, it, expect } from 'vitest'
|
||||||
import {
|
import {
|
||||||
normalizeValue,
|
normalizeValue,
|
||||||
validationRules,
|
validationRules,
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
|
|
||||||
|
import { it, expect } from 'vitest'
|
||||||
|
|
||||||
it('should declare global object core when importing the module and then deleting it', () => {
|
it('should declare global object core when importing the module and then deleting it', () => {
|
||||||
import('./index');
|
import('./index');
|
||||||
|
|||||||
@ -1,10 +0,0 @@
|
|||||||
import { RequestAdapter } from './adapter';
|
|
||||||
|
|
||||||
it('should return undefined for unknown route', () => {
|
|
||||||
const adapter = new RequestAdapter();
|
|
||||||
const route = 'unknownRoute';
|
|
||||||
|
|
||||||
const result = adapter.process(route, 'arg1', 'arg2');
|
|
||||||
|
|
||||||
expect(result).toBeUndefined();
|
|
||||||
});
|
|
||||||
@ -1,37 +0,0 @@
|
|||||||
import {
|
|
||||||
AppRoute,
|
|
||||||
ExtensionRoute,
|
|
||||||
FileManagerRoute,
|
|
||||||
FileSystemRoute,
|
|
||||||
} from '../../../types/api'
|
|
||||||
import { FileSystem } from '../processors/fs'
|
|
||||||
import { Extension } from '../processors/extension'
|
|
||||||
import { FSExt } from '../processors/fsExt'
|
|
||||||
import { App } from '../processors/app'
|
|
||||||
|
|
||||||
export class RequestAdapter {
|
|
||||||
fileSystem: FileSystem
|
|
||||||
extension: Extension
|
|
||||||
fsExt: FSExt
|
|
||||||
app: App
|
|
||||||
|
|
||||||
constructor(observer?: Function) {
|
|
||||||
this.fileSystem = new FileSystem()
|
|
||||||
this.extension = new Extension()
|
|
||||||
this.fsExt = new FSExt()
|
|
||||||
this.app = new App()
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Clearer Factory pattern here
|
|
||||||
process(route: string, ...args: any) {
|
|
||||||
if (route in FileSystemRoute) {
|
|
||||||
return this.fileSystem.process(route, ...args)
|
|
||||||
} else if (route in ExtensionRoute) {
|
|
||||||
return this.extension.process(route, ...args)
|
|
||||||
} else if (route in FileManagerRoute) {
|
|
||||||
return this.fsExt.process(route, ...args)
|
|
||||||
} else if (route in AppRoute) {
|
|
||||||
return this.app.process(route, ...args)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,25 +0,0 @@
|
|||||||
import { CoreRoutes } from '../../../types/api';
|
|
||||||
import { RequestHandler } from './handler';
|
|
||||||
import { RequestAdapter } from './adapter';
|
|
||||||
|
|
||||||
it('should not call handler if CoreRoutes is empty', () => {
|
|
||||||
const mockHandler = jest.fn();
|
|
||||||
const mockObserver = jest.fn();
|
|
||||||
const requestHandler = new RequestHandler(mockHandler, mockObserver);
|
|
||||||
|
|
||||||
CoreRoutes.length = 0; // Ensure CoreRoutes is empty
|
|
||||||
|
|
||||||
requestHandler.handle();
|
|
||||||
|
|
||||||
expect(mockHandler).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should initialize handler and adapter correctly', () => {
|
|
||||||
const mockHandler = jest.fn();
|
|
||||||
const mockObserver = jest.fn();
|
|
||||||
const requestHandler = new RequestHandler(mockHandler, mockObserver);
|
|
||||||
|
|
||||||
expect(requestHandler.handler).toBe(mockHandler);
|
|
||||||
expect(requestHandler.adapter).toBeInstanceOf(RequestAdapter);
|
|
||||||
});
|
|
||||||
@ -1,20 +0,0 @@
|
|||||||
import { CoreRoutes } from '../../../types/api'
|
|
||||||
import { RequestAdapter } from './adapter'
|
|
||||||
|
|
||||||
export type Handler = (route: string, args: any) => any
|
|
||||||
|
|
||||||
export class RequestHandler {
|
|
||||||
handler: Handler
|
|
||||||
adapter: RequestAdapter
|
|
||||||
|
|
||||||
constructor(handler: Handler, observer?: Function) {
|
|
||||||
this.handler = handler
|
|
||||||
this.adapter = new RequestAdapter(observer)
|
|
||||||
}
|
|
||||||
|
|
||||||
handle() {
|
|
||||||
CoreRoutes.map((route) => {
|
|
||||||
this.handler(route, async (...args: any[]) => this.adapter.process(route, ...args))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1 +0,0 @@
|
|||||||
export * from './common/handler'
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
|
|
||||||
import { Processor } from './Processor';
|
|
||||||
|
|
||||||
it('should be defined', () => {
|
|
||||||
expect(Processor).toBeDefined();
|
|
||||||
});
|
|
||||||
@ -1,3 +0,0 @@
|
|||||||
export abstract class Processor {
|
|
||||||
abstract process(key: string, ...args: any[]): any
|
|
||||||
}
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
jest.mock('../../helper', () => ({
|
|
||||||
...jest.requireActual('../../helper'),
|
|
||||||
getJanDataFolderPath: () => './app',
|
|
||||||
}))
|
|
||||||
import { App } from './app'
|
|
||||||
|
|
||||||
it('should correctly retrieve basename', () => {
|
|
||||||
const app = new App()
|
|
||||||
const result = app.baseName('/path/to/file.txt')
|
|
||||||
expect(result).toBe('file.txt')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should correctly identify subdirectories', () => {
|
|
||||||
const app = new App()
|
|
||||||
const basePath = process.platform === 'win32' ? 'C:\\path\\to' : '/path/to'
|
|
||||||
const subPath =
|
|
||||||
process.platform === 'win32' ? 'C:\\path\\to\\subdir' : '/path/to/subdir'
|
|
||||||
const result = app.isSubdirectory(basePath, subPath)
|
|
||||||
expect(result).toBe(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should correctly join multiple paths', () => {
|
|
||||||
const app = new App()
|
|
||||||
const result = app.joinPath(['path', 'to', 'file'])
|
|
||||||
const expectedPath =
|
|
||||||
process.platform === 'win32' ? 'path\\to\\file' : 'path/to/file'
|
|
||||||
expect(result).toBe(expectedPath)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should call correct function with provided arguments using process method', () => {
|
|
||||||
const app = new App()
|
|
||||||
const mockFunc = jest.fn()
|
|
||||||
app.joinPath = mockFunc
|
|
||||||
app.process('joinPath', ['path1', 'path2'])
|
|
||||||
expect(mockFunc).toHaveBeenCalledWith(['path1', 'path2'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should retrieve the directory name from a file path (Unix/Windows)', async () => {
|
|
||||||
const app = new App()
|
|
||||||
const path = 'C:/Users/John Doe/Desktop/file.txt'
|
|
||||||
expect(await app.dirName(path)).toBe('C:/Users/John Doe/Desktop')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should retrieve the directory name when using file protocol', async () => {
|
|
||||||
const app = new App()
|
|
||||||
const path = 'file:/models/file.txt'
|
|
||||||
expect(await app.dirName(path)).toBe(
|
|
||||||
process.platform === 'win32' ? 'app\\models' : 'app/models'
|
|
||||||
)
|
|
||||||
})
|
|
||||||
@ -1,83 +0,0 @@
|
|||||||
import { basename, dirname, isAbsolute, join, relative } from 'path'
|
|
||||||
|
|
||||||
import { Processor } from './Processor'
|
|
||||||
import {
|
|
||||||
log as writeLog,
|
|
||||||
getAppConfigurations as appConfiguration,
|
|
||||||
updateAppConfiguration,
|
|
||||||
normalizeFilePath,
|
|
||||||
getJanDataFolderPath,
|
|
||||||
} from '../../helper'
|
|
||||||
import { readdirSync, readFileSync } from 'fs'
|
|
||||||
|
|
||||||
export class App implements Processor {
|
|
||||||
observer?: Function
|
|
||||||
|
|
||||||
constructor(observer?: Function) {
|
|
||||||
this.observer = observer
|
|
||||||
}
|
|
||||||
|
|
||||||
process(key: string, ...args: any[]): any {
|
|
||||||
const instance = this as any
|
|
||||||
const func = instance[key]
|
|
||||||
return func(...args)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Joins multiple paths together, respect to the current OS.
|
|
||||||
*/
|
|
||||||
joinPath(args: any) {
|
|
||||||
return join(...('args' in args ? args.args : args))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get dirname of a file path.
|
|
||||||
* @param path - The file path to retrieve dirname.
|
|
||||||
*/
|
|
||||||
dirName(path: string) {
|
|
||||||
const arg =
|
|
||||||
path.startsWith(`file:/`) || path.startsWith(`file:\\`)
|
|
||||||
? join(getJanDataFolderPath(), normalizeFilePath(path))
|
|
||||||
: path
|
|
||||||
return dirname(arg)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checks if the given path is a subdirectory of the given directory.
|
|
||||||
*
|
|
||||||
* @param from - The path to check.
|
|
||||||
* @param to - The directory to check against.
|
|
||||||
*/
|
|
||||||
isSubdirectory(from: any, to: any) {
|
|
||||||
const rel = relative(from, to)
|
|
||||||
const isSubdir = rel && !rel.startsWith('..') && !isAbsolute(rel)
|
|
||||||
|
|
||||||
if (isSubdir === '') return false
|
|
||||||
else return isSubdir
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Retrieve basename from given path, respect to the current OS.
|
|
||||||
*/
|
|
||||||
baseName(args: any) {
|
|
||||||
return basename(args)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Log message to log file.
|
|
||||||
*/
|
|
||||||
log(args: any) {
|
|
||||||
writeLog(args)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get app configurations.
|
|
||||||
*/
|
|
||||||
getAppConfigurations() {
|
|
||||||
return appConfiguration()
|
|
||||||
}
|
|
||||||
|
|
||||||
async updateAppConfiguration(args: any) {
|
|
||||||
await updateAppConfiguration(args)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,40 +0,0 @@
|
|||||||
import { Extension } from './extension';
|
|
||||||
|
|
||||||
it('should call function associated with key in process method', () => {
|
|
||||||
const mockFunc = jest.fn();
|
|
||||||
const extension = new Extension();
|
|
||||||
(extension as any).testKey = mockFunc;
|
|
||||||
extension.process('testKey', 'arg1', 'arg2');
|
|
||||||
expect(mockFunc).toHaveBeenCalledWith('arg1', 'arg2');
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should_handle_empty_extension_list_for_install', async () => {
|
|
||||||
jest.mock('../../extension/store', () => ({
|
|
||||||
installExtensions: jest.fn(() => Promise.resolve([])),
|
|
||||||
}));
|
|
||||||
const extension = new Extension();
|
|
||||||
const result = await extension.installExtension([]);
|
|
||||||
expect(result).toEqual([]);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should_handle_empty_extension_list_for_update', async () => {
|
|
||||||
jest.mock('../../extension/store', () => ({
|
|
||||||
getExtension: jest.fn(() => ({ update: jest.fn(() => Promise.resolve(true)) })),
|
|
||||||
}));
|
|
||||||
const extension = new Extension();
|
|
||||||
const result = await extension.updateExtension([]);
|
|
||||||
expect(result).toEqual([]);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should_handle_empty_extension_list', async () => {
|
|
||||||
jest.mock('../../extension/store', () => ({
|
|
||||||
getExtension: jest.fn(() => ({ uninstall: jest.fn(() => Promise.resolve(true)) })),
|
|
||||||
removeExtension: jest.fn(),
|
|
||||||
}));
|
|
||||||
const extension = new Extension();
|
|
||||||
const result = await extension.uninstallExtension([]);
|
|
||||||
expect(result).toBe(true);
|
|
||||||
});
|
|
||||||
@ -1,88 +0,0 @@
|
|||||||
import { readdirSync } from 'fs'
|
|
||||||
import { join, extname } from 'path'
|
|
||||||
|
|
||||||
import { Processor } from './Processor'
|
|
||||||
import { ModuleManager } from '../../helper/module'
|
|
||||||
import { getJanExtensionsPath as getPath } from '../../helper'
|
|
||||||
import {
|
|
||||||
getActiveExtensions as getExtensions,
|
|
||||||
getExtension,
|
|
||||||
removeExtension,
|
|
||||||
installExtensions,
|
|
||||||
} from '../../extension/store'
|
|
||||||
import { appResourcePath } from '../../helper/path'
|
|
||||||
|
|
||||||
export class Extension implements Processor {
|
|
||||||
observer?: Function
|
|
||||||
|
|
||||||
constructor(observer?: Function) {
|
|
||||||
this.observer = observer
|
|
||||||
}
|
|
||||||
|
|
||||||
process(key: string, ...args: any[]): any {
|
|
||||||
const instance = this as any
|
|
||||||
const func = instance[key]
|
|
||||||
return func(...args)
|
|
||||||
}
|
|
||||||
|
|
||||||
invokeExtensionFunc(modulePath: string, method: string, ...params: any[]) {
|
|
||||||
const module = require(join(getPath(), modulePath))
|
|
||||||
ModuleManager.instance.setModule(modulePath, module)
|
|
||||||
|
|
||||||
if (typeof module[method] === 'function') {
|
|
||||||
return module[method](...params)
|
|
||||||
} else {
|
|
||||||
console.debug(module[method])
|
|
||||||
console.error(`Function "${method}" does not exist in the module.`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the paths of the base extensions.
|
|
||||||
* @returns An array of paths to the base extensions.
|
|
||||||
*/
|
|
||||||
async baseExtensions() {
|
|
||||||
const baseExtensionPath = join(appResourcePath(), 'pre-install')
|
|
||||||
return readdirSync(baseExtensionPath)
|
|
||||||
.filter((file) => extname(file) === '.tgz')
|
|
||||||
.map((file) => join(baseExtensionPath, file))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**MARK: Extension Manager handlers */
|
|
||||||
async installExtension(extensions: any) {
|
|
||||||
// Install and activate all provided extensions
|
|
||||||
const installed = await installExtensions(extensions)
|
|
||||||
return JSON.parse(JSON.stringify(installed))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register IPC route to uninstall a extension
|
|
||||||
async uninstallExtension(extensions: any) {
|
|
||||||
// Uninstall all provided extensions
|
|
||||||
for (const ext of extensions) {
|
|
||||||
const extension = getExtension(ext)
|
|
||||||
await extension.uninstall()
|
|
||||||
if (extension.name) removeExtension(extension.name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reload all renderer pages if needed
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register IPC route to update a extension
|
|
||||||
async updateExtension(extensions: any) {
|
|
||||||
// Update all provided extensions
|
|
||||||
const updated: any[] = []
|
|
||||||
for (const ext of extensions) {
|
|
||||||
const extension = getExtension(ext)
|
|
||||||
const res = await extension.update()
|
|
||||||
if (res) updated.push(extension)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reload all renderer pages if needed
|
|
||||||
return JSON.parse(JSON.stringify(updated))
|
|
||||||
}
|
|
||||||
|
|
||||||
getActiveExtensions() {
|
|
||||||
return JSON.parse(JSON.stringify(getExtensions()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
import { FileSystem } from './fs';
|
|
||||||
|
|
||||||
it('should throw an error when the route does not exist in process', async () => {
|
|
||||||
const fileSystem = new FileSystem();
|
|
||||||
await expect(fileSystem.process('nonExistentRoute', 'arg1')).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should throw an error for invalid argument in mkdir', async () => {
|
|
||||||
const fileSystem = new FileSystem();
|
|
||||||
expect(() => fileSystem.mkdir(123)).toThrow('mkdir error: Invalid argument [123]');
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should throw an error for invalid argument in rm', async () => {
|
|
||||||
const fileSystem = new FileSystem();
|
|
||||||
expect(() => fileSystem.rm(123)).toThrow('rm error: Invalid argument [123]');
|
|
||||||
});
|
|
||||||
@ -1,94 +0,0 @@
|
|||||||
import { join, resolve } from 'path'
|
|
||||||
import { normalizeFilePath } from '../../helper/path'
|
|
||||||
import { getJanDataFolderPath } from '../../helper'
|
|
||||||
import { Processor } from './Processor'
|
|
||||||
import fs from 'fs'
|
|
||||||
|
|
||||||
export class FileSystem implements Processor {
|
|
||||||
observer?: Function
|
|
||||||
private static moduleName = 'fs'
|
|
||||||
|
|
||||||
constructor(observer?: Function) {
|
|
||||||
this.observer = observer
|
|
||||||
}
|
|
||||||
|
|
||||||
process(route: string, ...args: any): any {
|
|
||||||
const instance = this as any
|
|
||||||
const func = instance[route]
|
|
||||||
if (func) {
|
|
||||||
return func(...args)
|
|
||||||
} else {
|
|
||||||
return import(FileSystem.moduleName).then((mdl) =>
|
|
||||||
mdl[route](
|
|
||||||
...args.map((arg: any, index: number) => {
|
|
||||||
const arg0 = args[0]
|
|
||||||
if ('args' in arg0) arg = arg0.args
|
|
||||||
if (Array.isArray(arg)) arg = arg[0]
|
|
||||||
if (index !== 0) {
|
|
||||||
return arg
|
|
||||||
}
|
|
||||||
if (index === 0 && typeof arg !== 'string') {
|
|
||||||
throw new Error(`Invalid argument ${JSON.stringify(args)}`)
|
|
||||||
}
|
|
||||||
const path =
|
|
||||||
arg.startsWith(`file:/`) || arg.startsWith(`file:\\`)
|
|
||||||
? join(getJanDataFolderPath(), normalizeFilePath(arg))
|
|
||||||
: arg
|
|
||||||
|
|
||||||
if (path.startsWith(`http://`) || path.startsWith(`https://`)) {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
const absolutePath = resolve(path)
|
|
||||||
return absolutePath
|
|
||||||
})
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
rm(...args: any): Promise<void> {
|
|
||||||
if (typeof args[0] !== 'string') {
|
|
||||||
throw new Error(`rm error: Invalid argument ${JSON.stringify(args)}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
let path = args[0]
|
|
||||||
if (path.startsWith(`file:/`) || path.startsWith(`file:\\`)) {
|
|
||||||
path = join(getJanDataFolderPath(), normalizeFilePath(path))
|
|
||||||
}
|
|
||||||
|
|
||||||
const absolutePath = resolve(path)
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
fs.rm(absolutePath, { recursive: true, force: true }, (err) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err)
|
|
||||||
} else {
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
mkdir(...args: any): Promise<void> {
|
|
||||||
if (typeof args[0] !== 'string') {
|
|
||||||
throw new Error(`mkdir error: Invalid argument ${JSON.stringify(args)}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
let path = args[0]
|
|
||||||
if (path.startsWith(`file:/`) || path.startsWith(`file:\\`)) {
|
|
||||||
path = join(getJanDataFolderPath(), normalizeFilePath(path))
|
|
||||||
}
|
|
||||||
|
|
||||||
const absolutePath = resolve(path)
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
fs.mkdir(absolutePath, { recursive: true }, (err) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err)
|
|
||||||
} else {
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,34 +0,0 @@
|
|||||||
import { FSExt } from './fsExt';
|
|
||||||
import { defaultAppConfig } from '../../helper';
|
|
||||||
|
|
||||||
it('should handle errors in writeBlob', () => {
|
|
||||||
const fsExt = new FSExt();
|
|
||||||
const consoleSpy = jest.spyOn(console, 'error').mockImplementation(() => {});
|
|
||||||
fsExt.writeBlob('invalid-path', 'data');
|
|
||||||
expect(consoleSpy).toHaveBeenCalled();
|
|
||||||
consoleSpy.mockRestore();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should call correct function in process method', () => {
|
|
||||||
const fsExt = new FSExt();
|
|
||||||
const mockFunction = jest.fn();
|
|
||||||
(fsExt as any).mockFunction = mockFunction;
|
|
||||||
fsExt.process('mockFunction', 'arg1', 'arg2');
|
|
||||||
expect(mockFunction).toHaveBeenCalledWith('arg1', 'arg2');
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should return correct user home path', () => {
|
|
||||||
const fsExt = new FSExt();
|
|
||||||
const userHomePath = fsExt.getUserHomePath();
|
|
||||||
expect(userHomePath).toBe(defaultAppConfig().data_folder);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
it('should return empty array when no files are provided', async () => {
|
|
||||||
const fsExt = new FSExt();
|
|
||||||
const result = await fsExt.getGgufFiles([]);
|
|
||||||
expect(result.supportedFiles).toEqual([]);
|
|
||||||
expect(result.unsupportedFiles).toEqual([]);
|
|
||||||
});
|
|
||||||
@ -1,130 +0,0 @@
|
|||||||
import { basename, join } from 'path'
|
|
||||||
import fs, { readdirSync } from 'fs'
|
|
||||||
import { appResourcePath, normalizeFilePath } from '../../helper/path'
|
|
||||||
import { defaultAppConfig, getJanDataFolderPath, getJanDataFolderPath as getPath } from '../../helper'
|
|
||||||
import { Processor } from './Processor'
|
|
||||||
import { FileStat } from '../../../types'
|
|
||||||
|
|
||||||
export class FSExt implements Processor {
|
|
||||||
observer?: Function
|
|
||||||
|
|
||||||
constructor(observer?: Function) {
|
|
||||||
this.observer = observer
|
|
||||||
}
|
|
||||||
|
|
||||||
process(key: string, ...args: any): any {
|
|
||||||
const instance = this as any
|
|
||||||
const func = instance[key]
|
|
||||||
return func(...args)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handles the 'getJanDataFolderPath' IPC event. This event is triggered to get the user space path.
|
|
||||||
getJanDataFolderPath() {
|
|
||||||
return Promise.resolve(getPath())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handles the 'getResourcePath' IPC event. This event is triggered to get the resource path.
|
|
||||||
getResourcePath() {
|
|
||||||
return appResourcePath()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handles the 'getUserHomePath' IPC event. This event is triggered to get the user app data path.
|
|
||||||
// CAUTION: This would not return OS home path but the app data path.
|
|
||||||
getUserHomePath() {
|
|
||||||
return defaultAppConfig().data_folder
|
|
||||||
}
|
|
||||||
|
|
||||||
// handle fs is directory here
|
|
||||||
fileStat(path: string, outsideJanDataFolder?: boolean) {
|
|
||||||
const normalizedPath = normalizeFilePath(path)
|
|
||||||
|
|
||||||
const fullPath = outsideJanDataFolder
|
|
||||||
? normalizedPath
|
|
||||||
: join(getJanDataFolderPath(), normalizedPath)
|
|
||||||
const isExist = fs.existsSync(fullPath)
|
|
||||||
if (!isExist) return undefined
|
|
||||||
|
|
||||||
const isDirectory = fs.lstatSync(fullPath).isDirectory()
|
|
||||||
const size = fs.statSync(fullPath).size
|
|
||||||
|
|
||||||
const fileStat: FileStat = {
|
|
||||||
isDirectory,
|
|
||||||
size,
|
|
||||||
}
|
|
||||||
|
|
||||||
return fileStat
|
|
||||||
}
|
|
||||||
|
|
||||||
writeBlob(path: string, data: any) {
|
|
||||||
try {
|
|
||||||
const normalizedPath = normalizeFilePath(path)
|
|
||||||
|
|
||||||
const dataBuffer = Buffer.from(data, 'base64')
|
|
||||||
const writePath = join(getJanDataFolderPath(), normalizedPath)
|
|
||||||
fs.writeFileSync(writePath, dataBuffer)
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`writeFile ${path} result: ${err}`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
copyFile(src: string, dest: string): Promise<void> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
fs.copyFile(src, dest, (err) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err)
|
|
||||||
} else {
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
async getGgufFiles(paths: string[]) {
|
|
||||||
const sanitizedFilePaths: {
|
|
||||||
path: string
|
|
||||||
name: string
|
|
||||||
size: number
|
|
||||||
}[] = []
|
|
||||||
for (const filePath of paths) {
|
|
||||||
const normalizedPath = normalizeFilePath(filePath)
|
|
||||||
|
|
||||||
const isExist = fs.existsSync(normalizedPath)
|
|
||||||
if (!isExist) continue
|
|
||||||
const fileStats = fs.statSync(normalizedPath)
|
|
||||||
if (!fileStats) continue
|
|
||||||
if (!fileStats.isDirectory()) {
|
|
||||||
const fileName = await basename(normalizedPath)
|
|
||||||
sanitizedFilePaths.push({
|
|
||||||
path: normalizedPath,
|
|
||||||
name: fileName,
|
|
||||||
size: fileStats.size,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
// allowing only one level of directory
|
|
||||||
const files = await readdirSync(normalizedPath)
|
|
||||||
|
|
||||||
for (const file of files) {
|
|
||||||
const fullPath = await join(normalizedPath, file)
|
|
||||||
const fileStats = await fs.statSync(fullPath)
|
|
||||||
if (!fileStats || fileStats.isDirectory()) continue
|
|
||||||
|
|
||||||
sanitizedFilePaths.push({
|
|
||||||
path: fullPath,
|
|
||||||
name: file,
|
|
||||||
size: fileStats.size,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const unsupportedFiles = sanitizedFilePaths.filter(
|
|
||||||
(file) => !file.path.endsWith('.gguf')
|
|
||||||
)
|
|
||||||
const supportedFiles = sanitizedFilePaths.filter((file) =>
|
|
||||||
file.path.endsWith('.gguf')
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
unsupportedFiles,
|
|
||||||
supportedFiles,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,122 +0,0 @@
|
|||||||
import Extension from './extension';
|
|
||||||
import { join } from 'path';
|
|
||||||
import 'pacote';
|
|
||||||
|
|
||||||
// Tests for the Extension lifecycle class (jest).
// NOTE(review): some tests below call jest.mock('pacote', ...) inside the
// test body; jest.mock is normally hoisted to module scope and must run
// before imports, so these in-test mocks may not take effect — confirm
// against the project's jest configuration.

// setActive must flip the private flag and notify subscribers.
it('should set active and call emitUpdate', () => {
  const extension = new Extension();
  extension.emitUpdate = jest.fn();

  extension.setActive(true);

  expect(extension._active).toBe(true);
  expect(extension.emitUpdate).toHaveBeenCalled();
});


// specifier combines origin and the version from installOptions.
it('should return correct specifier', () => {
  const origin = 'test-origin';
  const options = { version: '1.0.0' };
  const extension = new Extension(origin, options);

  expect(extension.specifier).toBe('test-origin@1.0.0');
});


// Constructor stores the origin and merges options over the defaults.
it('should set origin and installOptions in constructor', () => {
  const origin = 'test-origin';
  const options = { someOption: true };
  const extension = new Extension(origin, options);

  expect(extension.origin).toBe(origin);
  expect(extension.installOptions.someOption).toBe(true);
  expect(extension.installOptions.fullMetadata).toBe(true); // default option
});

// _install should derive the extension:// URL from the manifest name/main.
it('should install extension and set url', async () => {
  const origin = 'test-origin';
  const options = {};
  const extension = new Extension(origin, options);

  const mockManifest = {
    name: 'test-name',
    productName: 'Test Product',
    version: '1.0.0',
    main: 'index.js',
    description: 'Test description'
  };

  // NOTE(review): mock declared after Extension was imported — see header.
  jest.mock('pacote', () => ({
    manifest: jest.fn().mockResolvedValue(mockManifest),
    extract: jest.fn().mockResolvedValue(null)
  }));

  extension.emitUpdate = jest.fn();
  await extension._install();

  expect(extension.url).toBe('extension://test-name/index.js');
  expect(extension.emitUpdate).toHaveBeenCalled();
});


// Every subscribed listener receives the extension instance on update.
it('should call all listeners in emitUpdate', () => {
  const extension = new Extension();
  const callback1 = jest.fn();
  const callback2 = jest.fn();

  extension.subscribe('listener1', callback1);
  extension.subscribe('listener2', callback2);

  extension.emitUpdate();

  expect(callback1).toHaveBeenCalledWith(extension);
  expect(callback2).toHaveBeenCalledWith(extension);
});


it('should remove listener in unsubscribe', () => {
  const extension = new Extension();
  const callback = jest.fn();

  extension.subscribe('testListener', callback);
  extension.unsubscribe('testListener');

  expect(extension.listeners['testListener']).toBeUndefined();
});


it('should add listener in subscribe', () => {
  const extension = new Extension();
  const callback = jest.fn();

  extension.subscribe('testListener', callback);

  expect(extension.listeners['testListener']).toBe(callback);
});


// getManifest copies the manifest fields onto the instance.
it('should set properties from manifest', async () => {
  const origin = 'test-origin';
  const options = {};
  const extension = new Extension(origin, options);

  const mockManifest = {
    name: 'test-name',
    productName: 'Test Product',
    version: '1.0.0',
    main: 'index.js',
    description: 'Test description'
  };

  // NOTE(review): in-test jest.mock — see header note on hoisting.
  jest.mock('pacote', () => ({
    manifest: jest.fn().mockResolvedValue(mockManifest)
  }));

  await extension.getManifest();

  expect(extension.name).toBe('test-name');
  expect(extension.productName).toBe('Test Product');
  expect(extension.version).toBe('1.0.0');
  expect(extension.main).toBe('index.js');
  expect(extension.description).toBe('Test description');
});
|
|
||||||
|
|
||||||
@ -1,209 +0,0 @@
|
|||||||
import { rmdirSync } from 'fs'
|
|
||||||
import { resolve, join } from 'path'
|
|
||||||
import { ExtensionManager } from './manager'
|
|
||||||
|
|
||||||
/**
 * An NPM package that can be used as an extension.
 * Used to hold all the information and functions necessary to handle the extension lifecycle.
 */
export default class Extension {
  /**
   * @property {string} origin Original specification provided to fetch the package.
   * @property {Object} installOptions Options provided to pacote when fetching the manifest.
   * @property {string} name The name of the extension as defined in the manifest.
   * @property {string} productName The display name of the extension as defined in the manifest.
   * @property {string} url Electron URL where the package can be accessed.
   * @property {string} version Version of the package as defined in the manifest.
   * @property {string} main The entry point as defined in the main entry of the manifest.
   * @property {string} description The description of extension as defined in the manifest.
   */
  origin?: string
  installOptions: any
  name?: string
  productName?: string
  url?: string
  version?: string
  main?: string
  description?: string

  /** @private Whether this extension should be registered with its activation points. */
  _active = false

  /**
   * @private
   * @property {Object.<string, Function>} listeners A list of callbacks to be executed when the Extension is updated.
   */
  listeners: Record<string, (obj: any) => void> = {}

  /**
   * Set installOptions with defaults for options that have not been provided.
   * @param {string} [origin] Original specification provided to fetch the package.
   * @param {Object} [options] Options provided to pacote when fetching the manifest.
   */
  constructor(origin?: string, options = {}) {
    const Arborist = require('@npmcli/arborist')
    const defaultOpts = {
      version: false,
      fullMetadata: true,
      Arborist,
    }

    this.origin = origin
    // Caller-supplied options override the defaults above.
    this.installOptions = { ...defaultOpts, ...options }
  }

  /**
   * Package name with version number.
   * A falsy installOptions.version yields the bare origin (no '@version' suffix).
   * @type {string}
   */
  get specifier() {
    return (
      this.origin +
      (this.installOptions.version ? '@' + this.installOptions.version : '')
    )
  }

  /**
   * Whether the extension should be registered with its activation points.
   * @type {boolean}
   */
  get active() {
    return this._active
  }

  /**
   * Set Package details based on it's manifest
   * @returns {Promise.<Boolean>} Resolves to true when the action completed
   */
  async getManifest() {
    // Get the package's manifest (package.json object)
    // NOTE(review): the catch below only covers synchronous failures
    // (e.g. require). The pacote.manifest promise is returned, not awaited,
    // so an async rejection bypasses this catch — confirm intended.
    try {
      const pacote = require('pacote')
      return pacote
        .manifest(this.specifier, this.installOptions)
        .then((mnf: any) => {
          // set the Package properties based on the it's manifest
          this.name = mnf.name
          this.productName = mnf.productName as string | undefined
          this.version = mnf.version
          this.main = mnf.main
          this.description = mnf.description
        })
    } catch (error) {
      throw new Error(
        `Package ${this.origin} does not contain a valid manifest: ${error}`
      )
    }
  }

  /**
   * Extract extension to extensions folder.
   * @returns {Promise.<Array.<Extension>>} This extension wrapped in a one-element array.
   * @private
   */
  async _install() {
    try {
      // import the manifest details
      await this.getManifest()

      // Install the package in a child folder of the given folder
      const pacote = require('pacote')
      await pacote.extract(
        this.specifier,
        join(
          ExtensionManager.instance.getExtensionsPath() ?? '',
          this.name ?? ''
        ),
        this.installOptions
      )

      // Set the url using the custom extensions protocol
      this.url = `extension://${this.name}/${this.main}`

      this.emitUpdate()
    } catch (err) {
      // Ensure the extension is not stored and the folder is removed if the installation fails
      this.setActive(false)
      throw err
    }

    return [this]
  }

  /**
   * Subscribe to updates of this extension
   * @param {string} name name of the callback to register
   * @param {callback} cb The function to execute on update
   */
  subscribe(name: string, cb: () => void) {
    // A second subscribe with the same name replaces the previous callback.
    this.listeners[name] = cb
  }

  /**
   * Remove subscription
   * @param {string} name name of the callback to remove
   */
  unsubscribe(name: string) {
    delete this.listeners[name]
  }

  /**
   * Execute listeners, passing this extension to each callback.
   */
  emitUpdate() {
    for (const cb in this.listeners) {
      this.listeners[cb].call(null, this)
    }
  }

  /**
   * Check for updates and install if available.
   * @param {string} version The version to update to. A falsy value leaves the
   *   specifier without an '@version' suffix (see `specifier`).
   * @returns {boolean} Whether an update was performed.
   */
  async update(version = false) {
    if (await this.isUpdateAvailable()) {
      this.installOptions.version = version
      await this._install()
      return true
    }

    return false
  }

  /**
   * Check if a new version of the extension is available at the origin.
   * @returns the latest available version if a new version is available or false if not.
   *   Resolves to undefined when no origin is set.
   */
  async isUpdateAvailable() {
    const pacote = require('pacote')
    if (this.origin) {
      return pacote.manifest(this.origin).then((mnf: any) => {
        return mnf.version !== this.version ? mnf.version : false
      })
    }
  }

  /**
   * Remove extension and refresh renderers.
   * NOTE(review): rmdirSync with { recursive } is deprecated in newer Node
   * releases in favour of fs.rmSync — confirm the supported Node version.
   * @returns {Promise}
   */
  async uninstall(): Promise<void> {
    const path = ExtensionManager.instance.getExtensionsPath()
    const extPath = resolve(path ?? '', this.name ?? '')
    rmdirSync(extPath, { recursive: true })

    this.emitUpdate()
  }

  /**
   * Set a extension's active state. This determines if a extension should be loaded on initialisation.
   * @param {boolean} active State to set _active to
   * @returns {Extension} This extension
   */
  setActive(active: boolean) {
    this._active = active
    this.emitUpdate()
    return this
  }
}
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
import { useExtensions } from './index'
|
|
||||||
|
|
||||||
test('testUseExtensionsMissingPath', () => {
|
|
||||||
expect(() => useExtensions(undefined as any)).toThrow(
|
|
||||||
'A path to the extensions folder is required to use extensions'
|
|
||||||
)
|
|
||||||
})
|
|
||||||
@ -1,136 +0,0 @@
|
|||||||
import { readFileSync } from 'fs'
|
|
||||||
|
|
||||||
import { normalize } from 'path'
|
|
||||||
|
|
||||||
import Extension from './extension'
|
|
||||||
import {
|
|
||||||
getAllExtensions,
|
|
||||||
removeExtension,
|
|
||||||
persistExtensions,
|
|
||||||
installExtensions,
|
|
||||||
getExtension,
|
|
||||||
getActiveExtensions,
|
|
||||||
addExtension,
|
|
||||||
} from './store'
|
|
||||||
import { ExtensionManager } from './manager'
|
|
||||||
|
|
||||||
export function init(options: any) {
|
|
||||||
// Create extensions protocol to serve extensions to renderer
|
|
||||||
registerExtensionProtocol()
|
|
||||||
|
|
||||||
// perform full setup if extensionsPath is provided
|
|
||||||
if (options.extensionsPath) {
|
|
||||||
return useExtensions(options.extensionsPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
return {}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create extensions protocol to provide extensions to renderer
|
|
||||||
* @private
|
|
||||||
* @returns {boolean} Whether the protocol registration was successful
|
|
||||||
*/
|
|
||||||
async function registerExtensionProtocol() {
|
|
||||||
let electron: any = undefined
|
|
||||||
|
|
||||||
try {
|
|
||||||
const moduleName = 'electron'
|
|
||||||
electron = await import(moduleName)
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Electron is not available')
|
|
||||||
}
|
|
||||||
const extensionPath = ExtensionManager.instance.getExtensionsPath()
|
|
||||||
if (electron && electron.protocol) {
|
|
||||||
return electron.protocol?.registerFileProtocol('extension', (request: any, callback: any) => {
|
|
||||||
const entry = request.url.substr('extension://'.length - 1)
|
|
||||||
|
|
||||||
const url = normalize(extensionPath + entry)
|
|
||||||
callback({ path: url })
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set extensions up to run from the extensionPath folder if it is provided and
|
|
||||||
* load extensions persisted in that folder.
|
|
||||||
* @param {string} extensionsPath Path to the extensions folder. Required if not yet set up.
|
|
||||||
* @returns {extensionManager} A set of functions used to manage the extension lifecycle.
|
|
||||||
*/
|
|
||||||
export function useExtensions(extensionsPath: string) {
|
|
||||||
if (!extensionsPath) throw Error('A path to the extensions folder is required to use extensions')
|
|
||||||
// Store the path to the extensions folder
|
|
||||||
ExtensionManager.instance.setExtensionsPath(extensionsPath)
|
|
||||||
|
|
||||||
// Remove any registered extensions
|
|
||||||
for (const extension of getAllExtensions()) {
|
|
||||||
if (extension.name) removeExtension(extension.name, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read extension list from extensions folder
|
|
||||||
const extensions = JSON.parse(
|
|
||||||
readFileSync(ExtensionManager.instance.getExtensionsFile(), 'utf-8')
|
|
||||||
)
|
|
||||||
try {
|
|
||||||
// Create and store a Extension instance for each extension in list
|
|
||||||
for (const p in extensions) {
|
|
||||||
loadExtension(extensions[p])
|
|
||||||
}
|
|
||||||
persistExtensions()
|
|
||||||
} catch (error) {
|
|
||||||
// Throw meaningful error if extension loading fails
|
|
||||||
throw new Error(
|
|
||||||
'Could not successfully rebuild list of installed extensions.\n' +
|
|
||||||
error +
|
|
||||||
'\nPlease check the extensions.json file in the extensions folder.'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return the extension lifecycle functions
|
|
||||||
return getStore()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check the given extension object. If it is marked for uninstalling, the extension files are removed.
|
|
||||||
* Otherwise a Extension instance for the provided object is created and added to the store.
|
|
||||||
* @private
|
|
||||||
* @param {Object} ext Extension info
|
|
||||||
*/
|
|
||||||
function loadExtension(ext: any) {
|
|
||||||
// Create new extension, populate it with ext details and save it to the store
|
|
||||||
const extension = new Extension()
|
|
||||||
|
|
||||||
for (const key in ext) {
|
|
||||||
if (Object.prototype.hasOwnProperty.call(ext, key)) {
|
|
||||||
// Use Object.defineProperty to set the properties as writable
|
|
||||||
Object.defineProperty(extension, key, {
|
|
||||||
value: ext[key],
|
|
||||||
writable: true,
|
|
||||||
enumerable: true,
|
|
||||||
configurable: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
addExtension(extension, false)
|
|
||||||
extension.subscribe('pe-persist', persistExtensions)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the publicly available store functions.
|
|
||||||
* @returns {extensionManager} A set of functions used to manage the extension lifecycle.
|
|
||||||
*/
|
|
||||||
export function getStore() {
|
|
||||||
if (!ExtensionManager.instance.getExtensionsFile()) {
|
|
||||||
throw new Error(
|
|
||||||
'The extension path has not yet been set up. Please run useExtensions before accessing the store'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
installExtensions,
|
|
||||||
getExtension,
|
|
||||||
getAllExtensions,
|
|
||||||
getActiveExtensions,
|
|
||||||
removeExtension,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,28 +0,0 @@
|
|||||||
import * as fs from 'fs';
|
|
||||||
import { join } from 'path';
|
|
||||||
import { ExtensionManager } from './manager';
|
|
||||||
|
|
||||||
// Tests for the ExtensionManager singleton (jest).

it('should throw an error when an invalid path is provided', () => {
  const manager = new ExtensionManager();
  // Force the folder-existence check to fail so setExtensionsPath rejects.
  jest.spyOn(fs, 'existsSync').mockReturnValue(false);
  expect(() => manager.setExtensionsPath('')).toThrow('Invalid path provided to the extensions folder');
});


it('should return an empty string when extensionsPath is not set', () => {
  const manager = new ExtensionManager();
  // With no path configured, the registry file resolves relative to ''.
  expect(manager.getExtensionsFile()).toBe(join('', 'extensions.json'));
});


it('should return undefined if no path is set', () => {
  const manager = new ExtensionManager();
  expect(manager.getExtensionsPath()).toBeUndefined();
});


it('should return the singleton instance', () => {
  // The constructor always hands back the shared static instance.
  const instance1 = new ExtensionManager();
  const instance2 = new ExtensionManager();
  expect(instance1).toBe(instance2);
});
|
|
||||||
@ -1,45 +0,0 @@
|
|||||||
import { join, resolve } from 'path'
|
|
||||||
|
|
||||||
import { existsSync, mkdirSync, writeFileSync } from 'fs'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Manages extension installation and migration.
|
|
||||||
*/
|
|
||||||
|
|
||||||
export class ExtensionManager {
|
|
||||||
public static instance: ExtensionManager = new ExtensionManager()
|
|
||||||
|
|
||||||
private extensionsPath: string | undefined
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
if (ExtensionManager.instance) {
|
|
||||||
return ExtensionManager.instance
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
getExtensionsPath(): string | undefined {
|
|
||||||
return this.extensionsPath
|
|
||||||
}
|
|
||||||
|
|
||||||
setExtensionsPath(extPath: string) {
|
|
||||||
// Create folder if it does not exist
|
|
||||||
let extDir
|
|
||||||
try {
|
|
||||||
extDir = resolve(extPath)
|
|
||||||
if (extDir.length < 2) throw new Error()
|
|
||||||
|
|
||||||
if (!existsSync(extDir)) mkdirSync(extDir)
|
|
||||||
|
|
||||||
const extensionsJson = join(extDir, 'extensions.json')
|
|
||||||
if (!existsSync(extensionsJson)) writeFileSync(extensionsJson, '{}')
|
|
||||||
|
|
||||||
this.extensionsPath = extDir
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error('Invalid path provided to the extensions folder')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
getExtensionsFile() {
|
|
||||||
return join(this.extensionsPath ?? '', 'extensions.json')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,43 +0,0 @@
|
|||||||
import { getAllExtensions } from './store';
|
|
||||||
import { getActiveExtensions } from './store';
|
|
||||||
import { getExtension } from './store';
|
|
||||||
|
|
||||||
// Tests for the extension store (jest).
// NOTE(review): the store appears to keep a module-level registry, making
// these tests order-dependent — the "empty array" test only passes before
// any addExtension call. Confirm jest executes this file top-to-bottom.

test('should return empty array when no extensions added', () => {
  expect(getAllExtensions()).toEqual([]);
});


test('should throw error when extension does not exist', () => {
  expect(() => getExtension('nonExistentExtension')).toThrow('Extension nonExistentExtension does not exist');
});

// NOTE(review): imports placed mid-file; most linters expect them at the top.
import { addExtension } from './store';
import Extension from './extension';

test('should return all extensions when multiple extensions added', () => {
  const ext1 = new Extension('ext1');
  ext1.name = 'ext1';
  const ext2 = new Extension('ext2');
  ext2.name = 'ext2';

  // persist=false keeps the registry in memory only.
  addExtension(ext1, false);
  addExtension(ext2, false);

  expect(getAllExtensions()).toEqual([ext1, ext2]);
});



test('should return only active extensions', () => {
  const ext1 = new Extension('ext1');
  ext1.name = 'ext1';
  ext1.setActive(true);
  const ext2 = new Extension('ext2');
  ext2.name = 'ext2';
  ext2.setActive(false);

  addExtension(ext1, false);
  addExtension(ext2, false);

  expect(getActiveExtensions()).toEqual([ext1]);
});
|
|
||||||
@ -1,125 +0,0 @@
|
|||||||
import { writeFileSync } from 'fs'
|
|
||||||
import Extension from './extension'
|
|
||||||
import { ExtensionManager } from './manager'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @module store
|
|
||||||
* @private
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Register of installed extensions
|
|
||||||
* @type {Object.<string, Extension>} extension - List of installed extensions
|
|
||||||
*/
|
|
||||||
const extensions: Record<string, Extension> = {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a extension from the stored extensions.
|
|
||||||
* @param {string} name Name of the extension to retrieve
|
|
||||||
* @returns {Extension} Retrieved extension
|
|
||||||
* @alias extensionManager.getExtension
|
|
||||||
*/
|
|
||||||
export function getExtension(name: string) {
|
|
||||||
if (!Object.prototype.hasOwnProperty.call(extensions, name)) {
|
|
||||||
throw new Error(`Extension ${name} does not exist`)
|
|
||||||
}
|
|
||||||
|
|
||||||
return extensions[name]
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get list of all extension objects.
|
|
||||||
* @returns {Array.<Extension>} All extension objects
|
|
||||||
* @alias extensionManager.getAllExtensions
|
|
||||||
*/
|
|
||||||
export function getAllExtensions() {
|
|
||||||
return Object.values(extensions)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get list of active extension objects.
|
|
||||||
* @returns {Array.<Extension>} Active extension objects
|
|
||||||
* @alias extensionManager.getActiveExtensions
|
|
||||||
*/
|
|
||||||
export function getActiveExtensions() {
|
|
||||||
return Object.values(extensions).filter((extension) => extension.active)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Remove extension from store and maybe save stored extensions to file
|
|
||||||
* @param {string} name Name of the extension to remove
|
|
||||||
* @param {boolean} persist Whether to save the changes to extensions to file
|
|
||||||
* @returns {boolean} Whether the delete was successful
|
|
||||||
* @alias extensionManager.removeExtension
|
|
||||||
*/
|
|
||||||
export function removeExtension(name: string, persist = true) {
|
|
||||||
const del = delete extensions[name]
|
|
||||||
if (persist) persistExtensions()
|
|
||||||
return del
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add extension to store and maybe save stored extensions to file
|
|
||||||
* @param {Extension} extension Extension to add to store
|
|
||||||
* @param {boolean} persist Whether to save the changes to extensions to file
|
|
||||||
* @returns {void}
|
|
||||||
*/
|
|
||||||
export function addExtension(extension: Extension, persist = true) {
|
|
||||||
if (extension.name) extensions[extension.name] = extension
|
|
||||||
if (persist) {
|
|
||||||
persistExtensions()
|
|
||||||
extension.subscribe('pe-persist', persistExtensions)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Save stored extensions to file
|
|
||||||
* @returns {void}
|
|
||||||
*/
|
|
||||||
export function persistExtensions() {
|
|
||||||
const persistData: Record<string, Extension> = {}
|
|
||||||
for (const name in extensions) {
|
|
||||||
persistData[name] = extensions[name]
|
|
||||||
}
|
|
||||||
writeFileSync(ExtensionManager.instance.getExtensionsFile(), JSON.stringify(persistData))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Create and install a new extension for each given specifier.
 * Installations run in parallel; extensions without an origin are skipped.
 * NOTE(review): the returned array is filled in installation-completion
 * order (pushes happen inside concurrent promises), not input order —
 * confirm callers do not rely on ordering.
 * @param {Array.<installOptions | string>} extensions A list of NPM specifiers, or installation configuration objects.
 * @returns {Promise.<Array.<Extension>>} New extensions
 * @alias extensionManager.installExtensions
 */
export async function installExtensions(extensions: any) {
  const installed: Extension[] = []
  const installations = extensions.map((ext: any): Promise<void> => {
    // Either a bare specifier string or a config object carrying options.
    const isObject = typeof ext === 'object'
    const spec = isObject ? [ext.specifier, ext] : [ext]
    // activate defaults to true unless explicitly disabled on the object.
    const activate = isObject ? ext.activate !== false : true

    // Install and possibly activate extension
    const extension = new Extension(...spec)
    if (!extension.origin) {
      return Promise.resolve()
    }
    return extension._install().then(() => {
      if (activate) extension.setActive(true)
      // Add extension to store if needed
      addExtension(extension)
      installed.push(extension)
    })
  })

  await Promise.all(installations)

  // Return list of all installed extensions
  return installed
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object.<string, any>} installOptions The {@link https://www.npmjs.com/package/pacote|pacote}
|
|
||||||
* options used to install the extension with some extra options.
|
|
||||||
* @param {string} specifier the NPM specifier that identifies the package.
|
|
||||||
* @param {boolean} [activate] Whether this extension should be activated after installation. Defaults to true.
|
|
||||||
*/
|
|
||||||
@ -1,19 +0,0 @@
|
|||||||
import { getAppConfigurations, defaultAppConfig } from './config'
|
|
||||||
|
|
||||||
import { getJanExtensionsPath, getJanDataFolderPath } from './config'
|
|
||||||
|
|
||||||
// Tests for app configuration helpers (jest).

it('should return default config when CI is e2e', () => {
  // NOTE(review): mutates process.env.CI without restoring it, so the
  // tests below also run with CI='e2e' — confirm that is intended.
  process.env.CI = 'e2e'
  const config = getAppConfigurations()
  expect(config).toEqual(defaultAppConfig())
})

it('should return extensions path when retrieved successfully', () => {
  const extensionsPath = getJanExtensionsPath()
  expect(extensionsPath).not.toBeUndefined()
})

it('should return data folder path when retrieved successfully', () => {
  const dataFolderPath = getJanDataFolderPath()
  expect(dataFolderPath).not.toBeUndefined()
})
|
|
||||||
@ -1,91 +0,0 @@
|
|||||||
import { AppConfiguration } from '../../types'
|
|
||||||
import { join, resolve } from 'path'
|
|
||||||
import fs from 'fs'
|
|
||||||
import os from 'os'
|
|
||||||
const configurationFileName = 'settings.json'
|
|
||||||
|
|
||||||
/**
 * Getting App Configurations.
 *
 * Reads settings.json from the configuration folder, creating it with
 * defaults when missing. Under CI='e2e' the defaults are returned without
 * touching disk.
 * @returns {AppConfiguration} The app configurations.
 */
export const getAppConfigurations = (): AppConfiguration => {
  const appDefaultConfiguration = defaultAppConfig()
  if (process.env.CI === 'e2e') return appDefaultConfiguration
  // Retrieve Application Support folder path
  // Fallback to user home directory if not found
  const configurationFile = getConfigurationFilePath()

  if (!fs.existsSync(configurationFile)) {
    // create default app config if we don't have one
    console.debug(`App config not found, creating default config at ${configurationFile}`)
    fs.writeFileSync(configurationFile, JSON.stringify(appDefaultConfiguration))
    return appDefaultConfiguration
  }

  try {
    const appConfigurations: AppConfiguration = JSON.parse(
      fs.readFileSync(configurationFile, 'utf-8')
    )
    return appConfigurations
  } catch (err) {
    // A corrupt settings file falls back to defaults rather than crashing.
    console.error(`Failed to read app config, return default config instead! Err: ${err}`)
    return defaultAppConfig()
  }
}
|
|
||||||
|
|
||||||
const getConfigurationFilePath = () =>
|
|
||||||
join(
|
|
||||||
global.core?.appPath() || process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
|
|
||||||
configurationFileName
|
|
||||||
)
|
|
||||||
|
|
||||||
export const updateAppConfiguration = ({
|
|
||||||
configuration,
|
|
||||||
}: {
|
|
||||||
configuration: AppConfiguration
|
|
||||||
}): Promise<void> => {
|
|
||||||
const configurationFile = getConfigurationFilePath()
|
|
||||||
|
|
||||||
fs.writeFileSync(configurationFile, JSON.stringify(configuration))
|
|
||||||
return Promise.resolve()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Utility function to get data folder path
|
|
||||||
*
|
|
||||||
* @returns {string} The data folder path.
|
|
||||||
*/
|
|
||||||
export const getJanDataFolderPath = (): string => {
|
|
||||||
const appConfigurations = getAppConfigurations()
|
|
||||||
return appConfigurations.data_folder
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Utility function to get extension path
|
|
||||||
*
|
|
||||||
* @returns {string} The extensions path.
|
|
||||||
*/
|
|
||||||
export const getJanExtensionsPath = (): string => {
|
|
||||||
const appConfigurations = getAppConfigurations()
|
|
||||||
return join(appConfigurations.data_folder, 'extensions')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Default app configurations
|
|
||||||
* App Data Folder default to Electron's userData
|
|
||||||
* %APPDATA% on Windows
|
|
||||||
* $XDG_CONFIG_HOME or ~/.config on Linux
|
|
||||||
* ~/Library/Application Support on macOS
|
|
||||||
*/
|
|
||||||
export const defaultAppConfig = (): AppConfiguration => {
|
|
||||||
const { app } = require('electron')
|
|
||||||
const defaultJanDataFolder = join(app?.getPath('userData') ?? os?.homedir() ?? '', 'data')
|
|
||||||
return {
|
|
||||||
data_folder:
|
|
||||||
process.env.CI === 'e2e'
|
|
||||||
? process.env.APP_CONFIG_PATH ?? resolve('./test-data')
|
|
||||||
: defaultJanDataFolder,
|
|
||||||
quick_ask: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,5 +0,0 @@
|
|||||||
export * from './config'
|
|
||||||
export * from './logger'
|
|
||||||
export * from './module'
|
|
||||||
export * from './path'
|
|
||||||
export * from './resource'
|
|
||||||
@ -1,47 +0,0 @@
|
|||||||
import { Logger, LoggerManager } from './logger';
|
|
||||||
|
|
||||||
it('should flush queued logs to registered loggers', () => {
|
|
||||||
class TestLogger extends Logger {
|
|
||||||
name = 'testLogger';
|
|
||||||
log(args: any): void {
|
|
||||||
console.log(args);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const loggerManager = new LoggerManager();
|
|
||||||
const testLogger = new TestLogger();
|
|
||||||
loggerManager.register(testLogger);
|
|
||||||
const logSpy = jest.spyOn(testLogger, 'log');
|
|
||||||
loggerManager.log('test log');
|
|
||||||
expect(logSpy).toHaveBeenCalledWith('test log');
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should unregister a logger', () => {
|
|
||||||
class TestLogger extends Logger {
|
|
||||||
name = 'testLogger';
|
|
||||||
log(args: any): void {
|
|
||||||
console.log(args);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const loggerManager = new LoggerManager();
|
|
||||||
const testLogger = new TestLogger();
|
|
||||||
loggerManager.register(testLogger);
|
|
||||||
loggerManager.unregister('testLogger');
|
|
||||||
const retrievedLogger = loggerManager.get('testLogger');
|
|
||||||
expect(retrievedLogger).toBeUndefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should register and retrieve a logger', () => {
|
|
||||||
class TestLogger extends Logger {
|
|
||||||
name = 'testLogger';
|
|
||||||
log(args: any): void {
|
|
||||||
console.log(args);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const loggerManager = new LoggerManager();
|
|
||||||
const testLogger = new TestLogger();
|
|
||||||
loggerManager.register(testLogger);
|
|
||||||
const retrievedLogger = loggerManager.get('testLogger');
|
|
||||||
expect(retrievedLogger).toBe(testLogger);
|
|
||||||
});
|
|
||||||
@ -1,81 +0,0 @@
|
|||||||
// Abstract Logger class that all loggers should extend.
|
|
||||||
export abstract class Logger {
|
|
||||||
// Each logger must have a unique name.
|
|
||||||
abstract name: string
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Log message to log file.
|
|
||||||
* This method should be overridden by subclasses to provide specific logging behavior.
|
|
||||||
*/
|
|
||||||
abstract log(args: any): void
|
|
||||||
}
|
|
||||||
|
|
||||||
// LoggerManager is a singleton class that manages all registered loggers.
|
|
||||||
export class LoggerManager {
|
|
||||||
// Map of registered loggers, keyed by their names.
|
|
||||||
public loggers = new Map<string, Logger>()
|
|
||||||
|
|
||||||
// Array to store logs that are queued before the loggers are registered.
|
|
||||||
queuedLogs: any[] = []
|
|
||||||
|
|
||||||
// Flag to indicate whether flushLogs is currently running.
|
|
||||||
private isFlushing = false
|
|
||||||
|
|
||||||
// Register a new logger. If a logger with the same name already exists, it will be replaced.
|
|
||||||
register(logger: Logger) {
|
|
||||||
this.loggers.set(logger.name, logger)
|
|
||||||
}
|
|
||||||
// Unregister a logger by its name.
|
|
||||||
unregister(name: string) {
|
|
||||||
this.loggers.delete(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
get(name: string) {
|
|
||||||
return this.loggers.get(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Flush queued logs to all registered loggers.
|
|
||||||
flushLogs() {
|
|
||||||
// If flushLogs is already running, do nothing.
|
|
||||||
if (this.isFlushing) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
this.isFlushing = true
|
|
||||||
|
|
||||||
while (this.queuedLogs.length > 0 && this.loggers.size > 0) {
|
|
||||||
const log = this.queuedLogs.shift()
|
|
||||||
this.loggers.forEach((logger) => {
|
|
||||||
logger.log(log)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
this.isFlushing = false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log message using all registered loggers.
|
|
||||||
log(args: any) {
|
|
||||||
this.queuedLogs.push(args)
|
|
||||||
|
|
||||||
this.flushLogs()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The instance of the logger.
|
|
||||||
* If an instance doesn't exist, it creates a new one.
|
|
||||||
* This ensures that there is only one LoggerManager instance at any time.
|
|
||||||
*/
|
|
||||||
static instance(): LoggerManager {
|
|
||||||
let instance: LoggerManager | undefined = global.core?.logger
|
|
||||||
if (!instance) {
|
|
||||||
instance = new LoggerManager()
|
|
||||||
if (!global.core) global.core = {}
|
|
||||||
global.core.logger = instance
|
|
||||||
}
|
|
||||||
return instance
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const log = (...args: any) => {
|
|
||||||
LoggerManager.instance().log(args)
|
|
||||||
}
|
|
||||||
@ -1,23 +0,0 @@
|
|||||||
import { ModuleManager } from './module';
|
|
||||||
|
|
||||||
it('should clear all imported modules', () => {
|
|
||||||
const moduleManager = new ModuleManager();
|
|
||||||
moduleManager.setModule('module1', { key: 'value1' });
|
|
||||||
moduleManager.setModule('module2', { key: 'value2' });
|
|
||||||
moduleManager.clearImportedModules();
|
|
||||||
expect(moduleManager.requiredModules).toEqual({});
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should set a module correctly', () => {
|
|
||||||
const moduleManager = new ModuleManager();
|
|
||||||
moduleManager.setModule('testModule', { key: 'value' });
|
|
||||||
expect(moduleManager.requiredModules['testModule']).toEqual({ key: 'value' });
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
it('should return the singleton instance', () => {
|
|
||||||
const instance1 = new ModuleManager();
|
|
||||||
const instance2 = new ModuleManager();
|
|
||||||
expect(instance1).toBe(instance2);
|
|
||||||
});
|
|
||||||
@ -1,31 +0,0 @@
|
|||||||
/**
|
|
||||||
* Manages imported modules.
|
|
||||||
*/
|
|
||||||
export class ModuleManager {
|
|
||||||
public requiredModules: Record<string, any> = {}
|
|
||||||
public cleaningResource = false
|
|
||||||
|
|
||||||
public static instance: ModuleManager = new ModuleManager()
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
if (ModuleManager.instance) {
|
|
||||||
return ModuleManager.instance
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sets a module.
|
|
||||||
* @param {string} moduleName - The name of the module.
|
|
||||||
* @param {any | undefined} nodule - The module to set, or undefined to clear the module.
|
|
||||||
*/
|
|
||||||
setModule(moduleName: string, nodule: any | undefined) {
|
|
||||||
this.requiredModules[moduleName] = nodule
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clears all imported modules.
|
|
||||||
*/
|
|
||||||
clearImportedModules() {
|
|
||||||
this.requiredModules = {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
import { normalizeFilePath } from './path'
|
|
||||||
|
|
||||||
import { jest } from '@jest/globals'
|
|
||||||
describe('Test file normalize', () => {
|
|
||||||
test('returns no file protocol prefix on Unix', async () => {
|
|
||||||
expect(normalizeFilePath('file://test.txt')).toBe('test.txt')
|
|
||||||
expect(normalizeFilePath('file:/test.txt')).toBe('test.txt')
|
|
||||||
})
|
|
||||||
test('returns no file protocol prefix on Windows', async () => {
|
|
||||||
expect(normalizeFilePath('file:\\\\test.txt')).toBe('test.txt')
|
|
||||||
expect(normalizeFilePath('file:\\test.txt')).toBe('test.txt')
|
|
||||||
})
|
|
||||||
|
|
||||||
test('returns correct path when Electron is available and app is not packaged', () => {
|
|
||||||
const electronMock = {
|
|
||||||
app: {
|
|
||||||
getAppPath: jest.fn().mockReturnValue('/mocked/path'),
|
|
||||||
isPackaged: false,
|
|
||||||
},
|
|
||||||
protocol: {},
|
|
||||||
}
|
|
||||||
jest.mock('electron', () => electronMock)
|
|
||||||
|
|
||||||
const { appResourcePath } = require('./path')
|
|
||||||
|
|
||||||
const expectedPath = process.platform === 'win32' ? '\\mocked\\path' : '/mocked/path'
|
|
||||||
expect(appResourcePath()).toBe(expectedPath)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@ -1,37 +0,0 @@
|
|||||||
import { join } from 'path'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Normalize file path
|
|
||||||
* Remove all file protocol prefix
|
|
||||||
* @param path
|
|
||||||
* @returns
|
|
||||||
*/
|
|
||||||
export function normalizeFilePath(path: string): string {
|
|
||||||
return path.replace(/^(file:[\\/]+)([^:\s]+)$/, '$2')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* App resources path
|
|
||||||
* Returns string - The current application directory.
|
|
||||||
*/
|
|
||||||
export function appResourcePath() {
|
|
||||||
try {
|
|
||||||
const electron = require('electron')
|
|
||||||
// electron
|
|
||||||
if (electron && electron.protocol) {
|
|
||||||
let appPath = join(electron.app.getAppPath(), '..', 'app.asar.unpacked')
|
|
||||||
|
|
||||||
if (!electron.app.isPackaged) {
|
|
||||||
// for development mode
|
|
||||||
appPath = join(electron.app.getAppPath())
|
|
||||||
}
|
|
||||||
return appPath
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Electron is not available')
|
|
||||||
}
|
|
||||||
|
|
||||||
// server
|
|
||||||
return join(global.core.appPath(), '../../..')
|
|
||||||
}
|
|
||||||
|
|
||||||
@ -1,9 +0,0 @@
|
|||||||
import { getSystemResourceInfo } from './resource'
|
|
||||||
|
|
||||||
it('should return the correct system resource information with a valid CPU count', async () => {
|
|
||||||
const result = await getSystemResourceInfo()
|
|
||||||
|
|
||||||
expect(result).toEqual({
|
|
||||||
memAvailable: 0,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@ -1,7 +0,0 @@
|
|||||||
import { SystemResourceInfo } from '../../types'
|
|
||||||
|
|
||||||
export const getSystemResourceInfo = async (): Promise<SystemResourceInfo> => {
|
|
||||||
return {
|
|
||||||
memAvailable: 0, // TODO: this should not be 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,8 +0,0 @@
|
|||||||
export * from './extension/index'
|
|
||||||
export * from './extension/extension'
|
|
||||||
export * from './extension/manager'
|
|
||||||
export * from './extension/store'
|
|
||||||
export * from './api'
|
|
||||||
export * from './helper'
|
|
||||||
export * from './../types'
|
|
||||||
export * from '../types/api'
|
|
||||||
19
core/src/test/setup.ts
Normal file
19
core/src/test/setup.ts
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
import { vi } from 'vitest'
|
||||||
|
|
||||||
|
// Ensure window exists in test environment
|
||||||
|
if (typeof window === 'undefined') {
|
||||||
|
global.window = {} as any
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mock window.core for browser tests
|
||||||
|
if (!window.core) {
|
||||||
|
Object.defineProperty(window, 'core', {
|
||||||
|
value: {
|
||||||
|
engineManager: undefined
|
||||||
|
},
|
||||||
|
writable: true,
|
||||||
|
configurable: true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add any other global mocks needed for core tests
|
||||||
@ -1,5 +1,6 @@
|
|||||||
|
|
||||||
|
|
||||||
|
import { test, expect } from 'vitest'
|
||||||
import { NativeRoute } from '../index';
|
import { NativeRoute } from '../index';
|
||||||
|
|
||||||
test('testNativeRouteEnum', () => {
|
test('testNativeRouteEnum', () => {
|
||||||
|
|||||||
@ -49,7 +49,6 @@ export enum AppRoute {
|
|||||||
isSubdirectory = 'isSubdirectory',
|
isSubdirectory = 'isSubdirectory',
|
||||||
baseName = 'baseName',
|
baseName = 'baseName',
|
||||||
log = 'log',
|
log = 'log',
|
||||||
systemInformation = 'systemInformation',
|
|
||||||
showToast = 'showToast',
|
showToast = 'showToast',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +1,6 @@
|
|||||||
|
import { it, expect } from 'vitest'
|
||||||
import { AssistantEvent } from './assistantEvent';
|
import { AssistantEvent } from './assistantEvent';
|
||||||
|
|
||||||
it('dummy test', () => { expect(true).toBe(true); });
|
it('dummy test', () => { expect(true).toBe(true); });
|
||||||
|
|
||||||
it('should contain OnAssistantsUpdate event', () => {
|
it('should contain OnAssistantsUpdate event', () => {
|
||||||
|
|||||||
@ -1,8 +1,9 @@
|
|||||||
|
|
||||||
|
|
||||||
import { AppConfigurationEventName } from './appConfigEvent';
|
import { describe, it, expect } from 'vitest'
|
||||||
|
import { AppConfigurationEventName } from './appConfigEvent';
|
||||||
|
|
||||||
describe('AppConfigurationEventName', () => {
|
describe('AppConfigurationEventName', () => {
|
||||||
it('should have the correct value for OnConfigurationUpdate', () => {
|
it('should have the correct value for OnConfigurationUpdate', () => {
|
||||||
expect(AppConfigurationEventName.OnConfigurationUpdate).toBe('OnConfigurationUpdate');
|
expect(AppConfigurationEventName.OnConfigurationUpdate).toBe('OnConfigurationUpdate');
|
||||||
});
|
});
|
||||||
|
|||||||
@ -1,28 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
import { AllQuantizations } from './huggingfaceEntity';
|
|
||||||
|
|
||||||
test('testAllQuantizationsArray', () => {
|
|
||||||
expect(AllQuantizations).toEqual([
|
|
||||||
'Q3_K_S',
|
|
||||||
'Q3_K_M',
|
|
||||||
'Q3_K_L',
|
|
||||||
'Q4_K_S',
|
|
||||||
'Q4_K_M',
|
|
||||||
'Q5_K_S',
|
|
||||||
'Q5_K_M',
|
|
||||||
'Q4_0',
|
|
||||||
'Q4_1',
|
|
||||||
'Q5_0',
|
|
||||||
'Q5_1',
|
|
||||||
'IQ2_XXS',
|
|
||||||
'IQ2_XS',
|
|
||||||
'Q2_K',
|
|
||||||
'Q2_K_S',
|
|
||||||
'Q6_K',
|
|
||||||
'Q8_0',
|
|
||||||
'F16',
|
|
||||||
'F32',
|
|
||||||
'COPY',
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
@ -1,65 +0,0 @@
|
|||||||
export interface HuggingFaceRepoData {
|
|
||||||
id: string
|
|
||||||
modelId: string
|
|
||||||
modelUrl?: string
|
|
||||||
author: string
|
|
||||||
sha: string
|
|
||||||
downloads: number
|
|
||||||
lastModified: string
|
|
||||||
private: boolean
|
|
||||||
disabled: boolean
|
|
||||||
gated: boolean
|
|
||||||
pipeline_tag: 'text-generation'
|
|
||||||
tags: Array<'transformers' | 'pytorch' | 'safetensors' | string>
|
|
||||||
cardData: Record<CardDataKeys | string, unknown>
|
|
||||||
siblings: {
|
|
||||||
rfilename: string
|
|
||||||
downloadUrl?: string
|
|
||||||
fileSize?: number
|
|
||||||
quantization?: Quantization
|
|
||||||
}[]
|
|
||||||
createdAt: string
|
|
||||||
}
|
|
||||||
|
|
||||||
const CardDataKeys = [
|
|
||||||
'base_model',
|
|
||||||
'datasets',
|
|
||||||
'inference',
|
|
||||||
'language',
|
|
||||||
'library_name',
|
|
||||||
'license',
|
|
||||||
'model_creator',
|
|
||||||
'model_name',
|
|
||||||
'model_type',
|
|
||||||
'pipeline_tag',
|
|
||||||
'prompt_template',
|
|
||||||
'quantized_by',
|
|
||||||
'tags',
|
|
||||||
] as const
|
|
||||||
export type CardDataKeysTuple = typeof CardDataKeys
|
|
||||||
export type CardDataKeys = CardDataKeysTuple[number]
|
|
||||||
|
|
||||||
export const AllQuantizations = [
|
|
||||||
'Q3_K_S',
|
|
||||||
'Q3_K_M',
|
|
||||||
'Q3_K_L',
|
|
||||||
'Q4_K_S',
|
|
||||||
'Q4_K_M',
|
|
||||||
'Q5_K_S',
|
|
||||||
'Q5_K_M',
|
|
||||||
'Q4_0',
|
|
||||||
'Q4_1',
|
|
||||||
'Q5_0',
|
|
||||||
'Q5_1',
|
|
||||||
'IQ2_XXS',
|
|
||||||
'IQ2_XS',
|
|
||||||
'Q2_K',
|
|
||||||
'Q2_K_S',
|
|
||||||
'Q6_K',
|
|
||||||
'Q8_0',
|
|
||||||
'F16',
|
|
||||||
'F32',
|
|
||||||
'COPY',
|
|
||||||
]
|
|
||||||
export type QuantizationsTuple = typeof AllQuantizations
|
|
||||||
export type Quantization = QuantizationsTuple[number]
|
|
||||||
@ -1,8 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
import * as huggingfaceEntity from './huggingfaceEntity';
|
|
||||||
import * as index from './index';
|
|
||||||
|
|
||||||
test('test_exports_from_huggingfaceEntity', () => {
|
|
||||||
expect(index).toEqual(huggingfaceEntity);
|
|
||||||
});
|
|
||||||
@ -1 +0,0 @@
|
|||||||
export * from './huggingfaceEntity'
|
|
||||||
@ -1,4 +1,5 @@
|
|||||||
|
|
||||||
|
import { test, expect } from 'vitest'
|
||||||
import * as assistant from './assistant';
|
import * as assistant from './assistant';
|
||||||
import * as model from './model';
|
import * as model from './model';
|
||||||
import * as thread from './thread';
|
import * as thread from './thread';
|
||||||
@ -6,12 +7,11 @@ import * as message from './message';
|
|||||||
import * as inference from './inference';
|
import * as inference from './inference';
|
||||||
import * as file from './file';
|
import * as file from './file';
|
||||||
import * as config from './config';
|
import * as config from './config';
|
||||||
import * as huggingface from './huggingface';
|
|
||||||
import * as miscellaneous from './miscellaneous';
|
import * as miscellaneous from './miscellaneous';
|
||||||
import * as api from './api';
|
import * as api from './api';
|
||||||
import * as setting from './setting';
|
import * as setting from './setting';
|
||||||
|
|
||||||
test('test_module_exports', () => {
|
test('test_module_exports', () => {
|
||||||
expect(assistant).toBeDefined();
|
expect(assistant).toBeDefined();
|
||||||
expect(model).toBeDefined();
|
expect(model).toBeDefined();
|
||||||
expect(thread).toBeDefined();
|
expect(thread).toBeDefined();
|
||||||
@ -19,7 +19,6 @@ import * as setting from './setting';
|
|||||||
expect(inference).toBeDefined();
|
expect(inference).toBeDefined();
|
||||||
expect(file).toBeDefined();
|
expect(file).toBeDefined();
|
||||||
expect(config).toBeDefined();
|
expect(config).toBeDefined();
|
||||||
expect(huggingface).toBeDefined();
|
|
||||||
expect(miscellaneous).toBeDefined();
|
expect(miscellaneous).toBeDefined();
|
||||||
expect(api).toBeDefined();
|
expect(api).toBeDefined();
|
||||||
expect(setting).toBeDefined();
|
expect(setting).toBeDefined();
|
||||||
|
|||||||
@ -5,7 +5,6 @@ export * from './message'
|
|||||||
export * from './inference'
|
export * from './inference'
|
||||||
export * from './file'
|
export * from './file'
|
||||||
export * from './config'
|
export * from './config'
|
||||||
export * from './huggingface'
|
|
||||||
export * from './miscellaneous'
|
export * from './miscellaneous'
|
||||||
export * from './api'
|
export * from './api'
|
||||||
export * from './setting'
|
export * from './setting'
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user