Merge branch 'main' into feat_adr_002
.github/ISSUE_TEMPLATE/bug_report.md
@@ -28,11 +28,5 @@ If applicable, add screenshots to help explain your problem.
 - Browser [e.g. chrome, safari]
 - Version [e.g. 22]
 
-**Smartphone (please complete the following information):**
- - Device: [e.g. iPhone6]
- - OS: [e.g. iOS8.1]
- - Browser [e.g. stock browser, safari]
- - Version [e.g. 22]
-
 **Additional context**
 Add any other context about the problem here.
.github/ISSUE_TEMPLATE/discussion-thread.md
@@ -2,7 +2,7 @@
 name: Discussion thread
 about: Start an open ended discussion
 title: 'Discussion: [TOPIC HERE]'
-labels: ''
+labels: 'type: discussion'
 assignees: ''
 
 ---
@@ -11,4 +11,6 @@ assignees: ''
 
 **Discussion**
 
+**Alternatives**
+
 **Resources**
.github/release-drafter.yml (new file)
@@ -0,0 +1,26 @@
categories:
  - title: '🚀 Features'
    labels:
      - 'type: enhancement'
      - 'type: epic'
      - 'type: feature request'
  - title: '🐛 Bug Fixes'
    labels:
      - 'type: bug'
  - title: '🧰 Maintenance'
    labels:
      - 'type: chore'
      - 'type: ci'
  - title: '📖 Documentation'
    labels:
      - 'type: documentation'
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
template: |
  ## Changes

  $CHANGES

  ## Contributors

  $CONTRIBUTORS
.github/workflows/build-app.yml
@@ -155,4 +155,28 @@ jobs:
         run: |
           yarn build:publish-linux
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+  update_release_draft:
+    needs: [build-macos, build-windows-x64, build-linux-x64]
+    permissions:
+      # write permission is required to create a github release
+      contents: write
+      # write permission is required for autolabeler
+      # otherwise, read permission is required at least
+      pull-requests: write
+    runs-on: ubuntu-latest
+    steps:
+      # (Optional) GitHub Enterprise requires GHE_HOST variable set
+      #- name: Set GHE_HOST
+      #  run: |
+      #    echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
+
+      # Drafts your next Release notes as Pull Requests are merged into "master"
+      - uses: release-drafter/release-drafter@v5
+        # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
+        # with:
+        #   config-name: my-config.yml
+        #   disable-autolabeler: true
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/deploy-jan-docs.yml
@@ -26,7 +26,7 @@ jobs:
         run: yarn install
         working-directory: docs
       - name: Build website
-        run: yarn build
+        run: sed -i '/process.env.DEBUG = namespaces;/c\// process.env.DEBUG = namespaces;' ./node_modules/debug/src/node.js && yarn build
         working-directory: docs
 
       - name: Add Custome Domain file
.github/workflows/jan-docs-test.yml (new file)
@@ -0,0 +1,29 @@
name: Jan Docs Test Build

on:
  pull_request:
    branches:
      - main
    paths:
      - 'docs/**'
      - '.github/workflows/deploy-jan-docs.yml'
      - '.github/workflows/jan-docs-test.yml'

jobs:
  deploy:
    name: Test Build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: 'yarn'
          cache-dependency-path: './docs/yarn.lock'

      - name: Install dependencies
        run: yarn install
        working-directory: docs
      - name: Test Build Command
        run: sed -i '/process.env.DEBUG = namespaces;/c\// process.env.DEBUG = namespaces;' ./node_modules/debug/src/node.js && yarn build
        working-directory: docs
.github/workflows/linter-and-test.yml
@@ -26,6 +26,12 @@ jobs:
   test-on-macos:
     runs-on: [self-hosted, macOS, macos-desktop]
     steps:
+      - name: 'Cleanup build folder'
+        run: |
+          ls -la ./
+          rm -rf ./* || true
+          rm -rf ./.??* || true
+          ls -la ./
       - name: Getting the repo
         uses: actions/checkout@v3
 
@@ -48,6 +54,9 @@ jobs:
   test-on-windows:
     runs-on: [self-hosted, Windows, windows-desktop]
     steps:
+      - name: Clean workspace
+        run: |
+          Remove-Item -Path .\* -Force -Recurse
       - name: Getting the repo
         uses: actions/checkout@v3
 
@@ -68,6 +77,12 @@ jobs:
   test-on-ubuntu:
     runs-on: [self-hosted, Linux, ubuntu-desktop]
     steps:
+      - name: 'Cleanup build folder'
+        run: |
+          ls -la ./
+          rm -rf ./* || true
+          rm -rf ./.??* || true
+          ls -la ./
       - name: Getting the repo
         uses: actions/checkout@v3
 
@@ -78,11 +93,11 @@ jobs:
 
       - name: Linter and test
         run: |
+          export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
+          echo -e "Display ID: $DISPLAY"
           yarn config set network-timeout 300000
           yarn install
           yarn lint
           yarn build:plugins
           yarn build:linux
           yarn test
-        env:
-          DISPLAY: ":0"
README.md
@@ -20,64 +20,75 @@
 
 > ⚠️ **Jan is currently in Development**: Expect breaking changes and bugs!
 
-Jan lets you run AI on your own hardware, with helpful tools to manage models and monitor your hardware performance.
+**Use offline LLMs with your own data.** Run open source models like Llama2 or Falcon on your internal computers/servers.
 
-In the background, Jan runs [Nitro](https://nitro.jan.ai), a C++ inference engine. It runs various model formats (GGUF/TensorRT) on various hardware (Mac M1/M2/Intel, Windows, Linux, and datacenter-grade Nvidia GPUs) with optional GPU acceleration.
+**Jan runs on any hardware.** From PCs to multi-GPU clusters, Jan supports universal architectures:
 
-> See the Nitro codebase at https://nitro.jan.ai.
+- [x] Nvidia GPUs (fast)
+- [x] Apple M-series (fast)
+- [x] Apple Intel
+- [x] Linux Debian
+- [x] Windows x64
 
-<!-- TODO: uncomment this later when we have this feature -->
-<!-- Jan can be run as a server or cloud-native application for enterprise. We offer enterprise plugins for LDAP integration and Audit Logs. Contact us at [hello@jan.ai](mailto:hello@jan.ai) for more details. -->
+> Download Jan at https://jan.ai/
 
 ## Demo
 
 <p align="center">
-  <img style='border:1px solid #000000' src="https://github.com/janhq/jan/assets/69952136/1f9bb48c-2e70-4633-9f68-7881cd925972" alt="Jan Web GIF">
+  <img style='border:1px solid #000000' src="https://github.com/janhq/jan/assets/69952136/1db9c3d3-79b1-4988-afb5-afd4f4afd0d9" alt="Jan Web GIF">
 </p>
 
+_Screenshot: Jan v0.1.3 on Mac M1 Pro, 16GB Sonoma_
+
 ## Quicklinks
 
-- Developer documentation: https://jan.ai/docs (Work in Progress)
-- Desktop app: Download at https://jan.ai/
-- Mobile app shell: Download via [App Store](https://apps.apple.com/us/app/jan-on-device-ai-cloud-ais/id6449664703) | [Android](https://play.google.com/store/apps/details?id=com.jan.ai)
-- Nitro (C++ AI Engine): https://nitro.jan.ai
+- [Developer docs](https://jan.ai/docs) (WIP)
+- Mobile App shell: [App Store](https://apps.apple.com/us/app/jan-on-device-ai-cloud-ais/id6449664703) | [Android](https://play.google.com/store/apps/details?id=com.jan.ai)
+- [Nitro Github](https://nitro.jan.ai): Jan's AI engine
 
 ## Plugins
 
 Jan supports core & 3rd party extensions:
 
 - [x] **LLM chat**: Self-hosted Llama2 and LLMs
-- [x] **Model Manager**: 1-click to install, swap, and delete models
-- [x] **Storage**: Optionally store your conversation history and other data in SQLite/your storage of choice
+- [x] **Model Manager**: 1-click to install, swap, and delete models with HuggingFace integration
+- [x] **Storage**: Optionally save conversation history and other data in SQLite
 - [ ] **3rd-party AIs**: Connect to ChatGPT, Claude via API Key (in progress)
 - [ ] **Cross device support**: Mobile & Web support for custom shared servers (in progress)
-- [ ] **File retrieval**: User can upload private and run a vectorDB (planned)
+- [ ] **File retrieval**: User can chat with docs
 - [ ] **Multi-user support**: Share a single server across a team/friends (planned)
 - [ ] **Compliance**: Auditing and flagging features (planned)
 
-## Hardware Support
+## Nitro (Jan's AI engine)
 
-Nitro provides both CPU and GPU support, via [llama.cpp](https://github.com/ggerganov/llama.cpp) and [TensorRT](https://github.com/NVIDIA/TensorRT), respectively.
+In the background, Jan runs [Nitro](https://nitro.jan.ai), an open source, C++ inference engine. It runs various model formats (GGUF/TensorRT) on various hardware (Mac M1/M2/Intel, Windows, Linux, and datacenter-grade Nvidia GPUs) with optional GPU acceleration.
 
-- [x] Nvidia GPUs (accelerated)
-- [x] Apple M-series (accelerated)
-- [x] Linux DEB
-- [x] Windows x64
+> See the open source Nitro codebase at https://nitro.jan.ai.
 
-Not supported yet: Apple Intel, Linux RPM, Windows x86|ARM64, AMD ROCm
+## Troubleshooting
 
-> See [developer docs](https://docs.jan.ai/docs/) for detailed installation instructions.
+As Jan is in development mode, you might get stuck on a broken build.
+
+To reset your installation:
+
+1. Delete Jan Application from /Applications
+
+1. Clear cache:
+   `rm -rf /Users/$(whoami)/Library/Application\ Support/jan`
+   OR
+   `rm -rf ~/Library/Application\ Support/jan`
+
+---
 
 ## Contributing
 
 Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) file
 
 ### Pre-requisites
 
 - node >= 20.0.0
 - yarn >= 1.22.0
 
-### Use as complete suite (in progress)
-### For interactive development
+### Instructions
 
 Note: This instruction is tested on MacOS only.
 
@@ -85,7 +96,7 @@ Note: This instruction is tested on MacOS only.
 
   ```
  git clone https://github.com/janhq/jan
-git checkout feature/hackathon-refactor-jan-into-electron-app
+git checkout DESIRED_BRANCH
  cd jan
  ```
 
@@ -98,28 +109,29 @@ Note: This instruction is tested on MacOS only.
  yarn build:plugins
  ```
 
-4. **Run development and Using Jan Desktop**
+3. **Run development and Using Jan Desktop**
 
  ```
  yarn dev
  ```
 
 This will start the development server and open the desktop app.
 In this step, there are a few notification about installing base plugin, just click `OK` and `Next` to continue.
 
 ### For production build
 
 ```bash
 # Do step 1 and 2 in previous section
 git clone https://github.com/janhq/jan
 cd jan
 yarn install
 yarn build:plugins
 
 # Build the app
 yarn build
 ```
 
 This will build the app MacOS m1/m2 for production (with code signing already done) and put the result in `dist` folder.
 
 ## License
@@ -5,4 +5,4 @@ slug: /about
 
 ## Team
 
-## History
+## Timeline
docs/docs/events/hcmc-oct23.md (new file)
@@ -0,0 +1,46 @@
---
title: Jan HCMC Hacker House
slug: /events/hcmc-oct23
---

## Saigon Hacker House

Jan's Hacker House is a four-day offline event where the Jan team gathers at a hacker house prior to a major release. We use the time to work on the release, bond as a team and get Jan across the finish line.

Jan hosts a series of talks, workshops and social events at night, inviting the local AI community to join us.

### Date & Time

- 24-27 October 2023

### Location

- Thao Dien, District 2, Ho Chi Minh City
- Exact location to be shared later

### OKRs

- Jan v1.0 should be bug-free and run on Windows, Mac, Linux
- "Create Bot" feature w/ Saved Prompts
- **[Stretch Goal]** API for Plugins
  - UI API
  - Core Process API

## Agenda

To help us manage RSVPs, please use the Eventbrite links below to RSVP for each event.

| Day            | Eventbrite Link |
| -------------- | --------------- |
| Mon (23 Oct)   | [Jan Team & Partners Dinner](https://jan-partners-night-hcmc.eventbrite.com) |
| Tues (24 Oct)  | AI Talks Day 1  |
| Wed (25 Oct)   | AI Talks Day 2  |
| Thurs (26 Oct) | [VC Night](https://jan-hcmc-vc-night.eventbrite.com) |
| Fri (27 Oct)   | [Jan Launch Party](https://jan-launch-party.eventbrite.com) |

## Photos
@@ -1,5 +1,5 @@
 ---
-title: Community Examples
+title: Hardware Examples
 ---
 
 ## Add your own example
BIN  docs/docs/hardware/concepts/concepts-images/GPU.png (new file, 388 KiB)
BIN  docs/docs/hardware/concepts/concepts-images/GPU_Image.png (new file, 945 KiB)
BIN  docs/docs/hardware/concepts/concepts-images/PCIex16.png (new file, 2.2 MiB)
BIN  docs/docs/hardware/concepts/concepts-images/Power.png (new file, 453 KiB)
BIN  docs/docs/hardware/concepts/concepts-images/RAM-VRAM.png (new file, 349 KiB)
BIN  docs/docs/hardware/concepts/concepts-images/VRAM-Image.png (new file, 553 KiB)
BIN  docs/docs/hardware/concepts/concepts-images/slot.png (new file, 636 KiB)
@@ -2,7 +2,131 @@
 title: GPUs and VRAM
 ---
 
-- GPUs plugging in to Motherboard via PCIe
-- Multiple GPUs
-- NVLink
-- PCIe (and Motherboard limitations)

## What Is a GPU?

A Graphics Card, or GPU (Graphics Processing Unit), is a fundamental component in modern computing. Think of it as the powerhouse behind rendering the stunning visuals you see on your screen. Similar to the motherboard in your computer, the graphics card is a printed circuit board. However, it's not just a passive piece of hardware; it's a sophisticated device equipped with essential components like fans, onboard RAM, a dedicated memory controller, BIOS, and various other features. If you want to learn more about GPUs, read [Understanding the architecture of a GPU](https://medium.com/codex/understanding-the-architecture-of-a-gpu-d5d2d2e8978b).

## What Are GPUs Used For?

Two decades ago, GPUs primarily enhanced real-time 3D graphics in gaming. But as the 21st century dawned, a revelation occurred among computer scientists. They recognized that GPUs held untapped potential to solve some of the world's most intricate computing tasks.

This revelation marked the dawn of the general-purpose GPU era. Today's GPUs have evolved into versatile tools, more adaptable than ever before. They now have the capability to accelerate a diverse range of applications that stretch well beyond their original graphics-focused purpose.

### **Here are some example use cases:**

1. **Gaming**: They make games look good and run smoothly.
2. **Content Creation**: Help with video editing, 3D design, and graphics work.
3. **AI and Machine Learning**: Used for training smart machines.
4. **Science**: Speed up scientific calculations and simulations.
5. **Cryptocurrency Mining**: Mine digital currencies like Bitcoin.
6. **Medical Imaging**: Aid in analyzing medical images.
7. **Self-Driving Cars**: Help cars navigate autonomously.
8. **Simulations**: Create realistic virtual experiences.
9. **Data Analysis**: Speed up data processing and visualization.
10. **Video Streaming**: Improve video quality and streaming efficiency.

## What Is VRAM in a GPU?

VRAM, or video random-access memory, is a type of high-speed memory that is specifically designed for use with graphics processing units (GPUs). VRAM is used to store the textures, images, and other data that the GPU needs to render graphics. It allows the GPU to access the data it needs quickly and efficiently. This is essential for rendering complex graphics at high frame rates.

VRAM is different from other types of memory, such as the system RAM that is used by the CPU. VRAM is optimized for high bandwidth and low latency, which means that it can read and write data very quickly. The amount of VRAM that a GPU has is one of the factors that determines its performance. More VRAM allows the GPU to store more data and render more complex graphics. However, VRAM is also one of the most expensive components of a GPU. So when choosing a graphics card, it is important to consider the amount of VRAM that it has. If you are planning on running demanding LLMs, video games, or 3D graphics software, you will need a graphics card with more VRAM.

## What makes VRAM and RAM different from each other?

RAM (Random Access Memory) and VRAM (Video Random Access Memory) are both types of memory used in computers, but they have different functions and characteristics. Here are the differences between RAM and VRAM.

### RAM (Random Access Memory):

- RAM is a general-purpose memory that stores data and instructions that the CPU needs to access quickly.
- RAM is used for short-term data storage and is volatile, meaning that it loses its contents when the computer is turned off.
- RAM is connected to the motherboard and is accessed by the CPU.
- RAM typically has a larger capacity compared to VRAM, which is designed to store smaller amounts of data with faster access times.
- RAM stores data related to the operating system and the various programs that are running, including code, program files, and user data.

### VRAM (Video Random Access Memory):

- VRAM is a type of RAM that is specifically used to store image data for a computer display.
- VRAM is a graphics card component that is connected to the GPU (Graphics Processing Unit).
- VRAM is used exclusively by the GPU and doesn't need to store as much data as system RAM.
- VRAM is similar to RAM in that it is volatile and loses its contents when the computer is turned off.
- VRAM stores data related specifically to graphics, such as textures, frames, and other graphical data.
- VRAM is designed to store smaller amounts of data with faster access times than RAM.

In summary, RAM is used for general-purpose memory, while VRAM is used for graphics-related tasks. RAM has a larger capacity and is accessed by the CPU, while VRAM has a smaller capacity and is accessed by the GPU.

**Key differences between VRAM and RAM:**

| Characteristic | VRAM                  | RAM                   |
| -------------- | --------------------- | --------------------- |
| Purpose        | Graphics processing   | General processing    |
| Speed          | Faster                | Slower                |
| Latency        | Lower                 | Higher                |
| Bandwidth      | Higher                | Lower                 |
| Cost           | More expensive        | Less expensive        |
| Availability   | Less widely available | More widely available |

## How to Connect a GPU to the Motherboard via PCIe

Connecting hardware components to a motherboard is often likened to assembling LEGO pieces. If the parts fit together seamlessly, you're on the right track. Experienced PC builders find this process straightforward. However, for first-time builders, identifying where each hardware component belongs on the motherboard can be a bit perplexing.

**So follow the below 5 steps to connect your GPU to the motherboard:**

1. First, make sure your computer is powered off and unplugged from the electrical outlet to ensure safety.
2. Open your computer case if necessary to access the motherboard. Locate the PCIe x16 slot on the motherboard where you'll install the GPU. These slots are typically longer than other expansion slots and are used for graphics cards.
   Remove Slot Covers (if applicable): Some PCIe slots may have protective covers or brackets covering them. Remove these covers by unscrewing them from the case using a Phillips-head screwdriver. The PCIe x16 slot will have a plastic lock on one side only. There may be more than one PCIe x16 slot depending on the motherboard. You can use any of the slots according to your choice.
3. Now insert the graphics card slowly:
   - Unlock the plastic lock on one side of the PCIe x16 slot by pulling it outwards.
   - Align the PCIe slot with your graphics card, making sure that the HDMI port side of the GPU faces the rear side of the CPU case.
   - Gently press on the card until you hear it securely snap in place.
4. Insert the Power Connector: If your GPU requires additional power (most modern GPUs do), connect the necessary power cables from your power supply to the GPU's power connectors. These connectors are usually located on the top or side of the GPU.
5. Power on the System: After turning on the PC, see if the fans on your graphics card spin. If they do not spin, remove the power cable from the GPU, reconnect it, and power on the PC again.

> :memo: Note: To better understand, you can also watch YouTube tutorials on how to connect the GPU to the motherboard via PCIe.

## How to Choose a Graphics Card for your AI work

Selecting the optimal GPU for running Large Language Models (LLMs) on your home PC is a decision influenced by your budget and the specific LLMs you intend to work with. Your choice should strike a balance between performance, efficiency, and cost-effectiveness.

In general, the following GPU features are important for running LLMs:

- **High VRAM:** LLMs are typically very large and complex models, so they require a GPU with a high amount of VRAM. This will allow the model to be loaded into memory and processed efficiently.
- **CUDA Compatibility:** When running LLMs on a GPU, CUDA compatibility is paramount. CUDA is NVIDIA's parallel computing platform, and it plays a vital role in accelerating deep learning tasks. LLMs, with their extensive matrix calculations, heavily rely on parallel processing. Ensuring your GPU supports CUDA is like having the right tool for the job. It allows the LLM to leverage the GPU's parallel processing capabilities, significantly speeding up model training and inference.
- **Number of CUDA, Tensor, and RT Cores:** High-performance NVIDIA GPUs have both CUDA and Tensor cores. These cores are responsible for executing the neural network computations that underpin LLMs' language understanding and generation. The more CUDA cores your GPU has, the better equipped it is to handle the massive computational load that LLMs impose. Tensor cores in your GPU further enhance LLM performance by accelerating the critical matrix operations integral to language modeling tasks.
- **Generation (Series)**: When selecting a GPU for LLMs, consider its generation or series (e.g., RTX 30 series). Newer GPU generations often come with improved architectures and features. For LLM tasks, opting for the latest generation can mean better performance, energy efficiency, and support for emerging AI technologies. Avoid purchasing RTX 2000-series GPUs, which are outdated nowadays.
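To check the first two points above — usable CUDA support and how much VRAM is actually exposed — on a machine you already own, you can query the GPU directly. This is a minimal sketch, assuming a CUDA-enabled PyTorch install is available; it is purely illustrative and not part of Jan itself:

```python
# Minimal check of CUDA availability and VRAM (assumes a CUDA-enabled PyTorch install).
import torch

if not torch.cuda.is_available():
    print("No CUDA-capable GPU detected, or PyTorch was built without CUDA support.")
else:
    for i in range(torch.cuda.device_count()):
        props = torch.cuda.get_device_properties(i)
        vram_gb = props.total_memory / 1024**3
        print(f"GPU {i}: {props.name} | {vram_gb:.1f} GB VRAM | "
              f"compute capability {props.major}.{props.minor}")
```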
### Here are some of the best GPU options for this purpose:

1. **NVIDIA RTX 3090**: The NVIDIA RTX 3090 is a high-end GPU with a substantial 24GB of VRAM. This copious VRAM capacity makes it exceptionally well-suited for handling large LLMs. Moreover, it's known for its relative efficiency, meaning it won't overheat or strain your home PC's cooling system excessively. The RTX 3090's robust capabilities are a boon for those who need to work with hefty language models.
2. **NVIDIA RTX 4090**: If you're looking for peak performance and can afford the investment, the NVIDIA RTX 4090 represents the pinnacle of GPU power. Boasting 24GB of VRAM and featuring a cutting-edge Tensor Core architecture tailored for AI workloads, it outshines the RTX 3090 in terms of sheer capability. However, it's important to note that the RTX 4090 is also pricier and more power-hungry than its predecessor, the RTX 3090.
3. **AMD Radeon RX 6900 XT**: On the AMD side, the Radeon RX 6900 XT stands out as a high-end GPU with 16GB of VRAM. While it may not quite match the raw power of the RTX 3090 or RTX 4090, it strikes a balance between performance and affordability. Additionally, it tends to be more power-efficient, which could translate to a more sustainable and quieter setup in your home PC.

If budget constraints are a consideration, there are more cost-effective GPU options available:

- **NVIDIA RTX 3070**: The RTX 3070 is a solid mid-range GPU that can handle LLMs effectively. While it may not excel with the most massive or complex language models, it's a reliable choice for users looking for a balance between price and performance.
- **AMD Radeon RX 6800 XT**: Similarly, the RX 6800 XT from AMD offers commendable performance without breaking the bank. It's well-suited for running mid-sized LLMs and provides a competitive option in terms of both power and cost.

When selecting a GPU for LLMs, remember that it's not just about the GPU itself. Consider the synergy with other components in your PC:

- **CPU**: To ensure efficient processing, pair your GPU with a powerful CPU. LLMs benefit from fast processors, so having a capable CPU is essential.
- **RAM**: Sufficient RAM is crucial for LLMs. They can be memory-intensive, and having enough RAM ensures smooth operation.
- **Cooling System**: LLMs can push your PC's hardware to the limit. A robust cooling system helps maintain optimal temperatures, preventing overheating and performance throttling.

By taking all of these factors into account, you can build a home PC setup that's well-equipped to handle the demands of running LLMs effectively and efficiently.
(deleted file)
@@ -1,3 +0,0 @@
----
-title: "@dan-jan: 3090 Desktop"
----
@@ -1,3 +1,24 @@
 ---
-title: "@janhq: 2x4090 Workstation"
+title: "2 x 4090 Workstation"
 ---

Jan uses a 2 x 4090 Workstation to run Codellama for internal use.[^1]

## Component List

| Type                 | Item                                                           | Unit Price | Total Price |
| :------------------- | :------------------------------------------------------------- | :--------- | ----------- |
| **CPU**              | [Ryzen Threadripper Pro 5965WX 280W SP3 WOF](AMAZON-LINK-HERE)  | $2,229     |             |
| **Motherboard**      | [Asus Pro WS WRX80E Sage SE WiFi](AMAZON-LINK-HERE)             | $933       |             |
| **RAM**              | 4 x [G.Skill Ripjaw S5 2x32 6000C32](AMAZON-LINK-HERE)          | $92.99     |             |
| **GPU**              | 2 x [Asus Strix RTX 4090 24GB OC](AMAZON-LINK-HERE)             | $4,345     |             |
| **Storage PCIe-SSD** | [Samsung 990 Pro 2TB NVME 2.0](AMAZON-LINK-HERE)                | $134.99    |             |
| **Cooler**           | [BeQuiet Dark Rock 4 Pro TR4](AMAZON-LINK-HERE)                 | $89.90     |             |
| **Power Supply**     | [FSP Cannon 2000W Pro 92+ Full Modular PSU](AMAZON-LINK-HERE)   | $449.99    |             |
| **Case**             | [Veddha 6GPUs Frame Black](AMAZON-LINK-HERE)                    | $59.99     |             |
| **Total cost**       |                                                                 |            | $8,334      |

[^1]: https://www.reddit.com/r/LocalLLaMA/comments/16lxt6a/case_for_dual_4090s/
@@ -1,6 +1,4 @@
 ---
 sidebar_position: 1
-title: Hardware
+title: Introduction
 ---
-
-TODO
(deleted file)
@@ -1,3 +0,0 @@
----
-title: Cloud vs. Buy
----
docs/docs/hardware/overview/cloud-vs-self-hosting.md (new file)
@@ -0,0 +1,62 @@
---
title: Cloud vs. Self-hosting Your AI
---

The choice of how to run your AI - on GPU cloud services, on-prem, or just using an API provider - involves various trade-offs. The following is a naive exploration of the pros and cons of renting vs self-hosting.

## Cost Comparison

The following estimations use these general assumptions:

|            | Self-Hosted                              | GPT 4.0        | GPU Rental         |
| ---------- | ---------------------------------------- | -------------- | ------------------ |
| Unit Costs | $10k upfront for 2x4090s (5 year amort.) | $0.00012/token | $4.42 for 1xH100/h |

- 800 average tokens (input & output) in a single request
- Inference speed is at 24 tokens per second

### Low Usage

When operating at low capacity:

|                  | Self-Hosted | GPT 4.0 | GPU Rental |
| ---------------- | ----------- | ------- | ---------- |
| Cost per Request | $2.33       | $0.10   | $0.04      |

### High Usage

When operating at high capacity, i.e. 24 hours in a day, ~77.8k requests per month:

|                | Self-Hosted  | GPT 4.0 | GPU Rental |
| -------------- | ------------ | ------- | ---------- |
| Cost per Month | $166 (fixed) | $7465   | $3182      |

### Incremental Costs

Large context use cases are also interesting to evaluate. For example, if you had to write a 500 word essay summarizing Tolstoy's "War and Peace":

|                         | Self-Hosted          | GPT 4.0 | GPU Rental |
| ----------------------- | -------------------- | ------- | ---------- |
| Cost of "War and Peace" | (upfront fixed cost) | $94     | $40        |

> **Takeaway**: Renting on cloud or using an API is great for initially scaling. However, it can quickly become expensive when dealing with large datasets and context windows. For predictable costs, self-hosting is an attractive option.
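The per-request and per-month figures in the tables above follow directly from the stated assumptions (800 tokens per request, 24 tokens/s inference, $0.00012/token API pricing, $4.42/hour for a rented H100, and a $10k rig amortized over 5 years). A minimal sketch of the arithmetic, for anyone who wants to rerun it with their own numbers:

```python
# Back-of-the-envelope reproduction of the cost figures above, using the same assumptions.
TOKENS_PER_REQUEST = 800
TOKENS_PER_SECOND = 24
SECONDS_PER_REQUEST = TOKENS_PER_REQUEST / TOKENS_PER_SECOND    # ~33.3 s per request

# Per-request costs
api_per_request = 0.00012 * TOKENS_PER_REQUEST                  # ~$0.10
rental_per_request = 4.42 / 3600 * SECONDS_PER_REQUEST          # ~$0.04

# Monthly costs at full utilization (one request at a time, 24/7)
requests_per_month = 30 * 24 * 3600 / SECONDS_PER_REQUEST       # ~77.8k requests
self_hosted_per_month = 10_000 / (5 * 12)                       # ~$166, fixed
api_per_month = api_per_request * requests_per_month            # ~$7,465
rental_per_month = 4.42 * 24 * 30                               # ~$3,182

print(f"Per request:  API ${api_per_request:.2f}, rental ${rental_per_request:.2f}")
print(f"Per month:    self-hosted ${self_hosted_per_month:.0f}, "
      f"API ${api_per_month:.0f}, rental ${rental_per_month:.0f}")
```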
## Business Considerations

Other business level considerations may include:

|                         | Self-Hosted | GPT 4.0 | GPU Rental |
| ----------------------- | ----------- | ------- | ---------- |
| Data Privacy            | ✅          | ❌      | ❌         |
| Offline Mode            | ✅          | ❌      | ❌         |
| Customization & Control | ✅          | ❌      | ✅         |
| Auditing                | ✅          | ❌      | ✅         |
| Setup Complexity        | ❌          | ✅      | ✅         |
| Setup Cost              | ❌          | ✅      | ✅         |
| Maintenance             | ❌          | ✅      | ❌         |

## Conclusion

The decision to run LLMs in the cloud or on in-house servers is not one-size-fits-all. It depends on your business's specific needs, budget, and security considerations. Cloud-based LLMs offer scalability and cost-efficiency but come with potential security concerns, while in-house servers provide greater control, customization, and cost predictability.

In some situations, using a mix of cloud and in-house resources can be the best way to go. Businesses need to assess their needs and assets carefully to pick the right method for using LLMs in the ever-changing world of AI technology.
@@ -1,3 +1,14 @@
 ---
-title: CPU vs. GPU
+title: GPU vs CPU What's the Difference?
 ---

## CPU vs. GPU

|                     | CPU                                                                       | GPU                                                     |
| ------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------- |
| **Function**        | Generalized component that handles main processing functions of a server | Specialized component that excels at parallel computing |
| **Processing**      | Designed for serial instruction processing                                | Designed for parallel instruction processing            |
| **Design**          | Fewer, more powerful cores                                                | More cores than CPUs, but less powerful than CPU cores  |
| **Best suited for** | General-purpose computing applications                                    | High-performance computing applications                 |
@@ -2,12 +2,61 @@
 title: Recommended AI Hardware by Budget
 ---
 
-## $1,000
-## $2,500
-## $5,000
-## $7,500
-## $10,000

> :warning: **Warning:** Do your own research before any purchase. Jan is not liable for compatibility, performance or other issues. Products can become outdated quickly.

## Entry-level PC Build at $1000

| Type                 | Item                                                       | Price    |
| :------------------- | :--------------------------------------------------------- | :------- |
| **CPU**              | [Intel Core i5 12400 2.5GHz 6-Core Processor](#)           | $170.99  |
| **CPU Cooler**       | [Intel Boxed Cooler (Included with CPU)](#)                | Included |
| **Motherboard**      | [ASUS Prime B660-PLUS DDR4 ATX LGA1700](#)                 | $169.95  |
| **GPU**              | [Nvidia RTX 3050 8GB - ZOTAC Gaming Twin Edge](#)          | $250     |
| **Memory**           | [16GB (2 x 8GB) G.Skill Ripjaws V DDR4-3200 C16](#)        | $49.99   |
| **Storage PCIe-SSD** | [ADATA XPG SX8200 Pro 512GB NVMe M.2 Solid State Drive](#) | $46.50   |
| **Power Supply**     | [Corsair CX-M Series CX450M 450W ATX 2.4 Power Supply](#)  | $89.99   |
| **Case**             | [be quiet! Pure Base 600 Black ATX Mid Tower Case](#)      | $97.00   |
| **Total cost**       |                                                            | $870     |

## Entry-level PC Build at $1,500

| Type                 | Item                                                      | Price   |
| :------------------- | :-------------------------------------------------------- | :------ |
| **CPU**              | [Intel Core i5 12600K 3.7GHz 6-Core Processor](#)         | $269.99 |
| **CPU Cooler**       | [be quiet! Dark Rock Pro 4](#)                            | $99.99  |
| **Motherboard**      | [ASUS ProArt B660-Creator DDR4 ATX LGA1700](#)            | $229.99 |
| **GPU**              | [Nvidia RTX 3050 8GB - ZOTAC Gaming Twin Edge](#)         | $349.99 |
| **Memory**           | [32GB (2 x 16GB) G.Skill Ripjaws V DDR4-3200 C16](#)      | $129.99 |
| **Storage PCIe-SSD** | [ADATA XPG SX8200 Pro 1TB NVMe M.2 Solid State Drive](#)  | $109.99 |
| **Power Supply**     | [Corsair RMx Series RM650x 650W ATX 2.4 Power Supply](#)  | $119.99 |
| **Case**             | [Corsair Carbide Series 200R ATX Mid Tower Case](#)       | $59.99  |
| **Total cost**       |                                                           | $1371   |

## Mid-range PC Build at $3000

| Type             | Item | Price     |
| :--------------- | :--- | :-------- |
| **CPU**          | [AMD Ryzen 9 7950X 4.5 GHz 16-Core Processor](https://de.pcpartpicker.com/product/22XJ7P/amd-ryzen-9-7950x-45-ghz-16-core-processor-100-100000514wof) | $556 |
| **CPU Cooler**   | [Thermalright Peerless Assassin 120 White 66.17 CFM CPU Cooler](https://de.pcpartpicker.com/product/476p99/thermalright-peerless-assassin-120-white-6617-cfm-cpu-cooler-pa120-white) | $59.99 |
| **Motherboard**  | [Gigabyte B650 GAMING X AX ATX AM5 Motherboard](https://de.pcpartpicker.com/product/YZgFf7/gigabyte-b650-gaming-x-ax-atx-am5-motherboard-b650-gaming-x-ax) | $199.99 |
| **Memory**       | [G.Skill Ripjaws S5 64 GB (2 x 32 GB) DDR5-6000 CL32 Memory](https://de.pcpartpicker.com/product/BJcG3C/gskill-ripjaws-s5-64-gb-2-x-32-gb-ddr5-6000-cl32-memory-f5-6000j3238g32gx2-rs5k) | $194 |
| **Storage**      | [Crucial P5 Plus 2 TB M.2-2280 PCIe 4.0 X4 NVME Solid State Drive](https://de.pcpartpicker.com/product/VZWzK8/crucial-p5-plus-2-tb-m2-2280-pcie-40-x4-nvme-solid-state-drive-ct2000p5pssd8) | $165.99 |
| **GPU**          | [PNY XLR8 Gaming VERTO EPIC-X RGB OC GeForce RTX 4090 24 GB](https://de.pcpartpicker.com/product/TvpzK8/pny-xlr8-gaming-verto-epic-x-rgb-oc-geforce-rtx-4090-24-gb-video-card-vcg409024tfxxpb1-o) | $1,599.99 |
| **Case**         | [Fractal Design Pop Air ATX Mid Tower Case](https://de.pcpartpicker.com/product/QnD7YJ/fractal-design-pop-air-atx-mid-tower-case-fd-c-poa1a-02) | $89.99 |
| **Power Supply** | [Thermaltake Toughpower GF A3 - TT Premium Edition 1050 W 80+ Gold](https://de.pcpartpicker.com/product/4v3NnQ/thermaltake-toughpower-gf-a3-1050-w-80-gold-certified-fully-modular-atx-power-supply-ps-tpd-1050fnfagu-l) | $139.99 |
| **Total cost**   |      | **$3000** |

## High-End PC Build at $6,000

| Type             | Item | Price        |
| :--------------- | :--- | :----------- |
| **CPU**          | [AMD Ryzen 9 3900X 3.8 GHz 12-Core Processor](https://pcpartpicker.com/product/tLCD4D/amd-ryzen-9-3900x-36-ghz-12-core-processor-100-100000023box) | $365.00 |
| **CPU Cooler**   | [Noctua NH-U12S chromax.black 55 CFM CPU Cooler](https://pcpartpicker.com/product/dMVG3C/noctua-nh-u12s-chromaxblack-55-cfm-cpu-cooler-nh-u12s-chromaxblack) | $89.95 |
| **Motherboard**  | [Asus ProArt X570-CREATOR WIFI ATX AM4 Motherboard](https://pcpartpicker.com/product/8y8bt6/asus-proart-x570-creator-wifi-atx-am4-motherboard-proart-x570-creator-wifi) | $599.99 |
| **Memory**       | [Corsair Vengeance LPX 128 GB (4 x 32 GB) DDR4-3200 CL16 Memory](https://pcpartpicker.com/product/tRH8TW/corsair-vengeance-lpx-128-gb-4-x-32-gb-ddr4-3200-memory-cmk128gx4m4e3200c16) | $249.99 |
| **Storage**      | [Sabrent Rocket 4 Plus 2 TB M.2-2280 PCIe 4.0 X4 NVME Solid State Drive](https://pcpartpicker.com/product/PMBhP6/sabrent-rocket-4-plus-2-tb-m2-2280-nvme-solid-state-drive-sb-rkt4p-2tb) | $129.99 |
| **GPU**          | [PNY RTX A-Series RTX A6000 48 GB Video Card](https://pcpartpicker.com/product/HWt9TW/pny-rtx-a-series-rtx-a6000-48-gb-video-card-vcnrtxa6000-pb) | $4269.00 |
| **Power Supply** | [EVGA SuperNOVA 850 G2 850 W 80+ Gold](https://pcpartpicker.com/product/LCfp99/evga-supernova-850-g2-850-w-80-gold-certified-fully-modular-atx-power-supply-220-g2-0850-xr) | $322.42 |
| **Total cost**   |      | **$6026.34** |
@@ -1,7 +1,182 @@
 ---
-title: Recommended AI Models by Hardware
+title: Selecting AI Hardware
 ---

When selecting a GPU for LLMs, remember that it's not just about the GPU itself. Consider the synergy with other components in your PC:

- **CPU**: To ensure efficient processing, pair your GPU with a powerful CPU. LLMs benefit from fast processors, so having a capable CPU is essential.
- **RAM**: Sufficient RAM is crucial for LLMs. They can be memory-intensive, and having enough RAM ensures smooth operation.
- **Cooling System**: LLMs can push your PC's hardware to the limit. A robust cooling system helps maintain optimal temperatures, preventing overheating and performance throttling.

By taking all of these factors into account, you can build a home PC setup that's well-equipped to handle the demands of running LLMs effectively and efficiently.

## GPU Selection

Selecting the optimal GPU for running Large Language Models (LLMs) on your home PC is a decision influenced by your budget and the specific LLMs you intend to work with. Your choice should strike a balance between performance, efficiency, and cost-effectiveness.

### GPU Comparison

| GPU                   | Price | Cores | VRAM (GB) | Bandwidth (TB/s) | Power |
| --------------------- | ----- | ----- | --------- | ---------------- | ----- |
| Nvidia H100           | 40000 | 18432 | 80        | 2                |       |
| Nvidia A100           | 15000 | 6912  | 80        |                  |       |
| Nvidia A100           | 7015  | 6912  | 40        |                  |       |
| Nvidia A10            | 2799  | 9216  | 24        |                  |       |
| Nvidia RTX A6000      | 4100  | 10752 | 48        | 0.768            |       |
| Nvidia RTX 6000       | 6800  | 4608  | 46        |                  |       |
| Nvidia RTX 4090 Ti    | 2000  | 18176 | 24        |                  |       |
| Nvidia RTX 4090       | 1800  | 16384 | 24        | 1.008            |       |
| Nvidia RTX 3090       | 1450  | 10496 | 24        |                  |       |
| Nvidia RTX 3080       | 700   | 8704  | 12        |                  |       |
| Nvidia RTX 3070       | 900   | 6144  | 8         |                  |       |
| Nvidia L4             | 2711  | 7424  | 24        |                  |       |
| Nvidia T4             | 2299  | 2560  | 16        |                  |       |
| AMD Radeon RX 6900 XT | 1000  | 5120  | 16        |                  |       |
| AMD Radeon RX 6800 XT | 420   | 4608  | 16        |                  |       |

\*Market prices as of Oct 2023 via Amazon/PCMag

### Other Considerations

In general, the following GPU features are important for running LLMs:

- **High VRAM:** LLMs are typically very large and complex models, so they require a GPU with a high amount of VRAM. This will allow the model to be loaded into memory and processed efficiently.
- **CUDA Compatibility:** When running LLMs on a GPU, CUDA compatibility is paramount. CUDA is NVIDIA's parallel computing platform, and it plays a vital role in accelerating deep learning tasks. LLMs, with their extensive matrix calculations, heavily rely on parallel processing. Ensuring your GPU supports CUDA is like having the right tool for the job. It allows the LLM to leverage the GPU's parallel processing capabilities, significantly speeding up model training and inference.
- **Number of CUDA, Tensor, and RT Cores:** High-performance NVIDIA GPUs have both CUDA and Tensor cores. These cores are responsible for executing the neural network computations that underpin LLMs' language understanding and generation. The more CUDA cores your GPU has, the better equipped it is to handle the massive computational load that LLMs impose. Tensor cores in your GPU further enhance LLM performance by accelerating the critical matrix operations integral to language modeling tasks.
- **Generation (Series)**: When selecting a GPU for LLMs, consider its generation or series (e.g., RTX 30 series). Newer GPU generations often come with improved architectures and features. For LLM tasks, opting for the latest generation can mean better performance, energy efficiency, and support for emerging AI technologies. Avoid purchasing RTX 2000-series GPUs, which are outdated nowadays.

## CPU Selection

Selecting the right CPU for running Large Language Models (LLMs) on your home PC is contingent on your budget and the specific LLMs you intend to work with. It's a decision that warrants careful consideration, as the CPU plays a pivotal role in determining the overall performance of your system.

In general, the following CPU features are important for running LLMs:

- **Number of Cores and Threads:** The number of CPU cores and threads influences parallel processing. More cores and threads help handle the complex computations involved in language models. For tasks like training and inference, a higher core/thread count can significantly improve processing speed and efficiency, enabling quicker results.
- **High clock speed:** The base clock speed, or base frequency, represents the CPU's default operating speed, so look for a CPU with a high clock speed. This will allow the model to process instructions more quickly, which can further improve performance.
- **Base Power (TDP):** LLMs often involve long training sessions and demanding computations. Therefore, a lower Thermal Design Power (TDP) is desirable. A CPU with a lower TDP consumes less power and generates less heat during prolonged LLM operations. This not only contributes to energy efficiency but also helps maintain stable temperatures in your system, preventing overheating and potential performance throttling.
- **Generation (Series):** Consider its generation or series (e.g., 9th Gen, 11th Gen Intel Core). Newer CPU generations often come with architectural improvements that enhance performance and efficiency. For LLM tasks, opting for a more recent generation can lead to faster and more efficient language model training and inference.
- **Support for AVX512:** AVX512 is a set of vector instruction extensions that can be used to accelerate machine learning workloads. Many LLMs are optimized to take advantage of AVX512, so it is important to make sure that your CPU supports this instruction set.
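On Linux, a quick way to confirm AVX2 and AVX-512 support is to inspect the CPU flags the kernel reports. This is a minimal, illustrative sketch that assumes a Linux system with `/proc/cpuinfo`:

```python
# Quick AVX2/AVX-512 capability check on Linux by inspecting the CPU flag list.
from pathlib import Path

flags = Path("/proc/cpuinfo").read_text().lower()
print("AVX2:", "avx2" in flags)
print("AVX-512F:", "avx512f" in flags)  # "F" (foundation) is the baseline AVX-512 subset
```

On other platforms, a CPU information tool (for example, CPU-Z on Windows) reports the same instruction-set flags.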
|
||||||
|
|
||||||
|
### Here are some CPU options for running LLMs:
|
||||||
|
|
||||||
|
1. **Intel Core i7-12700K**: Slightly less potent than the Core i9-12900K, the Intel Core i7-12700K is still a powerful CPU. With 12 cores and 20 threads, it strikes a balance between performance and cost-effectiveness. This CPU is well-suited for running mid-sized and large LLMs, making it a compelling option.
|
||||||
|
2. **Intel Core i9-12900K**: Positioned as a high-end CPU, the Intel Core i9-12900K packs a formidable punch with its 16 cores and 24 threads. It's one of the fastest CPUs available, making it an excellent choice for handling large and intricate LLMs. The abundance of cores and threads translates to exceptional parallel processing capabilities, which is crucial for tasks involving massive language models.
|
||||||
|
3. **AMD Ryzen 9 5950X**: Representing AMD's high-end CPU offering, the Ryzen 9 5950X boasts 16 cores and 32 threads. While it may not quite match the speed of the Core i9-12900K, it remains a robust and cost-effective choice. Its multicore prowess enables smooth handling of LLM workloads, and its affordability makes it an attractive alternative.
|
||||||
|
4. **AMD Ryzen 7 5800X**: Slightly less potent than the Ryzen 9 5950X, the Ryzen 7 5800X is still a formidable CPU with 8 cores and 16 threads. It's well-suited for running mid-sized and smaller LLMs, providing a compelling blend of performance and value.
|
||||||
|
|
||||||
|
For those operating within budget constraints, there are more budget-friendly CPU options:
|
||||||
|
|
||||||
|
- **Intel Core i5-12600K**: The Core i5-12600K is a capable mid-range CPU that can still handle LLMs effectively, though it may not be optimized for the largest or most complex models.
|
||||||
|
- **AMD Ryzen 5 5600X**: The Ryzen 5 5600X offers a balance of performance and affordability. It's suitable for running smaller to mid-sized LLMs without breaking the bank.
|
||||||
|
|
||||||
|
**When selecting a CPU for LLMs, consider the synergy with other components in your PC:**
|
||||||
|
|
||||||
|
- **GPU**: Pair your CPU with a powerful GPU to ensure smooth processing of LLMs. Some language models, particularly those used for AI, rely on GPU acceleration for optimal performance.
|
||||||
|
- **RAM**: Adequate RAM is essential for LLMs, as these models can be memory-intensive. Having enough RAM ensures that your CPU can operate efficiently without bottlenecks.
|
||||||
|
- **Cooling System**: Given the resource-intensive nature of LLMs, a robust cooling system is crucial to maintain optimal temperatures and prevent performance throttling.
|
||||||
|
|
||||||
|
By carefully weighing your budget and performance requirements and considering the interplay of components in your PC, you can assemble a well-rounded system that's up to the task of running LLMs efficiently.
|
||||||
|
|
||||||
|
> :memo: **Note:** It is important to note that these are just general recommendations. The specific CPU requirements for your LLM will vary depending on the specific model you are using and the tasks that you want to perform with it. If you are unsure what CPU to get, it is best to consult with an expert.
|
||||||
|
|
||||||
|
## RAM Selection
|
||||||
|
|
||||||
|
The amount of RAM you need to run an LLM depends on the size and complexity of the model, as well as the tasks you want to perform with it. For example, if you are simply running inference on a pre-trained LLM, you may be able to get away with using a relatively modest amount of RAM. However, if you are training a new LLM from scratch, or if you are running complex tasks like fine-tuning or code generation, you will need more RAM.
|
||||||
|
|
||||||
|
### Here is a general guide to RAM selection for running LLMs:
|
||||||
|
|
||||||
|
- **Capacity:** The amount of RAM you need will depend on the size and complexity of the LLM model you want to run. For inference, you will need at least 16GB of RAM, but 32GB or more is ideal for larger models and more complex tasks. For training, you will need at least 64GB of RAM, but 128GB or more is ideal for larger models and more complex tasks.
|
||||||
|
- **Speed:** LLMs can benefit from having fast RAM, so it is recommended to use DDR4 or DDR5 RAM with a speed of at least 3200MHz.
|
||||||
|
- **Latency:** RAM latency is the amount of time it takes for the CPU to access data in memory. Lower latency is better for performance, so it is recommended to look for RAM with a low latency rating.
|
||||||
|
- **Timing:** RAM timing is a set of parameters that control how the RAM operates. It is important to make sure that the RAM timing is compatible with your motherboard and CPU.
|
||||||
|
|
||||||
|
R**ecommended RAM** **options for running LLMs:**
|
||||||
|
|
||||||
|
- **Inference:** For inference on pre-trained LLMs, you will need at least 16GB of RAM. However, 32GB or more is ideal for larger models and more complex tasks.
|
||||||
|
- **Training:** For training LLMs from scratch, you will need at least 64GB of RAM. However, 128GB or more is ideal for larger models and more complex tasks.
|
||||||
|
|
||||||
|
In addition to the amount of RAM, it is also important to consider the speed of the RAM. LLMs can benefit from having fast RAM, so it is recommended to use DDR4 or DDR5 RAM with a speed of at least 3200MHz.
|
||||||
|
|
||||||
|
## Motherboard Selection

When picking a motherboard for running advanced language models, consider the specific model you want to use, the CPU and GPU in your build, and your budget. Here are some suggestions:

1. **ASUS ROG Maximus Z790 Hero:** A top-end motherboard with support for Intel's latest CPUs, fast DDR5 memory, and PCIe 5.0 devices. It also has strong cooling, which matters when running demanding language models.

2. **MSI MEG Z790 Ace:** Another high-end board with a feature set similar to the ASUS ROG Maximus, and equally well suited to running language models.

3. **Gigabyte Z790 Aorus Master:** A more budget-friendly option that still supports Intel's latest CPUs, DDR5 memory, and fast PCIe 5.0 devices. Its robust power delivery helps when running language models under sustained load.

If you're on a tighter budget, mid-range options like the **ASUS TUF Gaming Z790-Plus WiFi** or the **MSI MPG Z790 Edge WiFi DDR5** offer good performance without breaking the bank.

Whichever motherboard you pick, make sure it is compatible with your CPU and GPU and has the features you need, such as enough slots for your GPU and storage drives.

Other things to consider when choosing a motherboard for language models:

- **Cooling:** Language models can make your CPU work hard, so a motherboard with good cooling is a must to keep the CPU from overheating.

- **Memory:** Language models need lots of memory, so make sure the motherboard supports enough capacity and the memory type you want to use, such as DDR5 or DDR4.

- **Storage:** Language models can create and store a lot of data, so look for a motherboard with enough slots for your storage drives.

- **BIOS:** The BIOS controls your motherboard. Make sure it is up to date and has the latest features, especially if you plan to overclock or undervolt your system.
## Cooling System Selection

The CPU and GPU are the two components that heat up most during high-performance tasks. To prevent overheating, they have built-in temperature controls that automatically reduce performance when temperatures rise, so a reliable cooling system is needed to keep them cool and maintain optimal performance.

For laptops, fan-based cooling is the only choice: laptops rely on built-in fans and copper heat pipes to dissipate heat, and many gaming laptops have two separate fans, one for the CPU and one for the GPU.

For desktop computers, you can also install a water cooling system, which is highly effective but can be expensive, or simply add more cooling fans to keep your components cool.

Keep in mind that dust can accumulate in fan-based cooling systems and cause malfunctions, so clean the dust out periodically to keep your cooling system running smoothly.
## Using a MacBook to Run LLMs

An Apple MacBook equipped with either the M1 or the newer M2 Pro/Max processor is a strong option for running LLMs. These chips use Apple's Unified Memory Architecture (UMA), which changes how the CPU and GPU share memory resources and plays a significant role in improving LLM performance.

Unified Memory Architecture, as implemented in Apple's M1 and M2 series processors, gives the CPU and GPU efficient access to the same memory. Unlike traditional systems, where data must be shuttled between separate memory pools, UMA provides a single large pool that both processing units can access without unnecessary data transfers. This significantly reduces latency and increases data-access bandwidth, which translates into faster and higher-quality outputs.



The M1 and M2 Pro/Max chips offer different levels of unified memory bandwidth, which matters for data-intensive work like AI inference. The M1/M2 Pro supports up to 200 GB/s of unified memory bandwidth, while the M1/M2 Max supports up to 400 GB/s. MacBooks with these processors are therefore well equipped to handle demanding LLM workloads with speed and efficiency.
## Calculating VRAM Requirements for an LLM

Estimating the VRAM required to run a 13-billion-parameter Large Language Model (LLM) involves considering the model size, the precision of its weights, the batch size, the sequence length, and any additional overhead. Here's how you can estimate the VRAM required for a 13B LLM:

1. **Model Size**: Find out the size of the model in terms of the number of parameters. This information is typically provided in the model's documentation. A 13-billion-parameter model has 13,000,000,000 parameters.

2. **Bytes per Parameter**: Determine how much memory each weight occupies. Full-precision (FP32) weights use 4 bytes per parameter, FP16 uses 2 bytes, and 4-bit quantization uses roughly 0.5 bytes per parameter.

3. **Batch Size**: Decide on the batch size you want to use during inference. The batch size is how many input samples you process simultaneously; smaller batch sizes require less VRAM.

4. **Sequence Length**: Determine the average length of the input text sequences you'll be working with. Longer sequences enlarge the KV cache and therefore need more memory.

5. **Overhead**: Account for additional memory used by the KV cache, intermediate activations, and the inference framework itself. Overhead varies with your setup and grows with batch size and sequence length.

Use the following formula to estimate the VRAM required:

**VRAM Required (in bytes)** ≈ `Model Parameters x Bytes per Parameter + Overhead`

- **Model Parameters**: 13,000,000,000 parameters for a 13B LLM.

- **Bytes per Parameter**: Set by the precision or quantization of the weights (e.g., roughly 0.5 bytes for 4-bit).

- **Batch Size**: Choose your batch size; it affects the overhead term.

- **Sequence Length**: The average length of input sequences; it also affects the overhead term.

- **Overhead**: Any additional VRAM required for the KV cache, activations, and framework buffers.

Here's an example:

Suppose you want to run a 13B LLM with the following parameters:

- **Batch Size**: 4

- **Sequence Length**: 512 tokens

- **Bytes per Parameter**: 0.5 (4-bit quantization)

- **Estimated Overhead**: 2 GB (covering the KV cache at this batch size and sequence length, plus framework buffers)

VRAM for the weights = `13,000,000,000 x 0.5 = 6,500,000,000 bytes`

To convert this to gigabytes, divide by `1,073,741,824 (1 GB)`:

`6,500,000,000 / 1,073,741,824 ≈ 6.1 GB`

Adding the estimated overhead:

VRAM Required ≈ `6.1 GB + 2 GB ≈ 8 GB`

So, to run a 13-billion-parameter LLM quantized to 4 bits with the specified batch size, sequence length, and overhead, you would need roughly 8 gigabytes of VRAM on your GPU. Leave some additional VRAM headroom for stable operation, and test the setup in practice to monitor actual VRAM usage.
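
The back-of-the-envelope estimate above is easy to script. The sketch below is a minimal TypeScript version of the same formula; the function and field names are illustrative assumptions rather than part of Jan or any library, and the result is only a rough planning figure that you should check against real VRAM usage.

```typescript
// Rough VRAM estimate: weights (parameters x bytes per parameter) plus a
// user-supplied overhead for KV cache, activations, and framework buffers.
const GIB = 1_073_741_824; // bytes per gigabyte (GiB)

interface VramEstimateInput {
  parameters: number;        // e.g. 13_000_000_000 for a 13B model
  bytesPerParameter: number; // 4 = FP32, 2 = FP16, ~0.5 = 4-bit quantization
  overheadGiB: number;       // rough allowance for KV cache + framework buffers
}

function estimateVramGiB({ parameters, bytesPerParameter, overheadGiB }: VramEstimateInput): number {
  const weightBytes = parameters * bytesPerParameter;
  return weightBytes / GIB + overheadGiB;
}

// 13B model, 4-bit weights, ~2 GB overhead for batch 4 / 512-token sequences
const estimate = estimateVramGiB({
  parameters: 13_000_000_000,
  bytesPerParameter: 0.5,
  overheadGiB: 2,
});
console.log(`~${estimate.toFixed(1)} GB of VRAM`); // ~8.1 GB of VRAM
```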
<!--
## Macbook 8GB RAM

## Macbook 16GB RAM -->
|||||||
@ -4,5 +4,65 @@ title: Recommended AI Hardware by Model
|
|||||||
|
|
||||||
## Codellama 34b
|
## Codellama 34b
|
||||||
|
|
||||||
## Falcon 180b
|
### System Requirements:
|
||||||
|
|
||||||
|
**For example**: If you want to run [Codellama 7B](https://huggingface.co/TheBloke/CodeLlama-7B-Instruct-GPTQ/tree/main) on your own computer, you can take advantage of your GPU by using the GPTQ version of the model.

GPTQ is a quantization format that compresses the model parameters to 4-bit, which significantly reduces VRAM requirements. You can use the [oobabooga webui](https://github.com/oobabooga/text-generation-webui) or [JanAI](https://jan.ai/), simple interfaces that let you interact with different LLMs from your browser. They are easy to set up and run, and can be installed on Windows or Linux; see the installation page for details.

**For 7B Parameter Models (4-bit Quantization)**

| Format                                           | RAM Requirements     | VRAM Requirements | Minimum recommended GPU                     |
| ------------------------------------------------ | -------------------- | ----------------- | ------------------------------------------- |
| GPTQ (GPU inference)                             | 6GB (Swap to Load\*) | 6GB               | GTX 1660, 2060, RTX 3050, 3060, AMD 5700 XT |
| GGML / GGUF (CPU inference)                      | 4GB                  | 300MB             |                                             |
| Combination of GPTQ and GGML / GGUF (offloading) | 2GB                  | 2GB               |                                             |

**For 13B Parameter Models (4-bit Quantization)**

| Format                                           | RAM Requirements      | VRAM Requirements | Minimum recommended GPU                            |
| ------------------------------------------------ | --------------------- | ----------------- | -------------------------------------------------- |
| GPTQ (GPU inference)                             | 12GB (Swap to Load\*) | 10GB              |                                                    |
| GGML / GGUF (CPU inference)                      | 8GB                   | 500MB             | AMD 6900 XT, RTX 2060 12GB, 3060 12GB, 3080, A2000 |
| Combination of GPTQ and GGML / GGUF (offloading) | 10GB                  | 10GB              |                                                    |

**For 34B Parameter Models (4-bit Quantization)**

| Format                                           | RAM Requirements      | VRAM Requirements | Minimum recommended GPU                                              |
| ------------------------------------------------ | --------------------- | ----------------- | -------------------------------------------------------------------- |
| GPTQ (GPU inference)                             | 32GB (Swap to Load\*) | 20GB              |                                                                       |
| GGML / GGUF (CPU inference)                      | 20GB                  | 500MB             | RTX 3080 20GB, A4500, A5000, 3090, 4090, 6000, Tesla V100, Tesla P40 |
| Combination of GPTQ and GGML / GGUF (offloading) | 10GB                  | 4GB               |                                                                       |

**For 7B Parameter Models (8-bit Quantization)**

| Format                                           | RAM Requirements      | VRAM Requirements | Minimum recommended GPU                |
| ------------------------------------------------ | --------------------- | ----------------- | -------------------------------------- |
| GPTQ (GPU inference)                             | 24GB (Swap to Load\*) | 12GB              | RTX 3080, RTX 3080 Ti, RTX 3090, A5000 |
| GGML / GGUF (CPU inference)                      | 16GB                  | 1GB               | RTX 3060 12GB, RTX 3070, A2000         |
| Combination of GPTQ and GGML / GGUF (offloading) | 12GB                  | 4GB               | RTX 3060, RTX 3060 Ti, A2000           |

**For 13B Parameter Models (8-bit Quantization)**

| Format                                           | RAM Requirements      | VRAM Requirements | Minimum recommended GPU           |
| ------------------------------------------------ | --------------------- | ----------------- | --------------------------------- |
| GPTQ (GPU inference)                             | 36GB (Swap to Load\*) | 20GB              | RTX 4090, A6000, A6000 Ti, A8000  |
| GGML / GGUF (CPU inference)                      | 24GB                  | 2GB               | RTX 3080 20GB, RTX 3080 Ti, A5000 |
| Combination of GPTQ and GGML / GGUF (offloading) | 20GB                  | 8GB               | RTX 3080, RTX 3080 Ti, A5000      |

**For 34B Parameter Models (8-bit Quantization)**

| Format                                           | RAM Requirements      | VRAM Requirements | Minimum recommended GPU          |
| ------------------------------------------------ | --------------------- | ----------------- | -------------------------------- |
| GPTQ (GPU inference)                             | 64GB (Swap to Load\*) | 40GB              | A8000, A8000 Ti, A9000           |
| GGML / GGUF (CPU inference)                      | 40GB                  | 2GB               | RTX 4090, A6000, A6000 Ti, A8000 |
| Combination of GPTQ and GGML / GGUF (offloading) | 48GB                  | 20GB              | RTX 4090, A6000, A6000 Ti, A8000 |

> :memo: **Note**: \*System RAM (not VRAM) is required to load the model, in addition to having enough VRAM; it is not needed while running the model. You can use swap space if you do not have enough RAM.

### Performance Recommendations:

1. **Optimal Performance**: To achieve the best performance with CodeLlama models, consider investing in a high-end GPU such as an NVIDIA RTX 3090 or RTX 4090. For the largest models, such as those in the 65B and 70B class, a dual-GPU setup is recommended. Also make sure your system has sufficient RAM: 16 GB at a minimum, with 64 GB ideal for seamless operation.

2. **Budget-Friendly Approach**: If budget constraints are a concern, focus on CodeLlama GGML/GGUF models that fit comfortably within your system's available RAM. Keep in mind that while you can offload some model weights to system RAM to save GPU memory, this may come with a performance trade-off.

> :memo: **Note**: These recommendations are guidelines; the actual performance you experience depends on the specific task, the model implementation, and the other processes running on your system. Treat them as a starting point and adapt them to your unique requirements and constraints.
|||||||
@ -2,19 +2,21 @@
|
|||||||
title: Recommended AI Hardware by Use Case
|
title: Recommended AI Hardware by Use Case
|
||||||
---
|
---
|
||||||
|
|
||||||
## Personal Use
|
## Which AI Hardware to Choose Based on Your Use Case
|
||||||
|
|
||||||
### Entry-level Experimentation
|
Artificial intelligence (AI) is rapidly changing the world, and AI hardware is becoming increasingly important for businesses and individuals alike. Choosing the right hardware for your AI needs is crucial to get the best performance and results. Here are some tips for selecting AI hardware based on your specific use case and requirements.
|
||||||
|
|
||||||
### Personal Use
|
### Entry-level Experimentation:
|
||||||
|
|
||||||
- Macbook (16gb)
|
**Personal Use:**
|
||||||
- 3090
|
When venturing into the world of AI as an individual, your choice of hardware can significantly impact your experience. Here's a more detailed breakdown:
|
||||||
|
|
||||||
### Prosumer Use
|
- **Macbook (16GB):** A Macbook equipped with 16GB of RAM and either the M1 or the newer M2 Pro/Max processor is an excellent starting point for AI enthusiasts. These cutting-edge chips leverage Apple's innovative Unified Memory Architecture (UMA), which revolutionizes the way the CPU and GPU interact with memory resources. This advancement plays a pivotal role in enhancing the performance and capabilities of LLMs.
|
||||||
|
- **Nvidia GeForce RTX 3090:** This powerful graphics card is a solid alternative for AI beginners, offering exceptional performance for basic experiments.
|
||||||
|
|
||||||
- Apple Silicon
|
2. **Serious AI Work:**
|
||||||
- 2 x 3090 (48gb RAM)
|
|
||||||
|
- **2 x 3090 RTX Card (48GB RAM):** For those committed to more advanced AI projects, this configuration provides the necessary muscle. Its dual Nvidia GeForce RTX 3090 GPUs and ample RAM make it suitable for complex AI tasks and model training.
|
||||||
|
|
||||||
## Business Use
|
## Business Use
|
||||||
|
|
||||||
@ -25,7 +27,7 @@ Run a LLM trained on enterprise data (i.e. RAG)
|
|||||||
- Mac Studio M2 Ultra with 192GB unified memory
|
- Mac Studio M2 Ultra with 192GB unified memory
|
||||||
- Cannot train
|
- Cannot train
|
||||||
- RTX 6000
|
- RTX 6000
|
||||||
- Should we recommend 2 x 4090 instead?
|
- Should we recommend 2 x 4090 instead?
|
||||||
|
|
||||||
### For a 50-person Law Firm
|
### For a 50-person Law Firm
|
||||||
|
|
||||||
@ -59,4 +61,4 @@ Run Codellama with RAG on existing codebase
|
|||||||
### For a 10,000-person Enterprise
|
### For a 10,000-person Enterprise
|
||||||
|
|
||||||
- 8 x H100s
|
- 8 x H100s
|
||||||
- NVAIE with vGPUs
|
- NVAIE with vGPUs
|
||||||
|
|||||||
@ -1,3 +1,3 @@
|
|||||||
---
|
---
|
||||||
title: Self-hosted ChatGPT
|
title: Self-Hosted ChatGPT
|
||||||
---
|
---
|
||||||
@ -127,7 +127,7 @@ const config = {
|
|||||||
type: "docSidebar",
|
type: "docSidebar",
|
||||||
sidebarId: "solutionsSidebar",
|
sidebarId: "solutionsSidebar",
|
||||||
position: "left",
|
position: "left",
|
||||||
label: "Use Cases",
|
label: "Solutions",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
type: "docSidebar",
|
type: "docSidebar",
|
||||||
@ -175,7 +175,7 @@ const config = {
|
|||||||
to: "/platform",
|
to: "/platform",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
label: "Use Cases",
|
label: "Solutions",
|
||||||
to: "/solutions",
|
to: "/solutions",
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
|||||||
983
docs/package-lock.json
generated
@ -14,14 +14,16 @@
|
|||||||
"write-heading-ids": "docusaurus write-heading-ids"
|
"write-heading-ids": "docusaurus write-heading-ids"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@docusaurus/core": "2.4.1",
|
"@docusaurus/core": "^2.4.3",
|
||||||
"@docusaurus/preset-classic": "2.4.1",
|
"@docusaurus/preset-classic": "^2.4.3",
|
||||||
"@docusaurus/theme-live-codeblock": "^2.4.1",
|
"@docusaurus/theme-live-codeblock": "^2.4.3",
|
||||||
"@headlessui/react": "^1.7.17",
|
"@headlessui/react": "^1.7.17",
|
||||||
"@heroicons/react": "^2.0.18",
|
"@heroicons/react": "^2.0.18",
|
||||||
"@mdx-js/react": "^1.6.22",
|
"@mdx-js/react": "^1.6.22",
|
||||||
"autoprefixer": "^10.4.16",
|
"autoprefixer": "^10.4.16",
|
||||||
|
"axios": "^1.5.1",
|
||||||
"clsx": "^1.2.1",
|
"clsx": "^1.2.1",
|
||||||
|
"js-yaml": "^4.1.0",
|
||||||
"postcss": "^8.4.30",
|
"postcss": "^8.4.30",
|
||||||
"prism-react-renderer": "^1.3.5",
|
"prism-react-renderer": "^1.3.5",
|
||||||
"react": "^17.0.2",
|
"react": "^17.0.2",
|
||||||
|
|||||||
236
docs/sidebars.js
@ -43,28 +43,28 @@ const sidebars = {
|
|||||||
|
|
||||||
// Note: Tab name is "Use Cases"
|
// Note: Tab name is "Use Cases"
|
||||||
solutionsSidebar: [
|
solutionsSidebar: [
|
||||||
"solutions/solutions",
|
// "solutions/solutions",
|
||||||
{
|
{
|
||||||
type: "category",
|
type: "category",
|
||||||
label: "Use cases",
|
label: "Solutions",
|
||||||
collapsible: true,
|
collapsible: true,
|
||||||
collapsed: false,
|
collapsed: false,
|
||||||
items: ["solutions/personal-ai", "solutions/self-hosted"],
|
items: ["solutions/self-hosted", "solutions/personal-ai"],
|
||||||
},
|
|
||||||
{
|
|
||||||
type: "category",
|
|
||||||
label: "Industries",
|
|
||||||
collapsible: true,
|
|
||||||
collapsed: false,
|
|
||||||
items: [
|
|
||||||
"solutions/industries/software",
|
|
||||||
"solutions/industries/education",
|
|
||||||
"solutions/industries/law",
|
|
||||||
"solutions/industries/public-sector",
|
|
||||||
"solutions/industries/finance",
|
|
||||||
"solutions/industries/healthcare",
|
|
||||||
],
|
|
||||||
},
|
},
|
||||||
|
// {
|
||||||
|
// type: "category",
|
||||||
|
// label: "Industries",
|
||||||
|
// collapsible: true,
|
||||||
|
// collapsed: false,
|
||||||
|
// items: [
|
||||||
|
// "solutions/industries/software",
|
||||||
|
// "solutions/industries/education",
|
||||||
|
// "solutions/industries/law",
|
||||||
|
// "solutions/industries/public-sector",
|
||||||
|
// "solutions/industries/finance",
|
||||||
|
// "solutions/industries/healthcare",
|
||||||
|
// ],
|
||||||
|
// },
|
||||||
],
|
],
|
||||||
|
|
||||||
docsSidebar: [
|
docsSidebar: [
|
||||||
@ -83,101 +83,101 @@ const sidebars = {
|
|||||||
],
|
],
|
||||||
|
|
||||||
hardwareSidebar: [
|
hardwareSidebar: [
|
||||||
{
|
// {
|
||||||
type: "category",
|
// type: "category",
|
||||||
label: "Overview",
|
// label: "Overview",
|
||||||
collapsible: true,
|
// collapsible: true,
|
||||||
collapsed: true,
|
// collapsed: true,
|
||||||
link: { type: "doc", id: "hardware/hardware" },
|
// link: { type: "doc", id: "hardware/hardware" },
|
||||||
items: [
|
// items: [
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "Cloud vs. Buy",
|
// label: "Cloud vs. Self-Hosting",
|
||||||
id: "hardware/overview/cloud-vs-buy",
|
// id: "hardware/overview/cloud-vs-self-hosting",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "CPUs vs. GPUs",
|
// label: "CPUs vs. GPUs",
|
||||||
id: "hardware/overview/cpu-vs-gpu",
|
// id: "hardware/overview/cpu-vs-gpu",
|
||||||
},
|
// },
|
||||||
],
|
// ],
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "category",
|
// type: "category",
|
||||||
label: "Recommendations",
|
// label: "Recommendations",
|
||||||
collapsible: true,
|
// collapsible: true,
|
||||||
collapsed: false,
|
// collapsed: false,
|
||||||
items: [
|
// items: [
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "By Hardware",
|
// label: "By Hardware",
|
||||||
id: "hardware/recommendations/by-hardware",
|
// id: "hardware/recommendations/by-hardware",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "By Budget",
|
// label: "By Budget",
|
||||||
id: "hardware/recommendations/by-budget",
|
// id: "hardware/recommendations/by-budget",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "By Model",
|
// label: "By Model",
|
||||||
id: "hardware/recommendations/by-model",
|
// id: "hardware/recommendations/by-model",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "By Use Case",
|
// label: "By Use Case",
|
||||||
id: "hardware/recommendations/by-usecase",
|
// id: "hardware/recommendations/by-usecase",
|
||||||
},
|
// },
|
||||||
],
|
// ],
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "category",
|
// type: "category",
|
||||||
label: "Anatomy of a Thinking Machine",
|
// label: "Anatomy of a Thinking Machine",
|
||||||
collapsible: true,
|
// collapsible: true,
|
||||||
collapsed: true,
|
// collapsed: true,
|
||||||
link: { type: "doc", id: "hardware/concepts/concepts" },
|
// link: { type: "doc", id: "hardware/concepts/concepts" },
|
||||||
items: [
|
// items: [
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "Chassis",
|
// label: "Chassis",
|
||||||
id: "hardware/concepts/chassis",
|
// id: "hardware/concepts/chassis",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "Motherboard",
|
// label: "Motherboard",
|
||||||
id: "hardware/concepts/motherboard",
|
// id: "hardware/concepts/motherboard",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "CPU and RAM",
|
// label: "CPU and RAM",
|
||||||
id: "hardware/concepts/cpu-and-ram",
|
// id: "hardware/concepts/cpu-and-ram",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "GPU and VRAM",
|
// label: "GPU and VRAM",
|
||||||
id: "hardware/concepts/gpu-and-vram",
|
// id: "hardware/concepts/gpu-and-vram",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "Storage",
|
// label: "Storage",
|
||||||
id: "hardware/concepts/storage",
|
// id: "hardware/concepts/storage",
|
||||||
},
|
// },
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "Network",
|
// label: "Network",
|
||||||
id: "hardware/concepts/network",
|
// id: "hardware/concepts/network",
|
||||||
},
|
// },
|
||||||
|
|
||||||
{
|
// {
|
||||||
type: "doc",
|
// type: "doc",
|
||||||
label: "Power Supply",
|
// label: "Power Supply",
|
||||||
id: "hardware/concepts/power",
|
// id: "hardware/concepts/power",
|
||||||
},
|
// },
|
||||||
],
|
// ],
|
||||||
},
|
// },
|
||||||
{
|
{
|
||||||
type: "category",
|
type: "category",
|
||||||
label: "Community Examples",
|
label: "Hardware Examples",
|
||||||
collapsible: true,
|
collapsible: true,
|
||||||
collapsed: true,
|
collapsed: true,
|
||||||
link: { type: "doc", id: "hardware/community" },
|
link: { type: "doc", id: "hardware/community" },
|
||||||
@ -194,7 +194,7 @@ const sidebars = {
|
|||||||
type: "category",
|
type: "category",
|
||||||
label: "About Jan",
|
label: "About Jan",
|
||||||
collapsible: true,
|
collapsible: true,
|
||||||
collapsed: false,
|
collapsed: true,
|
||||||
link: { type: "doc", id: "about/about" },
|
link: { type: "doc", id: "about/about" },
|
||||||
items: [
|
items: [
|
||||||
"about/roadmap",
|
"about/roadmap",
|
||||||
@ -203,14 +203,26 @@ const sidebars = {
|
|||||||
label: "Careers",
|
label: "Careers",
|
||||||
href: "https://janai.bamboohr.com/careers",
|
href: "https://janai.bamboohr.com/careers",
|
||||||
},
|
},
|
||||||
"about/brand-assets",
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "category",
|
||||||
|
label: "Events",
|
||||||
|
collapsible: true,
|
||||||
|
collapsed: true,
|
||||||
|
items: [
|
||||||
|
{
|
||||||
|
type: "doc",
|
||||||
|
label: "Ho Chi Minh City (Oct 2023)",
|
||||||
|
id: "events/hcmc-oct23",
|
||||||
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
type: "category",
|
type: "category",
|
||||||
label: "Company Handbook",
|
label: "Company Handbook",
|
||||||
collapsible: true,
|
collapsible: true,
|
||||||
collapsed: false,
|
collapsed: true,
|
||||||
link: { type: "doc", id: "handbook/handbook" },
|
link: { type: "doc", id: "handbook/handbook" },
|
||||||
items: ["handbook/remote-work"],
|
items: ["handbook/remote-work"],
|
||||||
},
|
},
|
||||||
|
|||||||
@ -1,28 +1,29 @@
|
|||||||
import React from "react";
|
import React, { useState, useEffect } from "react";
|
||||||
import { Fragment } from "react";
|
import { Fragment } from "react";
|
||||||
import { Menu, Transition } from "@headlessui/react";
|
import { Menu, Transition } from "@headlessui/react";
|
||||||
import { ChevronDownIcon } from "@heroicons/react/20/solid";
|
import { ChevronDownIcon } from "@heroicons/react/20/solid";
|
||||||
|
import axios from "axios";
|
||||||
|
|
||||||
const items = [
|
const systemsTemplate = [
|
||||||
{
|
{
|
||||||
name: "Download for Mac (M1/M2)",
|
name: "Download for Mac (M1/M2)",
|
||||||
href: "https://github.com/janhq/jan/releases/download/v0.1.2/Jan-0.1.2-arm64.dmg",
|
|
||||||
logo: require("@site/static/img/apple-logo-white.png").default,
|
logo: require("@site/static/img/apple-logo-white.png").default,
|
||||||
|
fileFormat: "{appname}-mac-arm64-{tag}.dmg",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Download for Mac (Intel)",
|
name: "Download for Mac (Intel)",
|
||||||
href: "https://github.com/janhq/jan/releases/download/v0.1.2/Jan-0.1.2-arm64.dmg",
|
|
||||||
logo: require("@site/static/img/apple-logo-white.png").default,
|
logo: require("@site/static/img/apple-logo-white.png").default,
|
||||||
|
fileFormat: "{appname}-mac-x64-{tag}.dmg",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Download for Windows",
|
name: "Download for Windows",
|
||||||
href: "https://static.vecteezy.com/system/resources/previews/004/243/615/non_2x/creative-coming-soon-teaser-background-free-vector.jpg",
|
|
||||||
logo: require("@site/static/img/windows-logo-white.png").default,
|
logo: require("@site/static/img/windows-logo-white.png").default,
|
||||||
|
fileFormat: "{appname}-win-x64-{tag}.exe",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Download for Linux",
|
name: "Download for Linux",
|
||||||
href: "https://static.vecteezy.com/system/resources/previews/004/243/615/non_2x/creative-coming-soon-teaser-background-free-vector.jpg",
|
|
||||||
logo: require("@site/static/img/linux-logo-white.png").default,
|
logo: require("@site/static/img/linux-logo-white.png").default,
|
||||||
|
fileFormat: "{appname}-linux-amd64-{tag}.deb",
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -31,22 +32,81 @@ function classNames(...classes) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export default function Dropdown() {
|
export default function Dropdown() {
|
||||||
|
const [systems, setSystems] = useState(systemsTemplate);
|
||||||
|
const [defaultSystem, setDefaultSystem] = useState(systems[0]);
|
||||||
|
|
||||||
|
const getLatestReleaseInfo = async (repoOwner, repoName) => {
|
||||||
|
const url = `https://api.github.com/repos/${repoOwner}/${repoName}/releases/latest`;
|
||||||
|
try {
|
||||||
|
const response = await axios.get(url);
|
||||||
|
return response.data;
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const extractAppName = (fileName) => {
|
||||||
|
// Extract appname using a regex that matches the provided file formats
|
||||||
|
const regex = /^(.*?)-(?:mac|win|linux)-(?:arm64|x64|amd64)-.*$/;
|
||||||
|
const match = fileName.match(regex);
|
||||||
|
return match ? match[1] : null;
|
||||||
|
};
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const updateDownloadLinks = async () => {
|
||||||
|
try {
|
||||||
|
const releaseInfo = await getLatestReleaseInfo("janhq", "jan");
|
||||||
|
|
||||||
|
// Extract appname from the first asset name
|
||||||
|
const firstAssetName = releaseInfo.assets[0].name;
|
||||||
|
const appname = extractAppName(firstAssetName);
|
||||||
|
|
||||||
|
if (!appname) {
|
||||||
|
console.error("Failed to extract appname from file name:", firstAssetName);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove 'v' at the start of the tag_name
|
||||||
|
const tag = releaseInfo.tag_name.startsWith("v")
|
||||||
|
? releaseInfo.tag_name.substring(1)
|
||||||
|
: releaseInfo.tag_name;
|
||||||
|
|
||||||
|
const updatedSystems = systems.map((system) => {
|
||||||
|
const downloadUrl = system.fileFormat
|
||||||
|
.replace("{appname}", appname)
|
||||||
|
.replace("{tag}", tag);
|
||||||
|
return {
|
||||||
|
...system,
|
||||||
|
href: `https://github.com/janhq/jan/releases/download/${releaseInfo.tag_name}/${downloadUrl}`,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
setSystems(updatedSystems);
|
||||||
|
setDefaultSystem(updatedSystems[0]);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Failed to update download links:", error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
updateDownloadLinks();
|
||||||
|
}, []);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="inline-flex align-items-stretch">
|
<div className="inline-flex align-items-stretch">
|
||||||
{/* TODO dynamically detect users OS through browser */}
|
|
||||||
<a
|
<a
|
||||||
className="cursor-pointer relative inline-flex items-center rounded-l-md border-0 px-3.5 py-2.5 text-base font-semibold text-white bg-indigo-600 dark:bg-indigo-500 hover:bg-indigo-500 dark:hover:bg-indigo-400 hover:text-white"
|
className="cursor-pointer relative inline-flex items-center rounded-l-md border-0 px-3.5 py-2.5 text-base font-semibold text-white bg-blue-600 dark:bg-blue-500 hover:bg-blue-500 dark:hover:bg-blue-400 hover:text-white"
|
||||||
href={items[0].href}
|
href={defaultSystem.href}
|
||||||
>
|
>
|
||||||
<img
|
<img
|
||||||
src={require("@site/static/img/apple-logo-white.png").default}
|
src={require("@site/static/img/apple-logo-white.png").default}
|
||||||
alt="Logo"
|
alt="Logo"
|
||||||
className="h-5 mr-3 -mt-1"
|
className="h-5 mr-3 -mt-1"
|
||||||
/>
|
/>
|
||||||
Download for Mac (Silicon)
|
{defaultSystem.name}
|
||||||
</a>
|
</a>
|
||||||
<Menu as="div" className="relative -ml-px block">
|
<Menu as="div" className="relative -ml-px block">
|
||||||
<Menu.Button className="cursor-pointer relative inline-flex items-center rounded-r-md border-0 border-l border-gray-300 active:border-l active:border-white h-full text-white bg-indigo-600 dark:bg-indigo-500 hover:bg-indigo-500 dark:hover:bg-indigo-400">
|
<Menu.Button className="cursor-pointer relative inline-flex items-center rounded-r-md border-0 border-l border-gray-300 active:border-l active:border-white h-full text-white bg-blue-600 dark:bg-blue-500 hover:bg-blue-500 dark:hover:bg-blue-400">
|
||||||
<span className="sr-only">Open OS options</span>
|
<span className="sr-only">Open OS options</span>
|
||||||
<ChevronDownIcon className="h-5 w-5" aria-hidden="true" />
|
<ChevronDownIcon className="h-5 w-5" aria-hidden="true" />
|
||||||
</Menu.Button>
|
</Menu.Button>
|
||||||
@ -59,26 +119,26 @@ export default function Dropdown() {
|
|||||||
leaveFrom="transform opacity-100 scale-100"
|
leaveFrom="transform opacity-100 scale-100"
|
||||||
leaveTo="transform opacity-0 scale-95"
|
leaveTo="transform opacity-0 scale-95"
|
||||||
>
|
>
|
||||||
<Menu.Items className="absolute right-0 z-10 mt-2 w-72 text-left origin-top-right rounded-md bg-indigo-600 dark:bg-indigo-500 shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none">
|
<Menu.Items className="absolute right-0 z-10 mt-2 w-72 text-left origin-top-right rounded-md bg-blue-600 dark:bg-blue-500 shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none">
|
||||||
<div className="py-1">
|
<div className="py-1">
|
||||||
{items.map((item) => (
|
{systems.map((system) => (
|
||||||
<Menu.Item key={item.name}>
|
<Menu.Item key={system.name}>
|
||||||
{({ active }) => (
|
{({ active }) => (
|
||||||
<a
|
<a
|
||||||
href={item.href}
|
href={system.href}
|
||||||
className={classNames(
|
className={classNames(
|
||||||
active
|
active
|
||||||
? "bg-indigo-500 dark:hover:bg-indigo-400 hover:text-white"
|
? "bg-blue-500 dark:hover:bg-blue-400 hover:text-white"
|
||||||
: "text-white",
|
: "text-white",
|
||||||
"block px-4 py-2"
|
"block px-4 py-2"
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
<img
|
<img
|
||||||
src={item.logo}
|
src={system.logo}
|
||||||
alt="Logo"
|
alt="Logo"
|
||||||
className="w-3 mr-3 -mt-1"
|
className="w-3 mr-3 -mt-1"
|
||||||
/>
|
/>
|
||||||
{item.name}
|
{system.name}
|
||||||
</a>
|
</a>
|
||||||
)}
|
)}
|
||||||
</Menu.Item>
|
</Menu.Item>
|
||||||
|
|||||||
@ -5,7 +5,7 @@ import {
|
|||||||
LockClosedIcon,
|
LockClosedIcon,
|
||||||
} from "@heroicons/react/20/solid";
|
} from "@heroicons/react/20/solid";
|
||||||
|
|
||||||
const features = [
|
const systems = [
|
||||||
{
|
{
|
||||||
name: "Mac",
|
name: "Mac",
|
||||||
description:
|
description:
|
||||||
@ -47,20 +47,20 @@ export default function HomepageDownloads() {
|
|||||||
</div>
|
</div>
|
||||||
<div className="mx-auto mt-16 max-w-2xl sm:mt-20 lg:mt-24 lg:max-w-none">
|
<div className="mx-auto mt-16 max-w-2xl sm:mt-20 lg:mt-24 lg:max-w-none">
|
||||||
<dl className="grid max-w-xl grid-cols-1 gap-x-8 gap-y-16 lg:max-w-none lg:grid-cols-3">
|
<dl className="grid max-w-xl grid-cols-1 gap-x-8 gap-y-16 lg:max-w-none lg:grid-cols-3">
|
||||||
{features.map((feature) => (
|
{systems.map((system) => (
|
||||||
<div key={feature.name} className="flex flex-col">
|
<div key={system.name} className="flex flex-col">
|
||||||
<dt className="flex items-center gap-x-3 text-base font-semibold leading-7 text-gray-900 dark: text-white">
|
<dt className="flex items-center gap-x-3 text-base font-semibold leading-7 text-gray-900 dark: text-white">
|
||||||
<feature.icon
|
<system.icon
|
||||||
className="h-5 w-5 flex-none text-indigo-600 dark:text-indigo-400"
|
className="h-5 w-5 flex-none text-indigo-600 dark:text-indigo-400"
|
||||||
aria-hidden="true"
|
aria-hidden="true"
|
||||||
/>
|
/>
|
||||||
{feature.name}
|
{system.name}
|
||||||
</dt>
|
</dt>
|
||||||
<dd className="mt-4 flex flex-auto flex-col text-base leading-7 text-gray-600 dark:text-gray-300">
|
<dd className="mt-4 flex flex-auto flex-col text-base leading-7 text-gray-600 dark:text-gray-300">
|
||||||
<p className="flex-auto">{feature.description}</p>
|
<p className="flex-auto">{system.description}</p>
|
||||||
<p className="mt-6">
|
<p className="mt-6">
|
||||||
<a
|
<a
|
||||||
href={feature.href}
|
href={system.href}
|
||||||
className="text-sm font-semibold leading-6 text-indigo-600 dark:text-indigo-400"
|
className="text-sm font-semibold leading-6 text-indigo-600 dark:text-indigo-400"
|
||||||
>
|
>
|
||||||
Learn more <span aria-hidden="true">→</span>
|
Learn more <span aria-hidden="true">→</span>
|
||||||
|
|||||||
@ -8,7 +8,7 @@ export default function HomepageHero() {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="bg-white dark:bg-gray-900">
|
<div className="bg-white dark:bg-gray-900">
|
||||||
<div className="relative isolate pt-14">
|
<div className="relative isolate md:pt-14 pt-0">
|
||||||
{/* Background top gradient styling */}
|
{/* Background top gradient styling */}
|
||||||
{colorMode === "dark" ? (
|
{colorMode === "dark" ? (
|
||||||
<div
|
<div
|
||||||
@ -39,7 +39,7 @@ export default function HomepageHero() {
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Main hero block */}
|
{/* Main hero block */}
|
||||||
<div className="py-24 sm:py-32 lg:pb-40 animate-in fade-in zoom-in-50 duration-1000 ">
|
<div className="py-24 lg:pb-40 animate-in fade-in zoom-in-50 duration-1000 ">
|
||||||
<div className="mx-auto max-w-7xl px-6 lg:px-8">
|
<div className="mx-auto max-w-7xl px-6 lg:px-8">
|
||||||
{/* Hero text and buttons */}
|
{/* Hero text and buttons */}
|
||||||
<div className="mx-auto max-w-2xl text-center">
|
<div className="mx-auto max-w-2xl text-center">
|
||||||
@ -47,7 +47,7 @@ export default function HomepageHero() {
|
|||||||
Run your own AI
|
Run your own AI
|
||||||
</h1>
|
</h1>
|
||||||
<p className="mt-6 text-lg leading-8 text-gray-600 dark:text-gray-300">
|
<p className="mt-6 text-lg leading-8 text-gray-600 dark:text-gray-300">
|
||||||
Jan lets you run AI on your own hardware. 1-click to install the
|
Jan lets you run AI on your own hardware. 1-click to install the
|
||||||
latest open-source models. Monitor and manage software-hardware
|
latest open-source models. Monitor and manage software-hardware
|
||||||
performance.
|
performance.
|
||||||
<br></br>
|
<br></br>
|
||||||
@ -60,7 +60,7 @@ export default function HomepageHero() {
|
|||||||
<Dropdown />
|
<Dropdown />
|
||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
className="cursor-pointer relative inline-flex items-center rounded px-3.5 py-2 dark:py-2.5 text-base font-semibold text-indigo-600 bg-white border-indigo-600 dark:border-0 hover:bg-indigo-600 dark:hover:bg-indigo-500 hover:text-white"
|
className="cursor-pointer relative inline-flex items-center rounded px-3.5 py-2 dark:py-2.5 text-base font-semibold text-blue-600 bg-white border-blue-600 dark:border-0 hover:bg-blue-600 dark:hover:bg-blue-500 hover:text-white"
|
||||||
onClick={() =>
|
onClick={() =>
|
||||||
window.open(
|
window.open(
|
||||||
"https://github.com/janhq/jan",
|
"https://github.com/janhq/jan",
|
||||||
@ -79,14 +79,15 @@ export default function HomepageHero() {
|
|||||||
src={
|
src={
|
||||||
colorMode === "dark"
|
colorMode === "dark"
|
||||||
? // TODO replace with darkmode image
|
? // TODO replace with darkmode image
|
||||||
require("@site/static/img/desktop-llm-chat-dark.png").default
|
require("@site/static/img/desktop-llm-chat-dark.png")
|
||||||
: require("@site/static/img/desktop-llm-chat-light.png").default
|
.default
|
||||||
|
: require("@site/static/img/desktop-llm-chat-light.png")
|
||||||
|
.default
|
||||||
}
|
}
|
||||||
alt="App screenshot"
|
alt="App screenshot"
|
||||||
width={2432}
|
width={2432}
|
||||||
className="mt-16 rounded-lg md:rounded-2xl lg:rounded-3xl bg-white/5 shadow-2xl ring-1 ring-white/10 sm:mt-24"
|
className="mt-16 rounded-lg md:rounded-2xl lg:rounded-3xl bg-white/5 shadow-2xl ring-1 ring-white/10 sm:mt-24"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{/* Background top gradient styling */}
|
{/* Background top gradient styling */}
|
||||||
|
|||||||
@ -25,7 +25,7 @@
|
|||||||
|
|
||||||
/* For readability concerns, you should choose a lighter palette in dark mode. */
|
/* For readability concerns, you should choose a lighter palette in dark mode. */
|
||||||
[data-theme="dark"] {
|
[data-theme="dark"] {
|
||||||
--ifm-color-primary: #2563EB; /* New Primary Blue */
|
--ifm-color-primary: #ffffff; /* New Primary Blue */
|
||||||
--ifm-color-primary-dark: #204FCF; /* Darker Blue */
|
--ifm-color-primary-dark: #204FCF; /* Darker Blue */
|
||||||
--ifm-color-primary-darker: #1B45B7; /* Even Darker Blue */
|
--ifm-color-primary-darker: #1B45B7; /* Even Darker Blue */
|
||||||
--ifm-color-primary-darkest: #163C9D; /* Darkest Blue */
|
--ifm-color-primary-darkest: #163C9D; /* Darkest Blue */
|
||||||
@ -34,3 +34,13 @@
|
|||||||
--ifm-color-primary-lightest: #3A8BFF; /* Lightest Blue */
|
--ifm-color-primary-lightest: #3A8BFF; /* Lightest Blue */
|
||||||
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
--docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.footer.footer--dark {
|
||||||
|
--ifm-footer-background-color: #1a212f;
|
||||||
|
--ifm-footer-color: var(--ifm-footer-link-color);
|
||||||
|
--ifm-footer-link-color: var(--ifm-color-secondary);
|
||||||
|
--ifm-footer-title-color: var(--ifm-color-white);
|
||||||
|
|
||||||
|
background-color: var(--ifm-footer-background-color);
|
||||||
|
color: var(--ifm-footer-color)
|
||||||
|
}
|
||||||
BIN
docs/static/img/2x4090-workstation.png
vendored
Normal file
|
After Width: | Height: | Size: 1.1 MiB |
BIN
docs/static/img/hcmc-villa-1.jpeg
vendored
Normal file
|
After Width: | Height: | Size: 228 KiB |
BIN
docs/static/img/hcmc-villa-2.jpeg
vendored
Normal file
|
After Width: | Height: | Size: 332 KiB |
1178
docs/yarn.lock
@ -95,6 +95,7 @@ const createConversation = (conversation: any) =>
|
|||||||
resolve(undefined);
|
resolve(undefined);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
const createMessage = (message: any) =>
|
const createMessage = (message: any) =>
|
||||||
new Promise((resolve) => {
|
new Promise((resolve) => {
|
||||||
if (window && window.electronAPI) {
|
if (window && window.electronAPI) {
|
||||||
|
|||||||
@ -5,54 +5,78 @@ const { app } = require("electron");
|
|||||||
const MODEL_TABLE_CREATION = `
|
const MODEL_TABLE_CREATION = `
|
||||||
CREATE TABLE IF NOT EXISTS models (
|
CREATE TABLE IF NOT EXISTS models (
|
||||||
id TEXT PRIMARY KEY,
|
id TEXT PRIMARY KEY,
|
||||||
slug TEXT NOT NULL,
|
|
||||||
name TEXT NOT NULL,
|
name TEXT NOT NULL,
|
||||||
description TEXT NOT NULL,
|
short_description TEXT NOT NULL,
|
||||||
avatar_url TEXT,
|
avatar_url TEXT,
|
||||||
long_description TEXT NOT NULL,
|
long_description TEXT NOT NULL,
|
||||||
technical_description TEXT NOT NULL,
|
|
||||||
author TEXT NOT NULL,
|
author TEXT NOT NULL,
|
||||||
version TEXT NOT NULL,
|
version TEXT NOT NULL,
|
||||||
model_url TEXT NOT NULL,
|
model_url TEXT NOT NULL,
|
||||||
nsfw INTEGER NOT NULL,
|
nsfw INTEGER NOT NULL,
|
||||||
greeting TEXT NOT NULL,
|
tags TEXT NOT NULL,
|
||||||
|
default_greeting TEXT NOT NULL,
|
||||||
type TEXT NOT NULL,
|
type TEXT NOT NULL,
|
||||||
file_name TEXT NOT NULL,
|
|
||||||
download_url TEXT NOT NULL,
|
|
||||||
start_download_at INTEGER DEFAULT -1,
|
|
||||||
finish_download_at INTEGER DEFAULT -1,
|
|
||||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||||
);`;
|
);`;
|
||||||
|
|
||||||
|
const MODEL_VERSION_TABLE_CREATION = `
|
||||||
|
CREATE TABLE IF NOT EXISTS model_versions (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
quant_method TEXT NOT NULL,
|
||||||
|
bits INTEGER NOT NULL,
|
||||||
|
size INTEGER NOT NULL,
|
||||||
|
max_ram_required INTEGER NOT NULL,
|
||||||
|
usecase TEXT NOT NULL,
|
||||||
|
download_link TEXT NOT NULL,
|
||||||
|
model_id TEXT NOT NULL,
|
||||||
|
start_download_at INTEGER DEFAULT -1,
|
||||||
|
finish_download_at INTEGER DEFAULT -1,
|
||||||
|
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||||
|
);`;
|
||||||
|
|
||||||
const MODEL_TABLE_INSERTION = `
|
const MODEL_TABLE_INSERTION = `
|
||||||
INSERT INTO models (
|
INSERT OR IGNORE INTO models (
|
||||||
id,
|
id,
|
||||||
slug,
|
|
||||||
name,
|
name,
|
||||||
description,
|
short_description,
|
||||||
avatar_url,
|
avatar_url,
|
||||||
long_description,
|
long_description,
|
||||||
technical_description,
|
|
||||||
author,
|
author,
|
||||||
version,
|
version,
|
||||||
model_url,
|
model_url,
|
||||||
nsfw,
|
nsfw,
|
||||||
greeting,
|
tags,
|
||||||
type,
|
default_greeting,
|
||||||
file_name,
|
type
|
||||||
download_url,
|
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)`;
|
||||||
|
|
||||||
|
const MODEL_VERSION_TABLE_INSERTION = `
|
||||||
|
INSERT INTO model_versions (
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
quant_method,
|
||||||
|
bits,
|
||||||
|
size,
|
||||||
|
max_ram_required,
|
||||||
|
usecase,
|
||||||
|
download_link,
|
||||||
|
model_id,
|
||||||
start_download_at
|
start_download_at
|
||||||
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)`;
|
) VALUES (?,?,?,?,?,?,?,?,?,?)`;
|
||||||
|
|
||||||
|
const getDbPath = () => {
|
||||||
|
return path.join(app.getPath("userData"), "jan.db");
|
||||||
|
};
|
||||||
|
|
||||||
function init() {
|
function init() {
|
||||||
const db = new sqlite3.Database(path.join(app.getPath("userData"), "jan.db"));
|
const db = new sqlite3.Database(getDbPath());
|
||||||
console.log(
|
console.debug(`Database located at ${getDbPath()}`);
|
||||||
`Database located at ${path.join(app.getPath("userData"), "jan.db")}`
|
|
||||||
);
|
|
||||||
|
|
||||||
db.serialize(() => {
|
db.serialize(() => {
|
||||||
db.run(MODEL_TABLE_CREATION);
|
db.run(MODEL_TABLE_CREATION);
|
||||||
|
db.run(MODEL_VERSION_TABLE_CREATION);
|
||||||
db.run(
|
db.run(
|
||||||
"CREATE TABLE IF NOT EXISTS conversations ( id INTEGER PRIMARY KEY, name TEXT, model_id TEXT, image TEXT, message TEXT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP, updated_at DATETIME DEFAULT CURRENT_TIMESTAMP);"
|
"CREATE TABLE IF NOT EXISTS conversations ( id INTEGER PRIMARY KEY, name TEXT, model_id TEXT, image TEXT, message TEXT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP, updated_at DATETIME DEFAULT CURRENT_TIMESTAMP);"
|
||||||
);
|
);
|
||||||
@ -61,60 +85,60 @@ function init() {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
const stmt = db.prepare(
|
|
||||||
"INSERT INTO conversations (name, model_id, image, message) VALUES (?, ?, ?, ?)"
|
|
||||||
);
|
|
||||||
stmt.finalize();
|
|
||||||
db.close();
|
db.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Store a model in the database when user start downloading it
|
* Store a model in the database when user start downloading it
|
||||||
*
|
*
|
||||||
* @param model Product
|
* @param params: { model, modelVersion }
|
||||||
*/
|
*/
|
||||||
function storeModel(model: any) {
|
function storeModel(params: any) {
|
||||||
return new Promise((res) => {
|
return new Promise((res) => {
|
||||||
const db = new sqlite3.Database(
|
const db = new sqlite3.Database(getDbPath());
|
||||||
path.join(app.getPath("userData"), "jan.db")
|
console.debug("Inserting", JSON.stringify(params));
|
||||||
);
|
|
||||||
console.debug("Inserting", JSON.stringify(model));
|
const model = params.model;
|
||||||
|
const modelTags = model.tags.join(",");
|
||||||
|
const modelVersion = params.modelVersion;
|
||||||
|
|
||||||
db.serialize(() => {
|
db.serialize(() => {
|
||||||
const stmt = db.prepare(MODEL_TABLE_INSERTION);
|
const stmt = db.prepare(MODEL_TABLE_INSERTION);
|
||||||
stmt.run(
|
stmt.run(
|
||||||
model.id,
|
model.id,
|
||||||
model.slug,
|
|
||||||
model.name,
|
model.name,
|
||||||
model.description,
|
model.shortDescription,
|
||||||
model.avatarUrl,
|
model.avatarUrl,
|
||||||
model.longDescription,
|
model.longDescription,
|
||||||
model.technicalDescription,
|
|
||||||
model.author,
|
model.author,
|
||||||
model.version,
|
model.version,
|
||||||
model.modelUrl,
|
model.modelUrl,
|
||||||
model.nsfw,
|
model.nsfw,
|
||||||
|
modelTags,
|
||||||
model.greeting,
|
model.greeting,
|
||||||
model.type,
|
model.type
|
||||||
model.fileName,
|
|
||||||
model.downloadUrl,
|
|
||||||
Date.now(),
|
|
||||||
function (err: any) {
|
|
||||||
if (err) {
|
|
||||||
// Handle the insertion error here
|
|
||||||
console.error(err.message);
|
|
||||||
res(undefined);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
// @ts-ignoreF
|
|
||||||
const id = this.lastID;
|
|
||||||
res(id);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
);
|
);
|
||||||
stmt.finalize();
|
stmt.finalize();
|
||||||
|
|
||||||
|
const stmt2 = db.prepare(MODEL_VERSION_TABLE_INSERTION);
|
||||||
|
stmt2.run(
|
||||||
|
modelVersion.id,
|
||||||
|
modelVersion.name,
|
||||||
|
modelVersion.quantMethod,
|
||||||
|
modelVersion.bits,
|
||||||
|
modelVersion.size,
|
||||||
|
modelVersion.maxRamRequired,
|
||||||
|
modelVersion.usecase,
|
||||||
|
modelVersion.downloadLink,
|
||||||
|
model.id,
|
||||||
|
modelVersion.startDownloadAt
|
||||||
|
);
|
||||||
|
|
||||||
|
stmt2.finalize();
|
||||||
});
|
});
|
||||||
|
|
||||||
db.close();
|
db.close();
|
||||||
|
res(undefined);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -123,16 +147,18 @@ function storeModel(model: any) {
|
|||||||
*
|
*
|
||||||
* @param model Product
|
* @param model Product
|
||||||
*/
|
*/
|
||||||
function updateFinishedDownloadAt(fileName: string, time: number) {
|
function updateFinishedDownloadAt(modelVersionId: string) {
|
||||||
return new Promise((res) => {
|
return new Promise((res, rej) => {
|
||||||
const db = new sqlite3.Database(
|
const db = new sqlite3.Database(getDbPath());
|
||||||
path.join(app.getPath("userData"), "jan.db")
|
const time = Date.now();
|
||||||
|
console.debug(
|
||||||
|
`Updating finished downloaded model version ${modelVersionId}`
|
||||||
);
|
);
|
||||||
console.debug(`Updating fileName ${fileName} to ${time}`);
|
const stmt = `UPDATE model_versions SET finish_download_at = ? WHERE id = ?`;
|
||||||
const stmt = `UPDATE models SET finish_download_at = ? WHERE file_name = ?`;
|
db.run(stmt, [time, modelVersionId], (err: any) => {
|
||||||
db.run(stmt, [time, fileName], (err: any) => {
|
|
||||||
if (err) {
|
if (err) {
|
||||||
console.log(err);
|
console.log(err);
|
||||||
|
rej(err);
|
||||||
} else {
|
} else {
|
||||||
console.log("Updated 1 row");
|
console.log("Updated 1 row");
|
||||||
res("Updated");
|
res("Updated");
|
||||||
@ -148,40 +174,107 @@ function updateFinishedDownloadAt(fileName: string, time: number) {
|
|||||||
*/
|
*/
|
||||||
function getUnfinishedDownloadModels() {
|
function getUnfinishedDownloadModels() {
|
||||||
return new Promise((res) => {
|
return new Promise((res) => {
|
||||||
const db = new sqlite3.Database(
|
const db = new sqlite3.Database(getDbPath());
|
||||||
path.join(app.getPath("userData"), "jan.db")
|
|
||||||
);
|
|
||||||
|
|
||||||
const query = `SELECT * FROM models WHERE finish_download_at = -1 ORDER BY start_download_at DESC`;
|
const query = `SELECT * FROM model_versions WHERE finish_download_at = -1 ORDER BY start_download_at DESC`;
|
||||||
db.all(query, (err: Error, row: any) => {
|
db.all(query, (err: Error, row: any) => {
|
||||||
res(row);
|
if (row) {
|
||||||
|
res(row);
|
||||||
|
} else {
|
||||||
|
res([]);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
db.close();
|
db.close();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function getFinishedDownloadModels() {
|
async function getFinishedDownloadModels() {
|
||||||
return new Promise((res) => {
|
const db = new sqlite3.Database(getDbPath());
|
||||||
const db = new sqlite3.Database(
|
try {
|
||||||
path.join(app.getPath("userData"), "jan.db")
|
const query = `SELECT * FROM model_versions WHERE finish_download_at != -1 ORDER BY finish_download_at DESC`;
|
||||||
|
const modelVersions: any = await new Promise((resolve, reject) => {
|
||||||
|
db.all(query, (err: Error, rows: any[]) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
resolve(rows);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
const models = await Promise.all(
|
||||||
|
modelVersions.map(async (modelVersion) => {
|
||||||
|
const modelQuery = `SELECT * FROM models WHERE id = ?`;
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
db.get(
|
||||||
|
modelQuery,
|
||||||
|
[modelVersion.model_id],
|
||||||
|
(err: Error, row: any) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
resolve(row);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
const query = `SELECT * FROM models WHERE finish_download_at != -1 ORDER BY finish_download_at DESC`;
|
const downloadedModels = [];
|
||||||
db.all(query, (err: Error, row: any) => {
|
modelVersions.forEach((modelVersion: any) => {
|
||||||
res(row.map((item: any) => parseToProduct(item)));
|
const model = models.find((m: any) => m.id === modelVersion.model_id);
|
||||||
|
|
||||||
|
if (!model) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const assistantModel = {
|
||||||
|
id: modelVersion.id,
|
||||||
|
name: modelVersion.name,
|
||||||
|
quantMethod: modelVersion.quant_method,
|
||||||
|
bits: modelVersion.bits,
|
||||||
|
size: modelVersion.size,
|
||||||
|
maxRamRequired: modelVersion.max_ram_required,
|
||||||
|
usecase: modelVersion.usecase,
|
||||||
|
downloadLink: modelVersion.download_link,
|
||||||
|
startDownloadAt: modelVersion.start_download_at,
|
||||||
|
finishDownloadAt: modelVersion.finish_download_at,
|
||||||
|
productId: model.id,
|
||||||
|
productName: model.name,
|
||||||
|
shortDescription: model.short_description,
|
||||||
|
longDescription: model.long_description,
|
||||||
|
avatarUrl: model.avatar_url,
|
||||||
|
author: model.author,
|
||||||
|
version: model.version,
|
||||||
|
modelUrl: model.model_url,
|
||||||
|
nsfw: model.nsfw === 0 ? false : true,
|
||||||
|
greeting: model.default_greeting,
|
||||||
|
type: model.type,
|
||||||
|
createdAt: new Date(model.created_at).getTime(),
|
||||||
|
updatedAt: new Date(model.updated_at ?? "").getTime(),
|
||||||
|
status: "",
|
||||||
|
releaseDate: -1,
|
||||||
|
tags: model.tags.split(","),
|
||||||
|
};
|
||||||
|
downloadedModels.push(assistantModel);
|
||||||
});
|
});
|
||||||
|
|
||||||
db.close();
|
db.close();
|
||||||
});
|
|
||||||
|
return downloadedModels;
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function deleteDownloadModel(modelId: string) {
|
function deleteDownloadModel(modelId: string) {
|
||||||
return new Promise((res) => {
|
return new Promise((res) => {
|
||||||
const db = new sqlite3.Database(
|
const db = new sqlite3.Database(getDbPath());
|
||||||
path.join(app.getPath("userData"), "jan.db")
|
console.debug(`Deleting ${modelId}`);
|
||||||
);
|
|
||||||
console.log(`Deleting ${modelId}`);
|
|
||||||
db.serialize(() => {
|
db.serialize(() => {
|
||||||
const stmt = db.prepare("DELETE FROM models WHERE id = ?");
|
const stmt = db.prepare("DELETE FROM model_versions WHERE id = ?");
|
||||||
stmt.run(modelId);
|
stmt.run(modelId);
|
||||||
stmt.finalize();
|
stmt.finalize();
|
||||||
res(modelId);
|
res(modelId);
|
||||||
@ -191,55 +284,76 @@ function deleteDownloadModel(modelId: string) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
-function getModelById(modelId: string) {
-  return new Promise((res) => {
-    const db = new sqlite3.Database(
-      path.join(app.getPath("userData"), "jan.db")
-    );
-
-    console.debug("Get model by id", modelId);
-    db.get(
-      `SELECT * FROM models WHERE id = ?`,
-      [modelId],
-      (err: any, row: any) => {
-        console.debug("Get model by id result", row);
-
-        if (row) {
-          const product = {
-            id: row.id,
-            slug: row.slug,
-            name: row.name,
-            description: row.description,
-            avatarUrl: row.avatar_url,
-            longDescription: row.long_description,
-            technicalDescription: row.technical_description,
-            author: row.author,
-            version: row.version,
-            modelUrl: row.model_url,
-            nsfw: row.nsfw,
-            greeting: row.greeting,
-            type: row.type,
-            inputs: row.inputs,
-            outputs: row.outputs,
-            createdAt: new Date(row.created_at),
-            updatedAt: new Date(row.updated_at),
-            fileName: row.file_name,
-            downloadUrl: row.download_url,
-          };
-          res(product);
-        }
-      }
-    );
-
-    db.close();
-  });
-}
+function fetchModelVersion(db: any, versionId: string) {
+  return new Promise((resolve, reject) => {
+    db.get(
+      "SELECT * FROM model_versions WHERE id = ?",
+      [versionId],
+      (err, row) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(row);
+        }
+      }
+    );
+  });
+}
+
+async function fetchModel(db: any, modelId: string) {
+  return new Promise((resolve, reject) => {
+    db.get("SELECT * FROM models WHERE id = ?", [modelId], (err, row) => {
+      if (err) {
+        reject(err);
+      } else {
+        resolve(row);
+      }
+    });
+  });
+}
+
+const getModelById = async (versionId: string): Promise<any | undefined> => {
+  const db = new sqlite3.Database(getDbPath());
+  const modelVersion: any | undefined = await fetchModelVersion(db, versionId);
+  if (!modelVersion) return undefined;
+  const model: any | undefined = await fetchModel(db, modelVersion.model_id);
+  if (!model) return undefined;
+
+  const assistantModel = {
+    id: modelVersion.id,
+    name: modelVersion.name,
+    quantMethod: modelVersion.quant_method,
+    bits: modelVersion.bits,
+    size: modelVersion.size,
+    maxRamRequired: modelVersion.max_ram_required,
+    usecase: modelVersion.usecase,
+    downloadLink: modelVersion.download_link,
+    startDownloadAt: modelVersion.start_download_at,
+    finishDownloadAt: modelVersion.finish_download_at,
+    productId: model.id,
+    productName: model.name,
+    shortDescription: model.short_description,
+    longDescription: model.long_description,
+    avatarUrl: model.avatar_url,
+    author: model.author,
+    version: model.version,
+    modelUrl: model.model_url,
+    nsfw: model.nsfw === 0 ? false : true,
+    greeting: model.default_greeting,
+    type: model.type,
+    createdAt: new Date(model.created_at).getTime(),
+    updatedAt: new Date(model.updated_at ?? "").getTime(),
+    status: "",
+    releaseDate: -1,
+    tags: model.tags.split(","),
+  };
+
+  return assistantModel;
+};

function getConversations() {
  return new Promise((res) => {
-   const db = new sqlite3.Database(
-     path.join(app.getPath("userData"), "jan.db")
-   );
+   const db = new sqlite3.Database(getDbPath());

    db.all(
      "SELECT * FROM conversations ORDER BY updated_at DESC",
@ -250,11 +364,10 @@ function getConversations() {
    db.close();
  });
}

function storeConversation(conversation: any): Promise<number | undefined> {
  return new Promise((res) => {
-   const db = new sqlite3.Database(
-     path.join(app.getPath("userData"), "jan.db")
-   );
+   const db = new sqlite3.Database(getDbPath());

    db.serialize(() => {
      const stmt = db.prepare(
@ -287,9 +400,7 @@ function storeConversation(conversation: any): Promise<number | undefined> {

function storeMessage(message: any): Promise<number | undefined> {
  return new Promise((res) => {
-   const db = new sqlite3.Database(
-     path.join(app.getPath("userData"), "jan.db")
-   );
+   const db = new sqlite3.Database(getDbPath());

    db.serialize(() => {
      const stmt = db.prepare(
@ -319,11 +430,10 @@ function storeMessage(message: any): Promise<number | undefined> {
    db.close();
  });
}

function updateMessage(message: any): Promise<number | undefined> {
  return new Promise((res) => {
-   const db = new sqlite3.Database(
-     path.join(app.getPath("userData"), "jan.db")
-   );
+   const db = new sqlite3.Database(getDbPath());

    db.serialize(() => {
      const stmt = db.prepare(
@ -340,9 +450,7 @@ function updateMessage(message: any): Promise<number | undefined> {

function deleteConversation(id: any) {
  return new Promise((res) => {
-   const db = new sqlite3.Database(
-     path.join(app.getPath("userData"), "jan.db")
-   );
+   const db = new sqlite3.Database(getDbPath());

    db.serialize(() => {
      const deleteConv = db.prepare("DELETE FROM conversations WHERE id = ?");
@ -362,9 +470,7 @@ function deleteConversation(id: any) {

function getConversationMessages(conversation_id: any) {
  return new Promise((res) => {
-   const db = new sqlite3.Database(
-     path.join(app.getPath("userData"), "jan.db")
-   );
+   const db = new sqlite3.Database(getDbPath());

    const query = `SELECT * FROM messages WHERE conversation_id = ${conversation_id} ORDER BY id DESC`;
    db.all(query, (err: Error, row: any) => {
@ -374,31 +480,6 @@ function getConversationMessages(conversation_id: any) {
    });
}

-function parseToProduct(row: any) {
-  const product = {
-    id: row.id,
-    slug: row.slug,
-    name: row.name,
-    description: row.description,
-    avatarUrl: row.avatar_url,
-    longDescription: row.long_description,
-    technicalDescription: row.technical_description,
-    author: row.author,
-    version: row.version,
-    modelUrl: row.model_url,
-    nsfw: row.nsfw,
-    greeting: row.greeting,
-    type: row.type,
-    inputs: row.inputs,
-    outputs: row.outputs,
-    createdAt: new Date(row.created_at),
-    updatedAt: new Date(row.updated_at),
-    fileName: row.file_name,
-    downloadUrl: row.download_url,
-  };
-  return product;
-}

module.exports = {
  init,
  getConversations,
@ -1,6 +1,6 @@
{
  "name": "data-plugin",
- "version": "1.0.0",
+ "version": "1.0.2",
  "description": "Jan Database Plugin efficiently stores conversation and model data using SQLite, providing accessible data management",
  "icon": "https://raw.githubusercontent.com/tailwindlabs/heroicons/88e98b0c2b458553fbadccddc2d2f878edc0387b/src/20/solid/circle-stack.svg",
  "main": "dist/index.js",
@ -9,14 +9,6 @@ const initModel = async (product) =>
  }
});

-const dispose = async () =>
-  new Promise(async (resolve) => {
-    if (window.electronAPI) {
-      window.electronAPI
-        .invokePluginFunc(MODULE_PATH, "dispose")
-        .then((res) => resolve(res));
-    }
-  });
const inferenceUrl = () => "http://localhost:3928/llama/chat_completion";

const stopModel = () => {
@ -27,6 +19,5 @@ const stopModel = () => {
export function init({ register }) {
  register("initModel", "initModel", initModel);
  register("inferenceUrl", "inferenceUrl", inferenceUrl);
- register("dispose", "dispose", dispose);
  register("stopModel", "stopModel", stopModel);
}
@ -1,102 +0,0 @@
const path = require("path");
const { app, dialog } = require("electron");
const { spawn } = require("child_process");
const fs = require("fs");

let subprocess = null;

async function initModel(product) {
  // fileName fallback
  if (!product.fileName) {
    product.fileName = product.file_name;
  }

  if (!product.fileName) {
    await dialog.showMessageBox({
      message: "Selected model does not have file name..",
    });

    return;
  }

  if (subprocess) {
    console.error(
      "A subprocess is already running. Attempt to kill then reinit."
    );
    dispose();
  }

  let binaryFolder = path.join(__dirname, "nitro"); // Current directory by default

  // Read the existing config
  const configFilePath = path.join(binaryFolder, "config", "config.json");
  let config = {};
  if (fs.existsSync(configFilePath)) {
    const rawData = fs.readFileSync(configFilePath, "utf-8");
    config = JSON.parse(rawData);
  }

  // Update the llama_model_path
  if (!config.custom_config) {
    config.custom_config = {};
  }

  const modelPath = path.join(app.getPath("userData"), product.fileName);

  config.custom_config.llama_model_path = modelPath;

  // Write the updated config back to the file
  fs.writeFileSync(configFilePath, JSON.stringify(config, null, 4));

  let binaryName;

  if (process.platform === "win32") {
    binaryName = "nitro_windows_amd64.exe";
  } else if (process.platform === "darwin") { // Mac OS platform
    binaryName = process.arch === "arm64" ? "nitro_mac_arm64" : "nitro_mac_amd64";
  } else {
    // Linux
    binaryName = "nitro_linux_amd64_cuda"; // For other platforms
  }

  const binaryPath = path.join(binaryFolder, binaryName);

  // Execute the binary
  subprocess = spawn(binaryPath, [configFilePath], { cwd: binaryFolder });

  // Handle subprocess output
  subprocess.stdout.on("data", (data) => {
    console.log(`stdout: ${data}`);
  });

  subprocess.stderr.on("data", (data) => {
    console.error(`stderr: ${data}`);
  });

  subprocess.on("close", (code) => {
    console.log(`child process exited with code ${code}`);
    subprocess = null;
  });
}

function dispose() {
  killSubprocess();
  // clean other registered resources here
}

function killSubprocess() {
  if (subprocess) {
    subprocess.kill();
    subprocess = null;
    console.log("Subprocess terminated.");
  } else {
    console.error("No subprocess is currently running.");
  }
}

module.exports = {
  initModel,
  killSubprocess,
  dispose,
};
119
electron/core/plugins/inference-plugin/module.ts
Normal file
@ -0,0 +1,119 @@
const path = require("path");
const { app } = require("electron");
const { spawn } = require("child_process");
const fs = require("fs");
const tcpPortUsed = require("tcp-port-used");
const { killPortProcess } = require("kill-port-process");

let subprocess = null;
const PORT = 3928;

const initModel = (fileName) => {
  return (
    new Promise<void>(async (resolve, reject) => {
      if (!fileName) {
        reject("Model not found, please download again.");
      }
      if (subprocess) {
        console.error(
          "A subprocess is already running. Attempt to kill then reinit."
        );
        killSubprocess();
      }
      resolve(fileName);
    })
      // Kill port process if it is already in use
      .then((fileName) =>
        tcpPortUsed
          .waitUntilFree(PORT, 200, 3000)
          .catch(() => killPortProcess(PORT))
          .then(() => fileName)
      )
      // Spawn Nitro subprocess to load model
      .then(() => {
        let binaryFolder = path.join(__dirname, "nitro"); // Current directory by default

        // Read the existing config
        const configFilePath = path.join(binaryFolder, "config", "config.json");
        let config: any = {};
        if (fs.existsSync(configFilePath)) {
          const rawData = fs.readFileSync(configFilePath, "utf-8");
          config = JSON.parse(rawData);
        }

        // Update the llama_model_path
        if (!config.custom_config) {
          config.custom_config = {};
        }

        const modelPath = path.join(app.getPath("userData"), fileName);

        config.custom_config.llama_model_path = modelPath;

        // Write the updated config back to the file
        fs.writeFileSync(configFilePath, JSON.stringify(config, null, 4));

        let binaryName;

        if (process.platform === "win32") {
          binaryName = "nitro_windows_amd64.exe";
        } else if (process.platform === "darwin") {
          // Mac OS platform
          binaryName =
            process.arch === "arm64" ? "nitro_mac_arm64" : "nitro_mac_amd64";
        } else {
          // Linux
          binaryName = "nitro_linux_amd64_cuda"; // For other platforms
        }

        const binaryPath = path.join(binaryFolder, binaryName);

        // Execute the binary
        subprocess = spawn(binaryPath, [configFilePath], { cwd: binaryFolder });

        // Handle subprocess output
        subprocess.stdout.on("data", (data) => {
          console.log(`stdout: ${data}`);
        });

        subprocess.stderr.on("data", (data) => {
          console.error(`stderr: ${data}`);
        });

        subprocess.on("close", (code) => {
          console.log(`child process exited with code ${code}`);
          subprocess = null;
        });
      })
      .then(() => tcpPortUsed.waitUntilUsed(PORT, 300, 30000))
      .then(() => {
        return {};
      })
      .catch((err) => {
        return { error: err };
      })
  );
};

function dispose() {
  killSubprocess();
  // clean other registered resources here
}

function killSubprocess() {
  if (subprocess) {
    subprocess.kill();
    subprocess = null;
    console.log("Subprocess terminated.");
  } else {
    killPortProcess(PORT);
    console.error("No subprocess is currently running.");
  }
}

module.exports = {
  initModel,
  killSubprocess,
  dispose,
};
@ -24,12 +24,59 @@ typedef struct {
    int8_t qs[QK8_0]; // quants
} block_q8_0;

+ // general-purpose kernel for addition of two tensors
+ // pros: works for non-contiguous tensors, supports broadcast across dims 1, 2 and 3
+ // cons: not very efficient
kernel void kernel_add(
-       device const float4 * src0,
-       device const float4 * src1,
-       device       float4 * dst,
-       uint tpig[[thread_position_in_grid]]) {
-   dst[tpig] = src0[tpig] + src1[tpig];
+       device const char * src0,
+       device const char * src1,
+       device       char * dst,
+       constant int64_t & ne00,
+       constant int64_t & ne01,
+       constant int64_t & ne02,
+       constant int64_t & ne03,
+       constant int64_t & nb00,
+       constant int64_t & nb01,
+       constant int64_t & nb02,
+       constant int64_t & nb03,
+       constant int64_t & ne10,
+       constant int64_t & ne11,
+       constant int64_t & ne12,
+       constant int64_t & ne13,
+       constant int64_t & nb10,
+       constant int64_t & nb11,
+       constant int64_t & nb12,
+       constant int64_t & nb13,
+       constant int64_t & ne0,
+       constant int64_t & ne1,
+       constant int64_t & ne2,
+       constant int64_t & ne3,
+       constant int64_t & nb0,
+       constant int64_t & nb1,
+       constant int64_t & nb2,
+       constant int64_t & nb3,
+       uint3 tgpig[[threadgroup_position_in_grid]],
+       uint3 tpitg[[thread_position_in_threadgroup]],
+       uint3 ntg[[threads_per_threadgroup]]) {
+   const int64_t i03 = tgpig.z;
+   const int64_t i02 = tgpig.y;
+   const int64_t i01 = tgpig.x;
+
+   const int64_t i13 = i03 % ne13;
+   const int64_t i12 = i02 % ne12;
+   const int64_t i11 = i01 % ne11;
+
+   device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + tpitg.x*nb00;
+   device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11 + tpitg.x*nb10;
+   device       char * dst_ptr  = dst  + i03*nb3  + i02*nb2  + i01*nb1  + tpitg.x*nb0;
+
+   for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
+       ((device float *)dst_ptr)[0] = ((device float *)src0_ptr)[0] + ((device float *)src1_ptr)[0];
+
+       src0_ptr += ntg.x*nb00;
+       src1_ptr += ntg.x*nb10;
+       dst_ptr  += ntg.x*nb0;
+   }
}

// assumption: src1 is a row
@ -38,7 +85,7 @@ kernel void kernel_add_row(
        device const float4 * src0,
        device const float4 * src1,
        device       float4 * dst,
-       constant int64_t & nb,
+       constant int64_t & nb [[buffer(27)]],
        uint tpig[[thread_position_in_grid]]) {
    dst[tpig] = src0[tpig] + src1[tpig % nb];
}
@ -783,7 +830,9 @@ kernel void kernel_alibi_f32(
        constant uint64_t & nb1,
        constant uint64_t & nb2,
        constant uint64_t & nb3,
        constant float & m0,
+       constant float & m1,
+       constant int & n_heads_log2_floor,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {
@ -799,37 +848,73 @@ kernel void kernel_alibi_f32(
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0);

    device float * dst_data = (device float *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
-   float m_k = pow(m0, i2 + 1);
+   float m_k;
+   if (i2 < n_heads_log2_floor) {
+       m_k = pow(m0, i2 + 1);
+   } else {
+       m_k = pow(m1, 2 * (i2 - n_heads_log2_floor) + 1);
+   }
    for (int64_t i00 = tpitg.x; i00 < ne00; i00 += ntg.x) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);
        dst_data[i00] = src[0] + m_k * (i00 - ne00 + 1);
    }
}

+ typedef void (rope_t)(
+       device const void * src0,
+       device const int32_t * src1,
+       device float * dst,
+       constant int64_t & ne00,
+       constant int64_t & ne01,
+       constant int64_t & ne02,
+       constant int64_t & ne03,
+       constant uint64_t & nb00,
+       constant uint64_t & nb01,
+       constant uint64_t & nb02,
+       constant uint64_t & nb03,
+       constant int64_t & ne0,
+       constant int64_t & ne1,
+       constant int64_t & ne2,
+       constant int64_t & ne3,
+       constant uint64_t & nb0,
+       constant uint64_t & nb1,
+       constant uint64_t & nb2,
+       constant uint64_t & nb3,
+       constant int & n_past,
+       constant int & n_dims,
+       constant int & mode,
+       constant float & freq_base,
+       constant float & freq_scale,
+       uint tiitg[[thread_index_in_threadgroup]],
+       uint3 tptg[[threads_per_threadgroup]],
+       uint3 tgpig[[threadgroup_position_in_grid]]);
+
+ template<typename T>
kernel void kernel_rope(
        device const void * src0,
+       device const int32_t * src1,
        device float * dst,
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant int64_t & ne03,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant uint64_t & nb03,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant int64_t & ne2,
        constant int64_t & ne3,
        constant uint64_t & nb0,
        constant uint64_t & nb1,
        constant uint64_t & nb2,
        constant uint64_t & nb3,
        constant int & n_past,
        constant int & n_dims,
        constant int & mode,
        constant float & freq_base,
        constant float & freq_scale,
        uint tiitg[[thread_index_in_threadgroup]],
        uint3 tptg[[threads_per_threadgroup]],
        uint3 tgpig[[threadgroup_position_in_grid]]) {
@ -839,7 +924,9 @@ kernel void kernel_rope(

    const bool is_neox = mode & 2;

-   const int64_t p = ((mode & 1) == 0 ? n_past + i2 : i2);
+   device const int32_t * pos = src1;
+
+   const int64_t p = pos[i2];

    const float theta_0 = freq_scale * (float)p;
    const float inv_ndims = -1.f/n_dims;
@ -851,11 +938,11 @@ kernel void kernel_rope(
    const float cos_theta = cos(theta);
    const float sin_theta = sin(theta);

-   device const float * const src = (device float *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
-   device float * dst_data = (device float *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
+   device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
+   device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

-   const float x0 = src[0];
-   const float x1 = src[1];
+   const T x0 = src[0];
+   const T x1 = src[1];

    dst_data[0] = x0*cos_theta - x1*sin_theta;
    dst_data[1] = x0*sin_theta + x1*cos_theta;
@ -870,8 +957,8 @@ kernel void kernel_rope(

    const int64_t i0 = ib*n_dims + ic/2;

-   device const float * const src = (device float *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
-   device float * dst_data = (device float *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
+   device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
+   device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    const float x0 = src[0];
    const float x1 = src[n_dims/2];
@ -883,6 +970,9 @@ kernel void kernel_rope(
    }
}

+ template [[host_name("kernel_rope_f32")]] kernel rope_t kernel_rope<float>;
+ template [[host_name("kernel_rope_f16")]] kernel rope_t kernel_rope<half>;
+
kernel void kernel_cpy_f16_f16(
        device const half * src0,
        device       half * dst,
@ -1273,8 +1363,8 @@ kernel void kernel_mul_mat_q3_K_f32(

    float yl[32];

-   const uint16_t kmask1 = 0x3030;
-   const uint16_t kmask2 = 0x0f0f;
+   //const uint16_t kmask1 = 0x3030;
+   //const uint16_t kmask2 = 0x0f0f;

    const int tid = tiisg/4;
    const int ix = tiisg%4;
@ -2350,4 +2440,4 @@ template [[host_name("kernel_mul_mm_q2_K_f32")]] kernel mat_mm_t kernel_mul_mm<b
template [[host_name("kernel_mul_mm_q3_K_f32")]] kernel mat_mm_t kernel_mul_mm<block_q3_K, QK_NL, dequantize_q3_K>;
template [[host_name("kernel_mul_mm_q4_K_f32")]] kernel mat_mm_t kernel_mul_mm<block_q4_K, QK_NL, dequantize_q4_K>;
template [[host_name("kernel_mul_mm_q5_K_f32")]] kernel mat_mm_t kernel_mul_mm<block_q5_K, QK_NL, dequantize_q5_K>;
template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm<block_q6_K, QK_NL, dequantize_q6_K>;
595
electron/core/plugins/inference-plugin/package-lock.json
generated
@ -10,23 +10,29 @@
    "init"
  ],
  "scripts": {
-   "build": "webpack --config webpack.config.js",
-   "postinstall": "rimraf ./*.tgz && npm run build && cpx \"module.js\" \"dist\" && rimraf dist/nitro/* && cpx \"nitro/**\" \"dist/nitro\"",
+   "build": "tsc -b . && webpack --config webpack.config.js",
+   "postinstall": "rimraf ./*.tgz && npm run build && rimraf dist/nitro/* && cpx \"nitro/**\" \"dist/nitro\"",
    "build:publish": "npm pack && cpx *.tgz ../../pre-install"
  },
+ "exports": {
+   ".": "./dist/index.js",
+   "./main": "./dist/module.js"
+ },
  "devDependencies": {
    "cpx": "^1.5.0",
    "rimraf": "^3.0.2",
    "webpack": "^5.88.2",
    "webpack-cli": "^5.1.4"
  },
- "bundledDependencies": [
-   "electron-is-dev",
-   "node-llama-cpp"
- ],
  "dependencies": {
-   "electron-is-dev": "^2.0.0"
+   "kill-port-process": "^3.2.0",
+   "tcp-port-used": "^1.0.2",
+   "ts-loader": "^9.5.0"
  },
+ "bundledDependencies": [
+   "tcp-port-used",
+   "kill-port-process"
+ ],
  "engines": {
    "node": ">=18.0.0"
  },
22
electron/core/plugins/inference-plugin/tsconfig.json
Normal file
@ -0,0 +1,22 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig to read more about this file */
    /* Language and Environment */
    "target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
    /* Modules */
    "module": "ES6" /* Specify what module code is generated. */,
    // "rootDir": "./", /* Specify the root folder within your source files. */
    // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
    // "baseUrl": "." /* Specify the base directory to resolve non-relative module names. */,
    // "paths": {} /* Specify a set of entries that re-map imports to additional lookup locations. */,
    // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
    // "resolveJsonModule": true, /* Enable importing .json files. */

    "outDir": "./dist" /* Specify an output folder for all emitted files. */,
    "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
    "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
    /* Type Checking */
    "strict": false /* Enable all strict type-checking options. */,
    "skipLibCheck": true /* Skip type checking all .d.ts files. */
  }
}
7
electron/core/plugins/inference-plugin/types/index.d.ts
vendored
Normal file
@ -0,0 +1,7 @@
export {};

declare global {
  interface Window {
    electronAPI?: any | undefined;
  }
}
@ -2,7 +2,7 @@ const path = require("path");

module.exports = {
  experiments: { outputModule: true },
- entry: "./index.js", // Adjust the entry point to match your project's main file
+ entry: "./index.ts", // Adjust the entry point to match your project's main file
  mode: "production",
  module: {
    rules: [
@ -19,7 +19,7 @@ module.exports = {
    library: { type: "module" }, // Specify ESM output format
  },
  resolve: {
-   extensions: [".js"],
+   extensions: [".ts", ".js"],
  },
  // Add loaders and other configuration as needed for your project
};
@ -1,6 +1,6 @@
const MODULE_PATH = "model-management-plugin/dist/module.js";

-const getDownloadedModels = async () =>
+const getDownloadedModels = () =>
  new Promise(async (resolve) => {
    if (window.electronAPI) {
      window.electronAPI
@ -9,7 +9,7 @@ const getDownloadedModels = async () =>
    }
  });

-const getAvailableModels = async () =>
+const getAvailableModels = () =>
  new Promise(async (resolve) => {
    if (window.electronAPI) {
      window.electronAPI
@ -18,7 +18,7 @@ const getAvailableModels = async () =>
    }
  });

-const downloadModel = async (product) =>
+const downloadModel = (product) =>
  new Promise(async (resolve) => {
    if (window && window.electronAPI) {
      window.electronAPI
@ -29,7 +29,7 @@ const downloadModel = async (product) =>
    }
  });

-const deleteModel = async (path) =>
+const deleteModel = (path) =>
  new Promise(async (resolve) => {
    if (window.electronAPI) {
      console.debug(`Delete model model management plugin: ${path}`);
@ -38,7 +38,7 @@ const deleteModel = async (path) =>
    }
  });

-const searchModels = async (params) =>
+const searchModels = (params) =>
  new Promise(async (resolve) => {
    if (window.electronAPI) {
      window.electronAPI
@ -47,6 +47,15 @@ const searchModels = async (params) =>
    }
  });

+const getConfiguredModels = () =>
+  new Promise(async (resolve) => {
+    if (window.electronAPI) {
+      window.electronAPI
+        .invokePluginFunc(MODULE_PATH, "getConfiguredModels")
+        .then((res) => resolve(res));
+    }
+  });
+
// Register all the above functions and objects with the relevant extension points
export function init({ register }) {
  register("getDownloadedModels", "getDownloadedModels", getDownloadedModels);
@ -54,4 +63,5 @@ export function init({ register }) {
  register("downloadModel", "downloadModel", downloadModel);
  register("deleteModel", "deleteModel", deleteModel);
  register("searchModels", "searchModels", searchModels);
+ register("getConfiguredModels", "getConfiguredModels", getConfiguredModels);
}
@ -1,95 +1,16 @@
-const path = require("path");
-const { readdirSync, lstatSync } = require("fs");
-const { app } = require("electron");
const { listModels, listFiles, fileDownloadInfo } = require("@huggingface/hub");
+const https = require("https");

let modelsIterator = undefined;
let currentSearchOwner = undefined;

-const ALL_MODELS = [
-  {
-    id: "llama-2-7b-chat.Q4_K_M.gguf.bin",
-    slug: "llama-2-7b-chat.Q4_K_M.gguf.bin",
-    name: "Llama 2 7B Chat - GGUF",
-    description: "medium, balanced quality - recommended",
-    avatarUrl:
-      "https://aeiljuispo.cloudimg.io/v7/https://cdn-uploads.huggingface.co/production/uploads/6426d3f3a7723d62b53c259b/tvPikpAzKTKGN5wrpadOJ.jpeg?w=200&h=200&f=face",
-    longDescription:
-      "GGUF is a new format introduced by the llama.cpp team on August 21st 2023. It is a replacement for GGML, which is no longer supported by llama.cpp. GGUF offers numerous advantages over GGML, such as better tokenisation, and support for special tokens. It is also supports metadata, and is designed to be extensible.",
-    technicalDescription:
-      'GGML_TYPE_Q4_K - "type-1" 4-bit quantization in super-blocks containing 8 blocks, each block having 32 weights. Scales and mins are quantized with 6 bits. This ends up using 4.5 bpw.',
-    author: "The Bloke",
-    version: "1.0.0",
-    modelUrl: "https://google.com",
-    nsfw: false,
-    greeting: "Hello there",
-    type: "LLM",
-    inputs: undefined,
-    outputs: undefined,
-    createdAt: 0,
-    updatedAt: undefined,
-    fileName: "llama-2-7b-chat.Q4_K_M.gguf.bin",
-    downloadUrl:
-      "https://huggingface.co/TheBloke/Llama-2-7b-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_K_M.gguf",
-  },
-  {
-    id: "llama-2-13b-chat.Q4_K_M.gguf",
-    slug: "llama-2-13b-chat.Q4_K_M.gguf",
-    name: "Llama 2 13B Chat - GGUF",
-    description:
-      "medium, balanced quality - not recommended for RAM 16GB and below",
-    avatarUrl:
-      "https://aeiljuispo.cloudimg.io/v7/https://cdn-uploads.huggingface.co/production/uploads/6426d3f3a7723d62b53c259b/tvPikpAzKTKGN5wrpadOJ.jpeg?w=200&h=200&f=face",
-    longDescription:
-      "GGUF is a new format introduced by the llama.cpp team on August 21st 2023. It is a replacement for GGML, which is no longer supported by llama.cpp. GGUF offers numerous advantages over GGML, such as better tokenisation, and support for special tokens. It is also supports metadata, and is designed to be extensible.",
-    technicalDescription:
-      'GGML_TYPE_Q4_K - "type-1" 4-bit quantization in super-blocks containing 8 blocks, each block having 32 weights. Scales and mins are quantized with 6 bits. This ends up using 4.5 bpw.',
-    author: "The Bloke",
-    version: "1.0.0",
-    modelUrl: "https://google.com",
-    nsfw: false,
-    greeting: "Hello there",
-    type: "LLM",
-    inputs: undefined,
-    outputs: undefined,
-    createdAt: 0,
-    updatedAt: undefined,
-    fileName: "llama-2-13b-chat.Q4_K_M.gguf.bin",
-    downloadUrl:
-      "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGUF/resolve/main/llama-2-13b-chat.Q4_K_M.gguf",
-  },
-];
-
-function getDownloadedModels() {
-  const userDataPath = app.getPath("userData");
-
-  const allBinariesName = [];
-  var files = readdirSync(userDataPath);
-  for (var i = 0; i < files.length; i++) {
-    var filename = path.join(userDataPath, files[i]);
-    var stat = lstatSync(filename);
-    if (stat.isDirectory()) {
-      // ignore
-    } else if (filename.endsWith(".bin")) {
-      var binaryName = path.basename(filename);
-      allBinariesName.push(binaryName);
-    }
-  }
-
-  const downloadedModels = ALL_MODELS.map((model) => {
-    if (
-      model.fileName &&
-      allBinariesName
-        .map((t) => t.toLowerCase())
-        .includes(model.fileName.toLowerCase())
-    ) {
-      return model;
-    }
-    return undefined;
-  }).filter((m) => m !== undefined);
-
-  return downloadedModels;
-}
-
+// Github API
+const githubHostName = "api.github.com";
+const githubHeaders = {
+  "User-Agent": "node.js",
+  Accept: "application/vnd.github.v3+json",
+};
+const githubPath = "/repos/janhq/models/contents";

const getNextModels = async (count) => {
  const models = [];
@ -161,17 +82,131 @@ const listFilesByName = async (modelName) => {
  return fileDownloadInfoMap;
};

-function getAvailableModels() {
-  const downloadedModelIds = getDownloadedModels().map((model) => model.id);
-  return ALL_MODELS.filter((model) => {
-    if (!downloadedModelIds.includes(model.id)) {
-      return model;
-    }
-  });
-}
+async function getConfiguredModels() {
+  const files = await getModelFiles();
+
+  const promises = files.map((file) => getContent(file));
+  const response = await Promise.all(promises);
+
+  const models = [];
+  response.forEach((model) => {
+    models.push(parseToModel(model));
+  });
+
+  return models;
+}
+
+const parseToModel = (model) => {
+  const modelVersions = [];
+  model.versions.forEach((v) => {
+    const version = {
+      id: `${model.author}-${v.name}`,
+      name: v.name,
+      quantMethod: v.quantMethod,
+      bits: v.bits,
+      size: v.size,
+      maxRamRequired: v.maxRamRequired,
+      usecase: v.usecase,
+      downloadLink: v.downloadLink,
+      productId: model.id,
+    };
+    modelVersions.push(version);
+  });
+
+  const product = {
+    id: model.id,
+    name: model.name,
+    shortDescription: model.shortDescription,
+    avatarUrl: model.avatarUrl,
+    author: model.author,
+    version: model.version,
+    modelUrl: model.modelUrl,
+    nsfw: model.nsfw,
+    tags: model.tags,
+    greeting: model.defaultGreeting,
+    type: model.type,
+    createdAt: model.createdAt,
+    longDescription: model.longDescription,
+    status: "Downloadable",
+    releaseDate: 0,
+    availableVersions: modelVersions,
+  };
+  return product;
+};
+
+async function getModelFiles() {
+  const options = {
+    hostname: githubHostName,
+    path: githubPath,
+    headers: githubHeaders,
+  };
+
+  const data = await new Promise((resolve, reject) => {
+    const req = https.request(options, (res) => {
+      let data = "";
+
+      res.on("data", (chunk) => {
+        data += chunk;
+      });
+
+      res.on("end", () => {
+        const files = JSON.parse(data);
+
+        if (files.filter == null) {
+          console.error(files.message);
+          reject(files.message ?? "No files found");
+        }
+        if (!files || files.length === 0) {
+          resolve([]);
+        }
+        const jsonFiles = files.filter((file) => file.name.endsWith(".json"));
+        resolve(jsonFiles);
+      });
+    });
+
+    req.on("error", (error) => {
+      console.error(error);
+    });
+
+    req.end();
+  });
+
+  return data;
+}
+
+async function getContent(file) {
+  const options = {
+    hostname: githubHostName,
+    path: `${githubPath}/${file.path}`,
+    headers: githubHeaders,
+  };
+
+  const data = await new Promise((resolve) => {
+    const req = https.request(options, (res) => {
+      let data = "";
+
+      res.on("data", (chunk) => {
+        data += chunk;
+      });
+
+      res.on("end", () => {
+        const fileData = JSON.parse(data);
+        const fileContent = Buffer.from(fileData.content, "base64").toString();
+        resolve(JSON.parse(fileContent));
+      });
+    });
+
+    req.on("error", (error) => {
+      console.error(error);
+    });
+
+    req.end();
+  });
+
+  return data;
+}

module.exports = {
- getDownloadedModels,
- getAvailableModels,
  searchModels,
+ getConfiguredModels,
};
(image asset updated: 38 KiB before, 38 KiB after)
@ -156,8 +156,23 @@ function handleIPCs() {

      rmdir(fullPath, { recursive: true }, function (err) {
        if (err) console.log(err);
-       app.relaunch();
-       app.exit();
+       dispose(requiredModules);
+
+       // just relaunch if packaged, should launch manually in development mode
+       if (app.isPackaged) {
+         app.relaunch();
+         app.exit();
+       } else {
+         for (const modulePath in requiredModules) {
+           delete require.cache[
+             require.resolve(
+               join(app.getPath("userData"), "plugins", modulePath)
+             )
+           ];
+         }
+         setupPlugins();
+         mainWindow?.reload();
+       }
      });
    });

@ -1,10 +1,11 @@
{
- "name": "jan-electron",
+ "name": "jan",
  "version": "0.1.3",
  "main": "./build/main.js",
  "author": "Jan <service@jan.ai>",
  "license": "MIT",
- "homepage": "./",
+ "homepage": "https://github.com/janhq/jan/tree/main/electron",
+ "description": "Use offline LLMs with your own data. Run open source models like Llama2 or Falcon on your internal computers/servers.",
  "build": {
    "appId": "jan.ai.app",
    "productName": "Jan",
@ -32,9 +33,18 @@
    "entitlementsInherit": "./entitlements.mac.plist",
    "notarize": {
      "teamId": "YT49P7GXG4"
-   }
+   },
+   "icon": "icons/icon.png"
  },
- "artifactName": "${name}-${os}-${arch}-${version}.${ext}"
+ "linux": {
+   "target": ["deb"],
+   "category": "Utility",
+   "icon": "icons/"
+ },
+ "win": {
+   "icon": "icons/icon.png"
+ },
+ "artifactName": "jan-${os}-${arch}-${version}.${ext}"
  },
  "scripts": {
    "lint": "eslint . --ext \".js,.jsx,.ts,.tsx\"",
@ -22,9 +22,9 @@ test.beforeAll(async () => {
  expect(appInfo.asar).toBe(true);
  expect(appInfo.executable).toBeTruthy();
  expect(appInfo.main).toBeTruthy();
- expect(appInfo.name).toBe("jan-electron");
+ expect(appInfo.name).toBe("jan");
  expect(appInfo.packageJson).toBeTruthy();
- expect(appInfo.packageJson.name).toBe("jan-electron");
+ expect(appInfo.packageJson.name).toBe("jan");
  expect(appInfo.platform).toBeTruthy();
  expect(appInfo.platform).toBe(process.platform);
  expect(appInfo.resourcesDir).toBeTruthy();
622
package-lock.json
generated
@ -27,27 +27,29 @@
      }
    },
    "electron": {
-     "name": "jan-electron",
+     "name": "jan",
      "version": "0.1.3",
      "license": "MIT",
      "dependencies": {
        "@npmcli/arborist": "^7.1.0",
-       "electron-mocha": "^12.1.0",
+       "@uiball/loaders": "^1.3.0",
        "electron-store": "^8.1.0",
        "electron-updater": "^6.1.4",
        "pacote": "^17.0.4",
+       "react-intersection-observer": "^9.5.2",
        "request": "^2.88.2",
-       "request-progress": "^3.0.0"
+       "request-progress": "^3.0.0",
+       "use-debounce": "^9.0.4"
      },
      "devDependencies": {
+       "@electron/notarize": "^2.1.0",
        "@playwright/test": "^1.38.1",
        "@typescript-eslint/eslint-plugin": "^6.7.3",
        "@typescript-eslint/parser": "^6.7.3",
        "electron": "26.2.1",
        "electron-builder": "^24.6.4",
        "electron-playwright-helpers": "^1.6.0",
-       "eslint-plugin-react": "^7.33.2",
-       "xvfb-maybe": "^0.2.1"
+       "eslint-plugin-react": "^7.33.2"
      }
    },
    "node_modules/@aashutoshrathi/word-wrap": {
@ -711,6 +713,126 @@
        "node": ">= 10"
      }
    },
+   "node_modules/@next/swc-darwin-x64": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-13.4.10.tgz",
+     "integrity": "sha512-ngXhUBbcZIWZWqNbQSNxQrB9T1V+wgfCzAor2olYuo/YpaL6mUYNUEgeBMhr8qwV0ARSgKaOp35lRvB7EmCRBg==",
+     "cpu": [
+       "x64"
+     ],
+     "optional": true,
+     "os": [
+       "darwin"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-linux-arm64-gnu": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.4.10.tgz",
+     "integrity": "sha512-SjCZZCOmHD4uyM75MVArSAmF5Y+IJSGroPRj2v9/jnBT36SYFTORN8Ag/lhw81W9EeexKY/CUg2e9mdebZOwsg==",
+     "cpu": [
+       "arm64"
+     ],
+     "optional": true,
+     "os": [
+       "linux"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-linux-arm64-musl": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.4.10.tgz",
+     "integrity": "sha512-F+VlcWijX5qteoYIOxNiBbNE8ruaWuRlcYyIRK10CugqI/BIeCDzEDyrHIHY8AWwbkTwe6GRHabMdE688Rqq4Q==",
+     "cpu": [
+       "arm64"
+     ],
+     "optional": true,
+     "os": [
+       "linux"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-linux-x64-gnu": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.4.10.tgz",
+     "integrity": "sha512-WDv1YtAV07nhfy3i1visr5p/tjiH6CeXp4wX78lzP1jI07t4PnHHG1WEDFOduXh3WT4hG6yN82EQBQHDi7hBrQ==",
+     "cpu": [
+       "x64"
+     ],
+     "optional": true,
+     "os": [
+       "linux"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-linux-x64-musl": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.4.10.tgz",
+     "integrity": "sha512-zFkzqc737xr6qoBgDa3AwC7jPQzGLjDlkNmt/ljvQJ/Veri5ECdHjZCUuiTUfVjshNIIpki6FuP0RaQYK9iCRg==",
+     "cpu": [
+       "x64"
+     ],
+     "optional": true,
+     "os": [
+       "linux"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-win32-arm64-msvc": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.4.10.tgz",
+     "integrity": "sha512-IboRS8IWz5mWfnjAdCekkl8s0B7ijpWeDwK2O8CdgZkoCDY0ZQHBSGiJ2KViAG6+BJVfLvcP+a2fh6cdyBr9QQ==",
+     "cpu": [
+       "arm64"
+     ],
+     "optional": true,
+     "os": [
+       "win32"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-win32-ia32-msvc": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.4.10.tgz",
+     "integrity": "sha512-bSA+4j8jY4EEiwD/M2bol4uVEu1lBlgsGdvM+mmBm/BbqofNBfaZ2qwSbwE2OwbAmzNdVJRFRXQZ0dkjopTRaQ==",
+     "cpu": [
+       "ia32"
+     ],
+     "optional": true,
+     "os": [
+       "win32"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
+   "node_modules/@next/swc-win32-x64-msvc": {
+     "version": "13.4.10",
+     "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.4.10.tgz",
+     "integrity": "sha512-g2+tU63yTWmcVQKDGY0MV1PjjqgZtwM4rB1oVVi/v0brdZAcrcTV+04agKzWtvWroyFz6IqtT0MoZJA7PNyLVw==",
+     "cpu": [
+       "x64"
+     ],
+     "optional": true,
+     "os": [
+       "win32"
+     ],
+     "engines": {
+       "node": ">= 10"
+     }
+   },
    "node_modules/@nodelib/fs.scandir": {
      "version": "2.1.5",
      "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
@ -1666,6 +1788,15 @@
        "url": "https://opencollective.com/typescript-eslint"
      }
    },
+   "node_modules/@uiball/loaders": {
+     "version": "1.3.0",
+     "resolved": "https://registry.npmjs.org/@uiball/loaders/-/loaders-1.3.0.tgz",
+     "integrity": "sha512-w372e7PMt/s6LZ321HoghgDDU8fomamAzJfrVAdBUhsWERJEpxJMqG37NFztUq/T4J7nzzjkvZI4UX7Z2F/O6A==",
+     "peerDependencies": {
+       "react": ">=16.8.0",
+       "react-dom": ">=16.8.0"
+     }
+   },
    "node_modules/@xmldom/xmldom": {
      "version": "0.8.10",
      "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz",
@ -1788,14 +1919,6 @@
        "ajv": "^6.9.1"
      }
    },
-   "node_modules/ansi-colors": {
-     "version": "4.1.3",
-     "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz",
-     "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==",
-     "engines": {
-       "node": ">=6"
-     }
-   },
    "node_modules/ansi-regex": {
      "version": "5.0.1",
      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@ -2610,11 +2733,6 @@
        "node": ">=8"
      }
    },
-   "node_modules/browser-stdout": {
-     "version": "1.3.1",
-     "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
-     "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw=="
-   },
    "node_modules/browserslist": {
      "version": "4.22.1",
      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.1.tgz",
@ -2856,17 +2974,6 @@
        "node": ">=6"
      }
    },
-   "node_modules/camelcase": {
-     "version": "6.3.0",
-     "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
-     "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
-     "engines": {
-       "node": ">=10"
-     },
-     "funding": {
-       "url": "https://github.com/sponsors/sindresorhus"
-     }
-   },
    "node_modules/camelcase-css": {
      "version": "2.0.1",
      "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz",
@ -3104,6 +3211,7 @@
      "version": "8.0.1",
      "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
      "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
+     "dev": true,
      "license": "ISC",
      "dependencies": {
        "string-width": "^4.2.0",
@ -3607,17 +3715,6 @@
      "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
      "license": "MIT"
    },
-   "node_modules/decamelize": {
-     "version": "4.0.0",
-     "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
-     "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
-     "engines": {
-       "node": ">=10"
-     },
-     "funding": {
-       "url": "https://github.com/sponsors/sindresorhus"
-     }
-   },
    "node_modules/decode-named-character-reference": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz",
@ -4070,38 +4167,6 @@
        "node": ">=14.0.0"
      }
    },
-   "node_modules/electron-mocha": {
-     "version": "12.1.0",
-     "resolved": "https://registry.npmjs.org/electron-mocha/-/electron-mocha-12.1.0.tgz",
-     "integrity": "sha512-9ZIvyHGbet4ZtvF2NYYjGm7/yPljnTxbHo8psVX/HQAFPp9vZE0mCNWlzwE42keq/42gBW6W40MtKmgn1v42hQ==",
-     "dependencies": {
-       "ansi-colors": "^4.1.1",
-       "electron-window": "^0.8.0",
-       "mocha": "^10.2.0",
-       "which": "^3.0.0",
-       "yargs": "^17.7.2"
-     },
-     "bin": {
-       "electron-mocha": "bin/electron-mocha"
-     },
-     "engines": {
-       "node": ">= 16.0.0"
-     }
-   },
-   "node_modules/electron-mocha/node_modules/which": {
"version": "3.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
|
|
||||||
"integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
|
|
||||||
"dependencies": {
|
|
||||||
"isexe": "^2.0.0"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"node-which": "bin/which.js"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/electron-playwright-helpers": {
|
"node_modules/electron-playwright-helpers": {
|
||||||
"version": "1.6.0",
|
"version": "1.6.0",
|
||||||
"resolved": "https://registry.npmjs.org/electron-playwright-helpers/-/electron-playwright-helpers-1.6.0.tgz",
|
"resolved": "https://registry.npmjs.org/electron-playwright-helpers/-/electron-playwright-helpers-1.6.0.tgz",
|
||||||
@ -4163,14 +4228,6 @@
|
|||||||
"tiny-typed-emitter": "^2.1.0"
|
"tiny-typed-emitter": "^2.1.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/electron-window": {
|
|
||||||
"version": "0.8.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/electron-window/-/electron-window-0.8.1.tgz",
|
|
||||||
"integrity": "sha512-W1i9LfnZJozk3MXE8VgsL2E5wOUHSgyCvcg1H2vQQjj+gqhO9lVudgY3z3SF7LJAmi+0vy3CJkbMqsynWB49EA==",
|
|
||||||
"dependencies": {
|
|
||||||
"is-electron-renderer": "^2.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/electron/node_modules/@types/node": {
|
"node_modules/electron/node_modules/@types/node": {
|
||||||
"version": "18.18.3",
|
"version": "18.18.3",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.3.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.3.tgz",
|
||||||
@ -5250,14 +5307,6 @@
|
|||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/flat": {
|
|
||||||
"version": "5.0.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
|
|
||||||
"integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
|
|
||||||
"bin": {
|
|
||||||
"flat": "cli.js"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/flat-cache": {
|
"node_modules/flat-cache": {
|
||||||
"version": "3.1.0",
|
"version": "3.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.0.tgz",
|
||||||
@ -5512,6 +5561,7 @@
|
|||||||
"version": "2.0.5",
|
"version": "2.0.5",
|
||||||
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
|
||||||
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
|
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
|
||||||
|
"dev": true,
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "6.* || 8.* || >= 10.*"
|
"node": "6.* || 8.* || >= 10.*"
|
||||||
@ -6175,14 +6225,6 @@
|
|||||||
"url": "https://github.com/sponsors/wooorm"
|
"url": "https://github.com/sponsors/wooorm"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/he": {
|
|
||||||
"version": "1.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
|
|
||||||
"integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
|
|
||||||
"bin": {
|
|
||||||
"he": "bin/he"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/highlight.js": {
|
"node_modules/highlight.js": {
|
||||||
"version": "10.7.3",
|
"version": "10.7.3",
|
||||||
"resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz",
|
"resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz",
|
||||||
@ -6710,11 +6752,6 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/is-electron-renderer": {
|
|
||||||
"version": "2.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-electron-renderer/-/is-electron-renderer-2.0.1.tgz",
|
|
||||||
"integrity": "sha512-pRlQnpaCFhDVPtkXkP+g9Ybv/CjbiQDjnKFQTEjpBfDKeV6dRDBczuFRDpM6DVfk2EjpMS8t5kwE5jPnqYl3zA=="
|
|
||||||
},
|
|
||||||
"node_modules/is-equal-shallow": {
|
"node_modules/is-equal-shallow": {
|
||||||
"version": "0.1.3",
|
"version": "0.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz",
|
||||||
@ -7021,17 +7058,6 @@
|
|||||||
"integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==",
|
"integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/is-unicode-supported": {
|
|
||||||
"version": "0.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz",
|
|
||||||
"integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/is-weakmap": {
|
"node_modules/is-weakmap": {
|
||||||
"version": "2.0.1",
|
"version": "2.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz",
|
||||||
@ -7168,7 +7194,7 @@
|
|||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/jan-electron": {
|
"node_modules/jan": {
|
||||||
"resolved": "electron",
|
"resolved": "electron",
|
||||||
"link": true
|
"link": true
|
||||||
},
|
},
|
||||||
@ -7540,21 +7566,6 @@
|
|||||||
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
|
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/log-symbols": {
|
|
||||||
"version": "4.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz",
|
|
||||||
"integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==",
|
|
||||||
"dependencies": {
|
|
||||||
"chalk": "^4.1.0",
|
|
||||||
"is-unicode-supported": "^0.1.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/longest-streak": {
|
"node_modules/longest-streak": {
|
||||||
"version": "3.1.0",
|
"version": "3.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
|
||||||
@ -8552,249 +8563,6 @@
|
|||||||
"mkdirp": "bin/cmd.js"
|
"mkdirp": "bin/cmd.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/mocha": {
|
|
||||||
"version": "10.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz",
|
|
||||||
"integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==",
|
|
||||||
"dependencies": {
|
|
||||||
"ansi-colors": "4.1.1",
|
|
||||||
"browser-stdout": "1.3.1",
|
|
||||||
"chokidar": "3.5.3",
|
|
||||||
"debug": "4.3.4",
|
|
||||||
"diff": "5.0.0",
|
|
||||||
"escape-string-regexp": "4.0.0",
|
|
||||||
"find-up": "5.0.0",
|
|
||||||
"glob": "7.2.0",
|
|
||||||
"he": "1.2.0",
|
|
||||||
"js-yaml": "4.1.0",
|
|
||||||
"log-symbols": "4.1.0",
|
|
||||||
"minimatch": "5.0.1",
|
|
||||||
"ms": "2.1.3",
|
|
||||||
"nanoid": "3.3.3",
|
|
||||||
"serialize-javascript": "6.0.0",
|
|
||||||
"strip-json-comments": "3.1.1",
|
|
||||||
"supports-color": "8.1.1",
|
|
||||||
"workerpool": "6.2.1",
|
|
||||||
"yargs": "16.2.0",
|
|
||||||
"yargs-parser": "20.2.4",
|
|
||||||
"yargs-unparser": "2.0.0"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"_mocha": "bin/_mocha",
|
|
||||||
"mocha": "bin/mocha.js"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 14.0.0"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/mochajs"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/ansi-colors": {
|
|
||||||
"version": "4.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
|
|
||||||
"integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/anymatch": {
|
|
||||||
"version": "3.1.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
|
|
||||||
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
|
|
||||||
"dependencies": {
|
|
||||||
"normalize-path": "^3.0.0",
|
|
||||||
"picomatch": "^2.0.4"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/binary-extensions": {
|
|
||||||
"version": "2.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
|
|
||||||
"integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/chokidar": {
|
|
||||||
"version": "3.5.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
|
|
||||||
"integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
|
|
||||||
"funding": [
|
|
||||||
{
|
|
||||||
"type": "individual",
|
|
||||||
"url": "https://paulmillr.com/funding/"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"dependencies": {
|
|
||||||
"anymatch": "~3.1.2",
|
|
||||||
"braces": "~3.0.2",
|
|
||||||
"glob-parent": "~5.1.2",
|
|
||||||
"is-binary-path": "~2.1.0",
|
|
||||||
"is-glob": "~4.0.1",
|
|
||||||
"normalize-path": "~3.0.0",
|
|
||||||
"readdirp": "~3.6.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 8.10.0"
|
|
||||||
},
|
|
||||||
"optionalDependencies": {
|
|
||||||
"fsevents": "~2.3.2"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/cliui": {
|
|
||||||
"version": "7.0.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
|
|
||||||
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
|
|
||||||
"dependencies": {
|
|
||||||
"string-width": "^4.2.0",
|
|
||||||
"strip-ansi": "^6.0.0",
|
|
||||||
"wrap-ansi": "^7.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/diff": {
|
|
||||||
"version": "5.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
|
|
||||||
"integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.3.1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/fsevents": {
|
|
||||||
"version": "2.3.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
|
||||||
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
|
|
||||||
"hasInstallScript": true,
|
|
||||||
"optional": true,
|
|
||||||
"os": [
|
|
||||||
"darwin"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/glob": {
|
|
||||||
"version": "7.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
|
|
||||||
"integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
|
|
||||||
"dependencies": {
|
|
||||||
"fs.realpath": "^1.0.0",
|
|
||||||
"inflight": "^1.0.4",
|
|
||||||
"inherits": "2",
|
|
||||||
"minimatch": "^3.0.4",
|
|
||||||
"once": "^1.3.0",
|
|
||||||
"path-is-absolute": "^1.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": "*"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/isaacs"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/glob-parent": {
|
|
||||||
"version": "5.1.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
|
|
||||||
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
|
|
||||||
"dependencies": {
|
|
||||||
"is-glob": "^4.0.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/glob/node_modules/brace-expansion": {
|
|
||||||
"version": "1.1.11",
|
|
||||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
|
||||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
|
||||||
"dependencies": {
|
|
||||||
"balanced-match": "^1.0.0",
|
|
||||||
"concat-map": "0.0.1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/glob/node_modules/minimatch": {
|
|
||||||
"version": "3.1.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
|
||||||
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
|
||||||
"dependencies": {
|
|
||||||
"brace-expansion": "^1.1.7"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": "*"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/is-binary-path": {
|
|
||||||
"version": "2.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
|
||||||
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
|
|
||||||
"dependencies": {
|
|
||||||
"binary-extensions": "^2.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/minimatch": {
|
|
||||||
"version": "5.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
|
|
||||||
"integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
|
|
||||||
"dependencies": {
|
|
||||||
"brace-expansion": "^2.0.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/nanoid": {
|
|
||||||
"version": "3.3.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz",
|
|
||||||
"integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==",
|
|
||||||
"bin": {
|
|
||||||
"nanoid": "bin/nanoid.cjs"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/readdirp": {
|
|
||||||
"version": "3.6.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
|
|
||||||
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
|
|
||||||
"dependencies": {
|
|
||||||
"picomatch": "^2.2.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/yargs": {
|
|
||||||
"version": "16.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
|
|
||||||
"integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
|
|
||||||
"dependencies": {
|
|
||||||
"cliui": "^7.0.2",
|
|
||||||
"escalade": "^3.1.1",
|
|
||||||
"get-caller-file": "^2.0.5",
|
|
||||||
"require-directory": "^2.1.1",
|
|
||||||
"string-width": "^4.2.0",
|
|
||||||
"y18n": "^5.0.5",
|
|
||||||
"yargs-parser": "^20.2.2"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mocha/node_modules/yargs-parser": {
|
|
||||||
"version": "20.2.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
|
|
||||||
"integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mri": {
|
"node_modules/mri": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
|
||||||
@ -10637,14 +10405,6 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/randombytes": {
|
|
||||||
"version": "2.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
|
|
||||||
"integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
|
|
||||||
"dependencies": {
|
|
||||||
"safe-buffer": "^5.1.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/react": {
|
"node_modules/react": {
|
||||||
"version": "18.2.0",
|
"version": "18.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
|
||||||
@ -10686,6 +10446,14 @@
|
|||||||
"react": "^16.8.0 || ^17 || ^18"
|
"react": "^16.8.0 || ^17 || ^18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/react-intersection-observer": {
|
||||||
|
"version": "9.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.5.2.tgz",
|
||||||
|
"integrity": "sha512-EmoV66/yvksJcGa1rdW0nDNc4I1RifDWkT50gXSFnPLYQ4xUptuDD4V7k+Rj1OgVAlww628KLGcxPXFlOkkU/Q==",
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/react-is": {
|
"node_modules/react-is": {
|
||||||
"version": "16.13.1",
|
"version": "16.13.1",
|
||||||
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
|
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
|
||||||
@ -11476,6 +11244,7 @@
|
|||||||
"version": "2.1.1",
|
"version": "2.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
||||||
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
|
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
|
||||||
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
@ -11845,14 +11614,6 @@
|
|||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/serialize-javascript": {
|
|
||||||
"version": "6.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
|
|
||||||
"integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
|
|
||||||
"dependencies": {
|
|
||||||
"randombytes": "^2.1.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/set-blocking": {
|
"node_modules/set-blocking": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
|
||||||
@ -12606,6 +12367,7 @@
|
|||||||
"version": "8.1.1",
|
"version": "8.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
|
||||||
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
|
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
|
||||||
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"has-flag": "^4.0.0"
|
"has-flag": "^4.0.0"
|
||||||
@ -13569,6 +13331,17 @@
|
|||||||
"node": ">=0.10.0"
|
"node": ">=0.10.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/use-debounce": {
|
||||||
|
"version": "9.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/use-debounce/-/use-debounce-9.0.4.tgz",
|
||||||
|
"integrity": "sha512-6X8H/mikbrt0XE8e+JXRtZ8yYVvKkdYRfmIhWZYsP8rcNs9hk3APV8Ua2mFkKRLcJKVdnX2/Vwrmg2GWKUQEaQ==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 10.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": ">=16.8.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/utf8-byte-length": {
|
"node_modules/utf8-byte-length": {
|
||||||
"version": "1.0.4",
|
"version": "1.0.4",
|
||||||
"resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
|
"resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
|
||||||
@ -13876,15 +13649,11 @@
|
|||||||
"string-width": "^1.0.2 || 2 || 3 || 4"
|
"string-width": "^1.0.2 || 2 || 3 || 4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/workerpool": {
|
|
||||||
"version": "6.2.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz",
|
|
||||||
"integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw=="
|
|
||||||
},
|
|
||||||
"node_modules/wrap-ansi": {
|
"node_modules/wrap-ansi": {
|
||||||
"version": "7.0.0",
|
"version": "7.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
||||||
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
|
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
|
||||||
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ansi-styles": "^4.0.0",
|
"ansi-styles": "^4.0.0",
|
||||||
@ -13954,54 +13723,11 @@
|
|||||||
"node": ">=0.4"
|
"node": ">=0.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/xvfb-maybe": {
|
|
||||||
"version": "0.2.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/xvfb-maybe/-/xvfb-maybe-0.2.1.tgz",
|
|
||||||
"integrity": "sha512-9IyRz3l6Qyhl6LvnGRF5jMPB4oBEepQnuzvVAFTynP6ACLLSevqigICJ9d/+ofl29m2daeaVBChnPYUnaeJ7yA==",
|
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"debug": "^2.2.0",
|
|
||||||
"which": "^1.2.4"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"xvfb-maybe": "src/xvfb-maybe.js"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/xvfb-maybe/node_modules/debug": {
|
|
||||||
"version": "2.6.9",
|
|
||||||
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
|
||||||
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
|
||||||
"dev": true,
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"ms": "2.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/xvfb-maybe/node_modules/ms": {
|
|
||||||
"version": "2.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
|
||||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
|
||||||
"dev": true,
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/xvfb-maybe/node_modules/which": {
|
|
||||||
"version": "1.3.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
|
|
||||||
"integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
|
|
||||||
"dev": true,
|
|
||||||
"license": "ISC",
|
|
||||||
"dependencies": {
|
|
||||||
"isexe": "^2.0.0"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"which": "bin/which"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/y18n": {
|
"node_modules/y18n": {
|
||||||
"version": "5.0.8",
|
"version": "5.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
|
||||||
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
|
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
|
||||||
|
"dev": true,
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
@ -14026,6 +13752,7 @@
|
|||||||
"version": "17.7.2",
|
"version": "17.7.2",
|
||||||
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
|
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
|
||||||
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
|
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
|
||||||
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"cliui": "^8.0.1",
|
"cliui": "^8.0.1",
|
||||||
@ -14044,33 +13771,12 @@
|
|||||||
"version": "21.1.1",
|
"version": "21.1.1",
|
||||||
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
|
||||||
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
|
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
|
||||||
|
"dev": true,
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=12"
|
"node": ">=12"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/yargs-unparser": {
|
|
||||||
"version": "2.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
|
|
||||||
"integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
|
|
||||||
"dependencies": {
|
|
||||||
"camelcase": "^6.0.0",
|
|
||||||
"decamelize": "^4.0.0",
|
|
||||||
"flat": "^5.0.2",
|
|
||||||
"is-plain-obj": "^2.1.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/yargs-unparser/node_modules/is-plain-obj": {
|
|
||||||
"version": "2.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
|
|
||||||
"integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/yauzl": {
|
"node_modules/yauzl": {
|
||||||
"version": "2.10.0",
|
"version": "2.10.0",
|
||||||
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
|
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
|
||||||
|
|||||||
22
package.json
@@ -14,23 +14,23 @@
     ]
   },
   "scripts": {
-    "lint": "yarn workspace jan-electron lint && yarn workspace jan-web lint",
+    "lint": "yarn workspace jan lint && yarn workspace jan-web lint",
-    "test": "yarn workspace jan-electron test:e2e",
+    "test": "yarn workspace jan test:e2e",
-    "dev:electron": "yarn workspace jan-electron dev",
+    "dev:electron": "yarn workspace jan dev",
     "dev:web": "yarn workspace jan-web dev",
     "dev": "concurrently --kill-others \"yarn dev:web\" \"wait-on http://localhost:3000 && yarn dev:electron\"",
     "build:web": "yarn workspace jan-web build && cpx \"web/out/**\" \"electron/renderer/\"",
-    "build:electron": "yarn workspace jan-electron build",
+    "build:electron": "yarn workspace jan build",
     "build:plugins": "rimraf ./electron/core/pre-install/*.tgz && concurrently \"cd ./electron/core/plugins/data-plugin && npm ci\" \"cd ./electron/core/plugins/inference-plugin && npm ci\" \"cd ./electron/core/plugins/model-management-plugin && npm ci\" \"cd ./electron/core/plugins/monitoring-plugin && npm ci\" && concurrently \"cd ./electron/core/plugins/data-plugin && npm run build:publish\" \"cd ./electron/core/plugins/inference-plugin && npm run build:publish\" \"cd ./electron/core/plugins/model-management-plugin && npm run build:publish\" \"cd ./electron/core/plugins/monitoring-plugin && npm run build:publish\"",
     "build:plugins-darwin": "rimraf ./electron/core/pre-install/*.tgz && concurrently \"cd ./electron/core/plugins/data-plugin && npm ci\" \"cd ./electron/core/plugins/inference-plugin && npm ci\" \"cd ./electron/core/plugins/model-management-plugin && npm ci\" \"cd ./electron/core/plugins/monitoring-plugin && npm ci\" && chmod +x ./electron/auto-sign.sh && ./electron/auto-sign.sh && concurrently \"cd ./electron/core/plugins/data-plugin && npm run build:publish\" \"cd ./electron/core/plugins/inference-plugin && npm run build:publish\" \"cd ./electron/core/plugins/model-management-plugin && npm run build:publish\" \"cd ./electron/core/plugins/monitoring-plugin && npm run build:publish\"",
     "build": "yarn build:web && yarn build:electron",
-    "build:darwin": "yarn build:web && yarn workspace jan-electron build:darwin",
+    "build:darwin": "yarn build:web && yarn workspace jan build:darwin",
-    "build:win32": "yarn build:web && yarn workspace jan-electron build:win32",
+    "build:win32": "yarn build:web && yarn workspace jan build:win32",
-    "build:linux": "yarn build:web && yarn workspace jan-electron build:linux",
+    "build:linux": "yarn build:web && yarn workspace jan build:linux",
-    "build:publish": "yarn build:web && yarn workspace jan-electron build:publish",
+    "build:publish": "yarn build:web && yarn workspace jan build:publish",
-    "build:publish-darwin": "yarn build:web && yarn workspace jan-electron build:publish-darwin",
+    "build:publish-darwin": "yarn build:web && yarn workspace jan build:publish-darwin",
-    "build:publish-win32": "yarn build:web && yarn workspace jan-electron build:publish-win32",
+    "build:publish-win32": "yarn build:web && yarn workspace jan build:publish-win32",
-    "build:publish-linux": "yarn build:web && yarn workspace jan-electron build:publish-linux"
+    "build:publish-linux": "yarn build:web && yarn workspace jan build:publish-linux"
   },
   "devDependencies": {
     "concurrently": "^8.2.1",
@@ -1,10 +1,10 @@
 import { useAtomValue } from "jotai";
-import React, { Fragment } from "react";
+import React from "react";
 import ModelTable from "../ModelTable";
-import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
+import { activeAssistantModelAtom } from "@/_helpers/atoms/Model.atom";
 
 const ActiveModelTable: React.FC = () => {
-  const activeModel = useAtomValue(currentProductAtom);
+  const activeModel = useAtomValue(activeAssistantModelAtom);
 
   if (!activeModel) return null;
 
@@ -1,19 +1,19 @@
-import { Product } from "@/_models/Product";
 import DownloadModelContent from "../DownloadModelContent";
 import ModelDownloadButton from "../ModelDownloadButton";
 import ModelDownloadingButton from "../ModelDownloadingButton";
 import { useAtomValue } from "jotai";
 import { modelDownloadStateAtom } from "@/_helpers/atoms/DownloadState.atom";
+import { AssistantModel } from "@/_models/AssistantModel";
 
 type Props = {
-  product: Product;
+  model: AssistantModel;
   isRecommend: boolean;
   required?: string;
-  onDownloadClick?: (product: Product) => void;
+  onDownloadClick?: (model: AssistantModel) => void;
 };
 
 const AvailableModelCard: React.FC<Props> = ({
-  product,
+  model,
   isRecommend,
   required,
   onDownloadClick,
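Note: the real `AssistantModel` type lives in `@/_models/AssistantModel` and is not part of this diff. The sketch below is only an illustration of the fields these components are seen reading (id, name, author, type, avatarUrl, shortDescription); the actual interface may carry more or different fields.

// Hypothetical sketch, not the project's definition: only the fields the
// components in this diff access are listed here.
export interface AssistantModel {
  id: string;
  name: string;
  author: string;
  type: string;
  avatarUrl: string;
  shortDescription: string;
}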
@@ -24,14 +24,14 @@ const AvailableModelCard: React.FC<Props> = ({
   let total = 0;
   let transferred = 0;
 
-  if (product.fileName && downloadState[product.fileName]) {
+  if (model.id && downloadState[model.id]) {
     isDownloading =
-      downloadState[product.fileName].error == null &&
-      downloadState[product.fileName].percent < 1;
+      downloadState[model.id].error == null &&
+      downloadState[model.id].percent < 1;
 
     if (isDownloading) {
-      total = downloadState[product.fileName].size.total;
-      transferred = downloadState[product.fileName].size.transferred;
+      total = downloadState[model.id].size.total;
+      transferred = downloadState[model.id].size.transferred;
     }
   }
 
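The hunk above switches the download-state lookup key from `product.fileName` to `model.id`. A minimal standalone sketch of the same bookkeeping follows; the `DownloadEntry` shape is an assumption limited to the fields this component reads (error, percent, size.total, size.transferred), not the app's actual download-state type.

// Sketch only: assumes a state map keyed by model id with the fields read above.
type DownloadEntry = {
  error?: unknown;
  percent: number; // fraction in [0, 1]
  size: { total: number; transferred: number };
};

function getDownloadProgress(
  downloadState: Record<string, DownloadEntry>,
  modelId: string
) {
  const entry = downloadState[modelId];
  // A download is "in flight" when an entry exists, has no error, and is below 100%.
  const isDownloading = !!entry && entry.error == null && entry.percent < 1;
  return {
    isDownloading,
    total: isDownloading ? entry.size.total : 0,
    transferred: isDownloading ? entry.size.transferred : 0,
  };
}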
@@ -41,7 +41,7 @@ const AvailableModelCard: React.FC<Props> = ({
       </div>
     ) : (
       <div className="w-1/5 flex items-center justify-end">
-        <ModelDownloadButton callback={() => onDownloadClick?.(product)} />
+        <ModelDownloadButton callback={() => onDownloadClick?.(model)} />
       </div>
     );
 
@@ -50,11 +50,11 @@ const AvailableModelCard: React.FC<Props> = ({
     <div className="flex justify-between py-4 px-3 gap-2.5">
       <DownloadModelContent
         required={required}
-        author={product.author}
-        description={product.description}
+        author={model.author}
+        description={model.shortDescription}
         isRecommend={isRecommend}
-        name={product.name}
-        type={product.type}
+        name={model.name}
+        type={model.type}
       />
       {downloadButton}
     </div>
@@ -1,7 +1,7 @@
 "use client";
 
 import { useSetAtom } from "jotai";
-import SecondaryButton from "../SecondaryButton";
+import { InformationCircleIcon } from "@heroicons/react/24/outline";
 import SendButton from "../SendButton";
 import { showingAdvancedPromptAtom } from "@/_helpers/atoms/Modal.atom";
 
@@ -11,22 +11,21 @@ const BasicPromptAccessories: React.FC = () => {
   const shouldShowAdvancedPrompt = false;
 
   return (
-    <div
-      style={{
-        backgroundColor: "#F8F8F8",
-        borderWidth: 1,
-        borderColor: "#D1D5DB",
-      }}
-      className="flex justify-between py-2 pl-3 pr-2 rounded-b-lg"
+    <div className="absolute inset-x-0 bottom-0 flex justify-between py-2 pl-3 pr-2">
+      {/* Add future accessories here, e.g upload a file */}
+      <div className="flex items-center space-x-5">
+        <div className="flex items-center">
+          {/* <button
+            type="button"
+            className="-m-2.5 flex h-10 w-10 items-center justify-center rounded-full text-gray-400 hover:text-gray-500"
    >
-      {shouldShowAdvancedPrompt && (
-        <SecondaryButton
-          title="Advanced"
-          onClick={() => setShowingAdvancedPrompt(true)}
-        />
-      )}
-      <div className="flex justify-end items-center space-x-1 w-full pr-3" />
-      {!shouldShowAdvancedPrompt && <SendButton />}
+            <InformationCircleIcon className="h-5 w-5" aria-hidden="true" />
+          </button> */}
+        </div>
+      </div>
+      <div className="flex-shrink-0">
+        <SendButton />
+      </div>
     </div>
   );
 };
@@ -7,7 +7,7 @@ import useCreateConversation from "@/_hooks/useCreateConversation";
 import useInitModel from "@/_hooks/useInitModel";
 import useSendChatMessage from "@/_hooks/useSendChatMessage";
 import { useAtom, useAtomValue } from "jotai";
-import { ChangeEvent } from "react";
+import { ChangeEvent, useEffect, useRef } from "react";
 
 const BasicPromptInput: React.FC = () => {
   const activeConversationId = useAtomValue(getActiveConvoIdAtom);
@@ -18,9 +18,7 @@ const BasicPromptInput: React.FC = () => {
 
   const { initModel } = useInitModel();
 
-  const handleMessageChange = (event: ChangeEvent<HTMLTextAreaElement>) => {
-    setCurrentPrompt(event.target.value);
-  };
+  const textareaRef = useRef<HTMLTextAreaElement>(null);
 
   const handleKeyDown = async (
     event: React.KeyboardEvent<HTMLTextAreaElement>
@@ -44,17 +42,53 @@ const BasicPromptInput: React.FC = () => {
     }
   };
 
+  useEffect(() => {
+    adjustTextareaHeight();
+  }, [currentPrompt]);
+
+  const handleMessageChange = (event: ChangeEvent<HTMLTextAreaElement>) => {
+    setCurrentPrompt(event.target.value);
+  };
+
+  // Auto adjust textarea height based on content
+  const MAX_ROWS = 30;
+
+  const adjustTextareaHeight = () => {
+    if (textareaRef.current) {
+      textareaRef.current.style.height = "auto"; // 1 row
+      const scrollHeight = textareaRef.current.scrollHeight;
+      const maxScrollHeight =
+        parseInt(window.getComputedStyle(textareaRef.current).lineHeight, 10) *
+        MAX_ROWS;
+      textareaRef.current.style.height = `${Math.min(
+        scrollHeight,
+        maxScrollHeight
+      )}px`;
+    }
+  };
+
   return (
-    <textarea
-      onKeyDown={handleKeyDown}
-      value={currentPrompt}
-      onChange={handleMessageChange}
-      rows={2}
-      name="comment"
-      id="comment"
-      className="overflow-hidden block w-full scroll resize-none border-0 bg-transparent py-1.5 text-gray-900 transition-height duration-200 placeholder:text-gray-400 sm:text-sm sm:leading-6 dark:text-white"
-      placeholder="Add your comment..."
-    />
+    <div className="overflow-hidden rounded-lg shadow-sm ring-1 ring-inset ring-gray-300 focus-within:ring-2 focus-within:ring-indigo-600">
+      <textarea
+        ref={textareaRef}
+        onKeyDown={handleKeyDown}
+        value={currentPrompt}
+        onChange={handleMessageChange}
+        name="comment"
+        id="comment"
+        className="block w-full resize-none border-0 bg-transparent py-1.5 text-gray-900 placeholder:text-gray-400 focus:ring-0 sm:text-sm sm:leading-6"
+        placeholder="Message ..."
+        rows={1}
+        style={{ overflow: "auto" }}
+      />
+      {/* Spacer element to match the height of the toolbar */}
+      <div className="py-2" aria-hidden="true">
+        {/* Matches height of button in toolbar (1px border + 36px content height) */}
+        <div className="py-px">
+          <div className="h-9" />
+        </div>
+      </div>
+    </div>
   );
 };
 
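The new BasicPromptInput grows the textarea with its content, capped at MAX_ROWS, by collapsing the height to "auto" and clamping scrollHeight against line-height × MAX_ROWS. A standalone, DOM-only sketch of that resize step follows; it assumes the element's computed line-height resolves to a pixel value (if it is "normal", parseInt would yield NaN and the cap would be skipped in practice).

// Sketch of the resize technique used above, outside React.
const MAX_ROWS = 30;

function adjustTextareaHeight(el: HTMLTextAreaElement): void {
  el.style.height = "auto"; // collapse so scrollHeight reflects the content
  const lineHeight = parseInt(window.getComputedStyle(el).lineHeight, 10);
  const maxHeight = lineHeight * MAX_ROWS;
  el.style.height = `${Math.min(el.scrollHeight, maxHeight)}px`;
}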
@@ -63,8 +63,8 @@ const ConfirmDeleteConversationModal: React.FC = () => {
               <div className="mt-2">
                 <p className="text-sm text-gray-500">
                   Are you sure you want to delete this conversation? All
-                  of messages will be permanently removed from our servers
-                  forever. This action cannot be undone.
+                  of messages will be permanently removed. This action
+                  cannot be undone.
                 </p>
               </div>
             </div>
@@ -1,20 +1,20 @@
 import React from "react";
 import Image from "next/image";
 import useCreateConversation from "@/_hooks/useCreateConversation";
-import { Product } from "@/_models/Product";
+import { AssistantModel } from "@/_models/AssistantModel";
 
 type Props = {
-  product: Product;
+  model: AssistantModel;
 };
 
-const ConversationalCard: React.FC<Props> = ({ product }) => {
+const ConversationalCard: React.FC<Props> = ({ model }) => {
   const { requestCreateConvo } = useCreateConversation();
 
-  const { name, avatarUrl, description } = product;
+  const { name, avatarUrl, shortDescription } = model;
 
   return (
     <button
-      onClick={() => requestCreateConvo(product)}
+      onClick={() => requestCreateConvo(model)}
       className="flex flex-col justify-between flex-shrink-0 gap-3 bg-white p-4 w-52 rounded-lg text-left dark:bg-gray-700 hover:opacity-20"
     >
       <div className="flex flex-col gap-2 box-border">
@@ -29,7 +29,7 @@ const ConversationalCard: React.FC<Props> = ({ model }) => {
           {name}
         </h2>
         <span className="text-gray-600 mt-1 font-normal line-clamp-2">
-          {description}
+          {shortDescription}
         </span>
       </div>
       <span className="flex text-xs leading-5 text-gray-500 items-center gap-0.5">
@@ -1,12 +1,12 @@
-import { Product } from "@/_models/Product";
+import { AssistantModel } from "@/_models/AssistantModel";
 import ConversationalCard from "../ConversationalCard";
 import { ChatBubbleBottomCenterTextIcon } from "@heroicons/react/24/outline";
 
 type Props = {
-  products: Product[];
+  models: AssistantModel[];
 };
 
-const ConversationalList: React.FC<Props> = ({ products }) => (
+const ConversationalList: React.FC<Props> = ({ models }) => (
   <>
     <div className="flex items-center gap-3 mt-8 mb-2">
       <ChatBubbleBottomCenterTextIcon width={24} height={24} className="ml-6" />
@@ -15,8 +15,8 @@ const ConversationalList: React.FC<Props> = ({ models }) => (
       </span>
     </div>
     <div className="mt-2 pl-6 flex w-full gap-2 overflow-x-scroll scroll overflow-hidden">
-      {products.map((item) => (
-        <ConversationalCard key={item.slug} product={item} />
+      {models.map((item) => (
+        <ConversationalCard key={item.id} model={item} />
       ))}
     </div>
   </>
@@ -1,16 +1,16 @@
-import { Product } from "@/_models/Product";
+import { AssistantModel } from "@/_models/AssistantModel";
 import DownloadModelContent from "../DownloadModelContent";
 
 type Props = {
-  product: Product;
+  model: AssistantModel;
   isRecommend: boolean;
   required?: string;
   transferred?: number;
-  onDeleteClick?: (product: Product) => void;
+  onDeleteClick?: (model: AssistantModel) => void;
 };
 
 const DownloadedModelCard: React.FC<Props> = ({
-  product,
+  model,
   isRecommend,
   required,
   onDeleteClick,
@@ -19,14 +19,14 @@ const DownloadedModelCard: React.FC<Props> = ({
   <div className="flex justify-between py-4 px-3 gap-2.5">
     <DownloadModelContent
       required={required}
-      author={product.author}
-      description={product.description}
+      author={model.author}
+      description={model.shortDescription}
       isRecommend={isRecommend}
-      name={product.name}
-      type={product.type}
+      name={model.name}
+      type={model.type}
     />
     <div className="flex flex-col justify-center">
-      <button onClick={() => onDeleteClick?.(product)}>Delete</button>
+      <button onClick={() => onDeleteClick?.(model)}>Delete</button>
     </div>
   </div>
 </div>
@@ -6,10 +6,10 @@ import ExploreModelFilter from "../ExploreModelFilter";
 const ExploreModelContainer: React.FC = () => (
   <div className="flex flex-col flex-1 px-16 pt-14 overflow-hidden">
     <HeaderTitle title="Explore Models" />
-    <SearchBar
+    {/* <SearchBar
       type={SearchType.Model}
       placeholder="Owner name like TheBloke, bhlim etc.."
-    />
+    /> */}
     <div className="flex flex-1 gap-x-10 mt-9 overflow-hidden">
       <ExploreModelFilter />
       <ExploreModelList />
@@ -1,7 +1,8 @@
 import React from "react";
 import SearchBar from "../SearchBar";
 import SimpleCheckbox from "../SimpleCheckbox";
-import SimpleTag, { TagType } from "../SimpleTag";
+import SimpleTag from "../SimpleTag";
+import { TagType } from "../SimpleTag/TagType";
 
 const tags = [
   "Roleplay",
@@ -4,10 +4,20 @@
 
 import ExploreModelItemHeader from "../ExploreModelItemHeader";
 import ModelVersionList from "../ModelVersionList";
-import { Fragment, forwardRef, useState } from "react";
-import SimpleTag, { TagType } from "../SimpleTag";
+import { Fragment, forwardRef, useEffect, useState } from "react";
+import SimpleTag from "../SimpleTag";
+import {
+  MiscellanousTag,
+  NumOfBit,
+  QuantMethodTag,
+  RamRequired,
+  UsecaseTag,
+  VersionTag,
+} from "@/_components/SimpleTag/TagType";
 import { displayDate } from "@/_utils/datetime";
 import { Product } from "@/_models/Product";
+import useGetMostSuitableModelVersion from "@/_hooks/useGetMostSuitableModelVersion";
+import { toGigabytes } from "@/_utils/converter";
 
 type Props = {
   model: Product;
@@ -16,42 +26,33 @@
 const ExploreModelItem = forwardRef<HTMLDivElement, Props>(({ model }, ref) => {
   const [show, setShow] = useState(false);
 
+  const { availableVersions } = model;
+  const { suitableModel, getMostSuitableModelVersion } =
+    useGetMostSuitableModelVersion();
+
+  useEffect(() => {
+    getMostSuitableModelVersion(availableVersions);
+  }, [availableVersions]);
+
+  if (!suitableModel) {
+    return null;
+  }
+
+  const { quantMethod, bits, maxRamRequired, usecase } = suitableModel;
+
   return (
     <div
       ref={ref}
       className="flex flex-col border border-gray-200 rounded-md mb-4"
     >
       <ExploreModelItemHeader
-        name={model.name}
-        status={TagType.Recommended}
-        versions={model.availableVersions}
+        suitableModel={suitableModel}
+        exploreModel={model}
       />
       <div className="flex flex-col px-[26px] py-[22px]">
         <div className="flex justify-between">
           <div className="flex-1 flex flex-col gap-8">
             <div className="flex flex-col gap-1">
-              <div className="text-sm font-medium text-gray-500">
-                Model Format
-              </div>
-              <div className="px-2.5 py-0.5 bg-gray-100 text-xs text-gray-800 w-fit">
-                GGUF
-              </div>
-            </div>
-            <div className="flex flex-col">
-              <div className="text-sm font-medium text-gray-500">
-                Hardware Compatibility
-              </div>
-              <div className="flex gap-2">
-                <SimpleTag
-                  clickable={false}
-                  title={TagType.Compatible}
-                  type={TagType.Compatible}
-                />
-              </div>
-            </div>
-          </div>
-          <div className="flex-1 flex flex-col gap-8">
-            <div>
               <div className="text-sm font-medium text-gray-500">
                 Release Date
               </div>
@ -60,14 +61,49 @@ const ExploreModelItem = forwardRef<HTMLDivElement, Props>(({ model }, ref) => {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex flex-col gap-2">
|
<div className="flex flex-col gap-2">
|
||||||
<div className="text-sm font-medium text-gray-500">
|
<div className="text-sm font-medium text-gray-500">Version</div>
|
||||||
Expected Performance
|
<div className="flex gap-2">
|
||||||
|
<SimpleTag
|
||||||
|
title={model.version}
|
||||||
|
type={VersionTag.Version}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
<SimpleTag
|
||||||
|
title={quantMethod}
|
||||||
|
type={QuantMethodTag.Default}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
<SimpleTag
|
||||||
|
title={`${bits} Bits`}
|
||||||
|
type={NumOfBit.Default}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="flex-1 flex flex-col gap-8">
|
||||||
|
<div>
|
||||||
|
<div className="text-sm font-medium text-gray-500">Author</div>
|
||||||
|
<div className="text-sm font-normal text-gray-900">
|
||||||
|
{model.author}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col gap-2">
|
||||||
|
<div className="text-sm font-medium text-gray-500">
|
||||||
|
Compatibility
|
||||||
|
</div>
|
||||||
|
<div className="flex gap-2">
|
||||||
|
<SimpleTag
|
||||||
|
title={usecase}
|
||||||
|
type={UsecaseTag.UsecaseDefault}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
<SimpleTag
|
||||||
|
title={`${toGigabytes(maxRamRequired)} RAM required`}
|
||||||
|
type={RamRequired.RamDefault}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
<SimpleTag
|
|
||||||
title={TagType.Medium}
|
|
||||||
type={TagType.Medium}
|
|
||||||
clickable={false}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -77,16 +113,27 @@ const ExploreModelItem = forwardRef<HTMLDivElement, Props>(({ model }, ref) => {
|
|||||||
{model.longDescription}
|
{model.longDescription}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex flex-col">
|
<div className="flex flex-col mt-5 gap-2">
|
||||||
<span className="text-sm font-medium text-gray-500">Tags</span>
|
<span className="text-sm font-medium text-gray-500">Tags</span>
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{model.tags.map((tag) => (
|
||||||
|
<SimpleTag
|
||||||
|
key={tag}
|
||||||
|
title={tag}
|
||||||
|
type={MiscellanousTag.MiscellanousDefault}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{model.availableVersions.length > 0 && (
|
{model.availableVersions?.length > 0 && (
|
||||||
<Fragment>
|
<Fragment>
|
||||||
{show && (
|
{show && (
|
||||||
<ModelVersionList
|
<ModelVersionList
|
||||||
model={model}
|
model={model}
|
||||||
versions={model.availableVersions}
|
versions={model.availableVersions}
|
||||||
|
recommendedVersion={suitableModel?.id ?? ""}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
<button
|
<button
|
||||||
|
|||||||
@ -1,34 +1,74 @@
|
|||||||
import SimpleTag, { TagType } from "../SimpleTag";
|
import SimpleTag from "../SimpleTag";
|
||||||
import PrimaryButton from "../PrimaryButton";
|
import PrimaryButton from "../PrimaryButton";
|
||||||
import { formatDownloadPercentage, toGigabytes } from "@/_utils/converter";
|
import { formatDownloadPercentage, toGigabytes } from "@/_utils/converter";
|
||||||
import { DownloadState } from "@/_models/DownloadState";
|
|
||||||
import SecondaryButton from "../SecondaryButton";
|
import SecondaryButton from "../SecondaryButton";
|
||||||
import { ModelVersion } from "@/_models/Product";
|
import { Product } from "@/_models/Product";
|
||||||
|
import { useCallback, useEffect, useMemo } from "react";
|
||||||
|
import { ModelVersion } from "@/_models/ModelVersion";
|
||||||
|
import useGetPerformanceTag from "@/_hooks/useGetPerformanceTag";
|
||||||
|
import useDownloadModel from "@/_hooks/useDownloadModel";
|
||||||
|
import { useGetDownloadedModels } from "@/_hooks/useGetDownloadedModels";
|
||||||
|
import { modelDownloadStateAtom } from "@/_helpers/atoms/DownloadState.atom";
|
||||||
|
import { atom, useAtomValue, useSetAtom } from "jotai";
|
||||||
|
import {
|
||||||
|
MainViewState,
|
||||||
|
setMainViewStateAtom,
|
||||||
|
} from "@/_helpers/atoms/MainView.atom";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
name: string;
|
suitableModel: ModelVersion;
|
||||||
status: TagType;
|
exploreModel: Product;
|
||||||
versions: ModelVersion[];
|
|
||||||
size?: number;
|
|
||||||
downloadState?: DownloadState;
|
|
||||||
onDownloadClick?: () => void;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const ExploreModelItemHeader: React.FC<Props> = ({
|
const ExploreModelItemHeader: React.FC<Props> = ({
|
||||||
name,
|
suitableModel,
|
||||||
status,
|
exploreModel,
|
||||||
size,
|
|
||||||
versions,
|
|
||||||
downloadState,
|
|
||||||
onDownloadClick,
|
|
||||||
}) => {
|
}) => {
|
||||||
|
const { downloadModel } = useDownloadModel();
|
||||||
|
const { downloadedModels } = useGetDownloadedModels();
|
||||||
|
const { performanceTag, title, getPerformanceForModel } =
|
||||||
|
useGetPerformanceTag();
|
||||||
|
const downloadAtom = useMemo(
|
||||||
|
() => atom((get) => get(modelDownloadStateAtom)[suitableModel.id]),
|
||||||
|
[suitableModel.id]
|
||||||
|
);
|
||||||
|
const downloadState = useAtomValue(downloadAtom);
|
||||||
|
const setMainViewState = useSetAtom(setMainViewStateAtom);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
getPerformanceForModel(suitableModel);
|
||||||
|
}, [suitableModel]);
|
||||||
|
|
||||||
|
const onDownloadClick = useCallback(() => {
|
||||||
|
downloadModel(exploreModel, suitableModel);
|
||||||
|
}, [exploreModel, suitableModel]);
|
||||||
|
|
||||||
|
const isDownloaded =
|
||||||
|
downloadedModels.find((model) => model.id === suitableModel.id) != null;
|
||||||
|
|
||||||
let downloadButton = (
|
let downloadButton = (
|
||||||
<PrimaryButton
|
<PrimaryButton
|
||||||
title={size ? `Download (${toGigabytes(size)})` : "Download"}
|
title={
|
||||||
onClick={() => onDownloadClick?.()}
|
suitableModel.size
|
||||||
|
? `Download (${toGigabytes(suitableModel.size)})`
|
||||||
|
: "Download"
|
||||||
|
}
|
||||||
|
onClick={() => onDownloadClick()}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if (isDownloaded) {
|
||||||
|
downloadButton = (
|
||||||
|
<PrimaryButton
|
||||||
|
title="View Downloaded Model"
|
||||||
|
onClick={() => {
|
||||||
|
setMainViewState(MainViewState.MyModel);
|
||||||
|
}}
|
||||||
|
className="bg-green-500 hover:bg-green-400"
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
if (downloadState != null) {
|
if (downloadState != null) {
|
||||||
// downloading
|
// downloading
|
||||||
downloadButton = (
|
downloadButton = (
|
||||||
@ -39,15 +79,15 @@ const ExploreModelItemHeader: React.FC<Props> = ({
|
|||||||
)})`}
|
)})`}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
} else if (versions.length === 0) {
|
|
||||||
downloadButton = <SecondaryButton disabled title="No files available" />;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center justify-between p-4 border-b border-gray-200">
|
<div className="flex items-center justify-between p-4 border-b border-gray-200">
|
||||||
<div className="flex items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<span>{name}</span>
|
<span>{exploreModel.name}</span>
|
||||||
<SimpleTag title={status} type={status} clickable={false} />
|
{performanceTag && (
|
||||||
|
<SimpleTag title={title} type={performanceTag} clickable={false} />
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
{downloadButton}
|
{downloadButton}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -1,50 +1,26 @@
|
|||||||
import React, { useEffect } from "react";
|
import React, { useEffect } from "react";
|
||||||
import ExploreModelItem from "../ExploreModelItem";
|
import ExploreModelItem from "../ExploreModelItem";
|
||||||
import { modelSearchAtom } from "@/_helpers/JotaiWrapper";
|
import { getConfiguredModels } from "@/_hooks/useGetDownloadedModels";
|
||||||
import useGetHuggingFaceModel from "@/_hooks/useGetHuggingFaceModel";
|
import useGetConfiguredModels from "@/_hooks/useGetConfiguredModels";
|
||||||
import { useAtom, useAtomValue } from "jotai";
|
|
||||||
import { useInView } from "react-intersection-observer";
|
|
||||||
import { modelLoadMoreAtom } from "@/_helpers/atoms/ExploreModelLoading.atom";
|
|
||||||
import { Waveform } from "@uiball/loaders";
|
import { Waveform } from "@uiball/loaders";
|
||||||
|
|
||||||
const ExploreModelList: React.FC = () => {
|
const ExploreModelList: React.FC = () => {
|
||||||
const [loadMoreInProgress, setLoadMoreInProress] = useAtom(modelLoadMoreAtom);
|
const { loading, models } = useGetConfiguredModels();
|
||||||
const modelSearch = useAtomValue(modelSearchAtom);
|
|
||||||
const { modelList, getHuggingFaceModel } = useGetHuggingFaceModel();
|
|
||||||
const { ref, inView } = useInView({
|
|
||||||
threshold: 0,
|
|
||||||
triggerOnce: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (modelList.length === 0 && modelSearch.length > 0) {
|
getConfiguredModels();
|
||||||
setLoadMoreInProress(true);
|
}, []);
|
||||||
}
|
|
||||||
getHuggingFaceModel(modelSearch);
|
|
||||||
}, [modelSearch]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (inView) {
|
|
||||||
console.debug("Load more models..");
|
|
||||||
setLoadMoreInProress(true);
|
|
||||||
getHuggingFaceModel(modelSearch);
|
|
||||||
}
|
|
||||||
}, [inView]);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex flex-col flex-1 overflow-y-auto scroll">
|
<div className="flex flex-col flex-1 overflow-y-auto scroll">
|
||||||
{modelList.map((item, index) => (
|
{loading && (
|
||||||
<ExploreModelItem
|
<div className="mx-auto">
|
||||||
ref={index === modelList.length - 1 ? ref : null}
|
<Waveform size={24} color="#CBD5E0" />
|
||||||
key={item.id}
|
|
||||||
model={item}
|
|
||||||
/>
|
|
||||||
))}
|
|
||||||
{loadMoreInProgress && (
|
|
||||||
<div className="mx-auto mt-2 mb-4">
|
|
||||||
<Waveform size={24} color="#9CA3AF" />
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
{models.map((item) => (
|
||||||
|
<ExploreModelItem key={item.id} model={item} />
|
||||||
|
))}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@ -9,6 +9,8 @@ import {
|
|||||||
conversationStatesAtom,
|
conversationStatesAtom,
|
||||||
getActiveConvoIdAtom,
|
getActiveConvoIdAtom,
|
||||||
setActiveConvoIdAtom,
|
setActiveConvoIdAtom,
|
||||||
|
updateConversationErrorAtom,
|
||||||
|
updateConversationWaitingForResponseAtom,
|
||||||
} from "@/_helpers/atoms/Conversation.atom";
|
} from "@/_helpers/atoms/Conversation.atom";
|
||||||
import {
|
import {
|
||||||
setMainViewStateAtom,
|
setMainViewStateAtom,
|
||||||
@ -33,6 +35,10 @@ const HistoryItem: React.FC<Props> = ({
|
|||||||
const conversationStates = useAtomValue(conversationStatesAtom);
|
const conversationStates = useAtomValue(conversationStatesAtom);
|
||||||
const activeConvoId = useAtomValue(getActiveConvoIdAtom);
|
const activeConvoId = useAtomValue(getActiveConvoIdAtom);
|
||||||
const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
|
const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
|
||||||
|
const updateConvWaiting = useSetAtom(
|
||||||
|
updateConversationWaitingForResponseAtom
|
||||||
|
);
|
||||||
|
const updateConvError = useSetAtom(updateConversationErrorAtom);
|
||||||
const isSelected = activeConvoId === conversation.id;
|
const isSelected = activeConvoId === conversation.id;
|
||||||
|
|
||||||
const { initModel } = useInitModel();
|
const { initModel } = useInitModel();
|
||||||
@ -42,13 +48,16 @@ const HistoryItem: React.FC<Props> = ({
|
|||||||
DataService.GET_MODEL_BY_ID,
|
DataService.GET_MODEL_BY_ID,
|
||||||
conversation.model_id
|
conversation.model_id
|
||||||
);
|
);
|
||||||
if (!model) {
|
|
||||||
alert(
|
if (conversation.id) updateConvWaiting(conversation.id, true);
|
||||||
`Model ${conversation.model_id} not found! Please re-download the model first.`
|
initModel(model).then((res: any) => {
|
||||||
);
|
if (conversation.id) updateConvWaiting(conversation.id, false);
|
||||||
} else {
|
|
||||||
initModel(model);
|
if (res?.error && conversation.id) {
|
||||||
}
|
updateConvError(conversation.id, res.error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
if (activeConvoId !== conversation.id) {
|
if (activeConvoId !== conversation.id) {
|
||||||
setMainViewState(MainViewState.Conversation);
|
setMainViewState(MainViewState.Conversation);
|
||||||
setActiveConvoId(conversation.id);
|
setActiveConvoId(conversation.id);
|
||||||
|
|||||||
@ -6,17 +6,18 @@ import { useAtomValue } from "jotai";
|
|||||||
import { showingAdvancedPromptAtom } from "@/_helpers/atoms/Modal.atom";
|
import { showingAdvancedPromptAtom } from "@/_helpers/atoms/Modal.atom";
|
||||||
import SecondaryButton from "../SecondaryButton";
|
import SecondaryButton from "../SecondaryButton";
|
||||||
import { Fragment } from "react";
|
import { Fragment } from "react";
|
||||||
import { PlusIcon } from "@heroicons/react/24/outline";
|
import { PlusIcon, FaceSmileIcon } from "@heroicons/react/24/outline";
|
||||||
import useCreateConversation from "@/_hooks/useCreateConversation";
|
import useCreateConversation from "@/_hooks/useCreateConversation";
|
||||||
import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
|
import { activeAssistantModelAtom } from "@/_helpers/atoms/Model.atom";
|
||||||
import { showingTyping } from "@/_helpers/JotaiWrapper";
|
|
||||||
import LoadingIndicator from "../LoadingIndicator";
|
import LoadingIndicator from "../LoadingIndicator";
|
||||||
|
import { currentConvoStateAtom } from "@/_helpers/atoms/Conversation.atom";
|
||||||
|
import SendButton from "../SendButton";
|
||||||
|
|
||||||
const InputToolbar: React.FC = () => {
|
const InputToolbar: React.FC = () => {
|
||||||
const showingAdvancedPrompt = useAtomValue(showingAdvancedPromptAtom);
|
const showingAdvancedPrompt = useAtomValue(showingAdvancedPromptAtom);
|
||||||
const currentProduct = useAtomValue(currentProductAtom);
|
const activeModel = useAtomValue(activeAssistantModelAtom);
|
||||||
const { requestCreateConvo } = useCreateConversation();
|
const { requestCreateConvo } = useCreateConversation();
|
||||||
const isTyping = useAtomValue(showingTyping);
|
const currentConvoState = useAtomValue(currentConvoStateAtom);
|
||||||
|
|
||||||
if (showingAdvancedPrompt) {
|
if (showingAdvancedPrompt) {
|
||||||
return <div />;
|
return <div />;
|
||||||
@ -26,22 +27,21 @@ const InputToolbar: React.FC = () => {
|
|||||||
// const onRegenerateClick = () => {};
|
// const onRegenerateClick = () => {};
|
||||||
|
|
||||||
const onNewConversationClick = () => {
|
const onNewConversationClick = () => {
|
||||||
if (currentProduct) {
|
if (activeModel) {
|
||||||
requestCreateConvo(currentProduct);
|
requestCreateConvo(activeModel);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Fragment>
|
<Fragment>
|
||||||
<div className="flex justify-between gap-2 mr-3 my-2">
|
{currentConvoState?.error && (
|
||||||
<div className="h-6">
|
<div className="flex flex-row justify-center">
|
||||||
{isTyping && (
|
<span className="mx-5 my-2 text-red-500 text-sm">
|
||||||
<div className="my-2" key="indicator">
|
{currentConvoState?.error?.toString()}
|
||||||
<LoadingIndicator />
|
</span>
|
||||||
</div>
|
|
||||||
)}{" "}
|
|
||||||
</div>
|
</div>
|
||||||
|
)}
|
||||||
|
<div className="flex justify-center gap-2 my-3">
|
||||||
{/* <SecondaryButton title="Regenerate" onClick={onRegenerateClick} /> */}
|
{/* <SecondaryButton title="Regenerate" onClick={onRegenerateClick} /> */}
|
||||||
<SecondaryButton
|
<SecondaryButton
|
||||||
onClick={onNewConversationClick}
|
onClick={onNewConversationClick}
|
||||||
@ -49,9 +49,12 @@ const InputToolbar: React.FC = () => {
|
|||||||
icon={<PlusIcon width={16} height={16} />}
|
icon={<PlusIcon width={16} height={16} />}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<div className="mx-3 mb-3 flex-none overflow-hidden shadow-sm ring-1 ring-inset ring-gray-300 rounded-lg dark:bg-gray-800">
|
{/* My text input */}
|
||||||
<BasicPromptInput />
|
<div className="flex items-start space-x-4 mx-12 md:mx-32 2xl:mx-64 mb-5">
|
||||||
<BasicPromptAccessories />
|
<div className="min-w-0 flex-1 relative">
|
||||||
|
<BasicPromptInput />
|
||||||
|
<BasicPromptAccessories />
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</Fragment>
|
</Fragment>
|
||||||
);
|
);
|
||||||
|
|||||||
@ -12,7 +12,7 @@ const LoginButton: React.FC = () => {
|
|||||||
// <button
|
// <button
|
||||||
// onClick={signInWithKeyCloak}
|
// onClick={signInWithKeyCloak}
|
||||||
// type="button"
|
// type="button"
|
||||||
// className="rounded-md bg-indigo-600 px-2.5 py-1.5 text-sm font-semibold text-white shadow-sm hover:bg-indigo-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600"
|
// className="rounded-md bg-blue-600 px-2.5 py-1.5 text-sm font-semibold text-white shadow-sm hover:bg-blue-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-blue-600"
|
||||||
// >
|
// >
|
||||||
// Login
|
// Login
|
||||||
// </button>
|
// </button>
|
||||||
|
|||||||
@ -1,21 +1,21 @@
|
|||||||
import React from "react";
|
import React, { useCallback } from "react";
|
||||||
import { Product } from "@/_models/Product";
|
|
||||||
import Image from "next/image";
|
|
||||||
import { ModelStatus, ModelStatusComponent } from "../ModelStatusComponent";
|
import { ModelStatus, ModelStatusComponent } from "../ModelStatusComponent";
|
||||||
import ModelActionMenu from "../ModelActionMenu";
|
import ModelActionMenu from "../ModelActionMenu";
|
||||||
import { useAtomValue } from "jotai";
|
import { useAtomValue } from "jotai";
|
||||||
import ModelActionButton, { ModelActionType } from "../ModelActionButton";
|
import ModelActionButton, { ModelActionType } from "../ModelActionButton";
|
||||||
import useStartStopModel from "@/_hooks/useStartStopModel";
|
import useStartStopModel from "@/_hooks/useStartStopModel";
|
||||||
import useDeleteModel from "@/_hooks/useDeleteModel";
|
import useDeleteModel from "@/_hooks/useDeleteModel";
|
||||||
import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
|
import { activeAssistantModelAtom } from "@/_helpers/atoms/Model.atom";
|
||||||
|
import { toGigabytes } from "@/_utils/converter";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
model: Product;
|
model: AssistantModel;
|
||||||
};
|
};
|
||||||
|
|
||||||
const ModelRow: React.FC<Props> = ({ model }) => {
|
const ModelRow: React.FC<Props> = ({ model }) => {
|
||||||
const { startModel, stopModel } = useStartStopModel();
|
const { startModel, stopModel } = useStartStopModel();
|
||||||
const activeModel = useAtomValue(currentProductAtom);
|
const activeModel = useAtomValue(activeAssistantModelAtom);
|
||||||
const { deleteModel } = useDeleteModel();
|
const { deleteModel } = useDeleteModel();
|
||||||
|
|
||||||
let status = ModelStatus.Installed;
|
let status = ModelStatus.Installed;
|
||||||
@ -36,32 +36,23 @@ const ModelRow: React.FC<Props> = ({ model }) => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const onDeleteClick = () => {
|
const onDeleteClick = useCallback(() => {
|
||||||
deleteModel(model);
|
deleteModel(model);
|
||||||
};
|
}, [model]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<tr
|
<tr className="border-b border-gray-200 last:border-b-0 last:rounded-lg">
|
||||||
className="border-b border-gray-200 last:border-b-0 last:rounded-lg"
|
|
||||||
key={model.id}
|
|
||||||
>
|
|
||||||
<td className="flex flex-col whitespace-nowrap px-6 py-4 text-sm font-medium text-gray-900">
|
<td className="flex flex-col whitespace-nowrap px-6 py-4 text-sm font-medium text-gray-900">
|
||||||
{model.name}
|
{model.name}
|
||||||
<span className="text-gray-500 font-normal">{model.version}</span>
|
<span className="text-gray-500 font-normal">{model.version}</span>
|
||||||
</td>
|
</td>
|
||||||
<td className="whitespace-nowrap px-6 py-4 text-sm text-gray-500">
|
<td className="whitespace-nowrap px-6 py-4 text-sm text-gray-500">
|
||||||
<div className="flex flex-col justify-start">
|
<div className="flex flex-col justify-start">
|
||||||
<span>{model.format}</span>
|
<span>GGUF</span>
|
||||||
{model.accelerated && (
|
|
||||||
<span className="flex items-center text-gray-500 text-sm font-normal gap-0.5">
|
|
||||||
<Image src={"/icons/flash.svg"} width={20} height={20} alt="" />
|
|
||||||
GPU Accelerated
|
|
||||||
</span>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
</td>
|
</td>
|
||||||
<td className="whitespace-nowrap px-6 py-4 text-sm text-gray-500">
|
<td className="whitespace-nowrap px-6 py-4 text-sm text-gray-500">
|
||||||
{model.totalSize}
|
{toGigabytes(model.size)}
|
||||||
</td>
|
</td>
|
||||||
<td className="whitespace-nowrap px-6 py-4 text-sm text-gray-500">
|
<td className="whitespace-nowrap px-6 py-4 text-sm text-gray-500">
|
||||||
<ModelStatusComponent status={status} />
|
<ModelStatusComponent status={status} />
|
||||||
|
|||||||
@ -1,10 +1,10 @@
|
|||||||
import { Fragment, useEffect } from "react";
|
import { Fragment, useEffect } from "react";
|
||||||
import { Listbox, Transition } from "@headlessui/react";
|
import { Listbox, Transition } from "@headlessui/react";
|
||||||
import { CheckIcon, ChevronUpDownIcon } from "@heroicons/react/20/solid";
|
import { CheckIcon, ChevronUpDownIcon } from "@heroicons/react/20/solid";
|
||||||
import { Product } from "@/_models/Product";
|
|
||||||
import { useAtom, useAtomValue } from "jotai";
|
import { useAtom, useAtomValue } from "jotai";
|
||||||
import { selectedModelAtom } from "@/_helpers/atoms/Model.atom";
|
import { selectedModelAtom } from "@/_helpers/atoms/Model.atom";
|
||||||
import { downloadedModelAtom } from "@/_helpers/atoms/DownloadedModel.atom";
|
import { downloadedModelAtom } from "@/_helpers/atoms/DownloadedModel.atom";
|
||||||
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
|
|
||||||
function classNames(...classes: any) {
|
function classNames(...classes: any) {
|
||||||
return classes.filter(Boolean).join(" ");
|
return classes.filter(Boolean).join(" ");
|
||||||
@ -20,7 +20,7 @@ const SelectModels: React.FC = () => {
|
|||||||
}
|
}
|
||||||
}, [downloadedModels]);
|
}, [downloadedModels]);
|
||||||
|
|
||||||
const onModelSelected = (model: Product) => {
|
const onModelSelected = (model: AssistantModel) => {
|
||||||
setSelectedModel(model);
|
setSelectedModel(model);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -36,7 +36,7 @@ const SelectModels: React.FC = () => {
|
|||||||
Select a Model:
|
Select a Model:
|
||||||
</Listbox.Label>
|
</Listbox.Label>
|
||||||
<div className="relative mt-[19px]">
|
<div className="relative mt-[19px]">
|
||||||
<Listbox.Button className="relative w-full cursor-default rounded-md bg-white py-1.5 pl-3 pr-10 text-left text-gray-900 shadow-sm ring-1 ring-inset ring-gray-300 focus:outline-none focus:ring-2 focus:ring-indigo-500 sm:text-sm sm:leading-6">
|
<Listbox.Button className="relative w-full cursor-default rounded-md bg-white py-1.5 pl-3 pr-10 text-left text-gray-900 shadow-sm ring-1 ring-inset ring-gray-300 focus:outline-none focus:ring-2 focus:ring-blue-500 sm:text-sm sm:leading-6">
|
||||||
<span className="flex items-center">
|
<span className="flex items-center">
|
||||||
<img
|
<img
|
||||||
src={selectedModel.avatarUrl}
|
src={selectedModel.avatarUrl}
|
||||||
@ -68,8 +68,8 @@ const SelectModels: React.FC = () => {
|
|||||||
key={model.id}
|
key={model.id}
|
||||||
className={({ active }) =>
|
className={({ active }) =>
|
||||||
classNames(
|
classNames(
|
||||||
active ? "bg-indigo-600 text-white" : "text-gray-900",
|
active ? "bg-blue-600 text-white" : "text-gray-900",
|
||||||
"relative cursor-default select-none py-2 pl-3 pr-9"
|
"relative cursor-default select-none py-2 pl-3 pr-9",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
value={model}
|
value={model}
|
||||||
@ -85,7 +85,7 @@ const SelectModels: React.FC = () => {
|
|||||||
<span
|
<span
|
||||||
className={classNames(
|
className={classNames(
|
||||||
selected ? "font-semibold" : "font-normal",
|
selected ? "font-semibold" : "font-normal",
|
||||||
"ml-3 block truncate"
|
"ml-3 block truncate",
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{model.name}
|
{model.name}
|
||||||
@ -95,8 +95,8 @@ const SelectModels: React.FC = () => {
|
|||||||
{selected ? (
|
{selected ? (
|
||||||
<span
|
<span
|
||||||
className={classNames(
|
className={classNames(
|
||||||
active ? "text-white" : "text-indigo-600",
|
active ? "text-white" : "text-blue-600",
|
||||||
"absolute inset-y-0 right-0 flex items-center pr-4"
|
"absolute inset-y-0 right-0 flex items-center pr-4",
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
<CheckIcon className="h-5 w-5" aria-hidden="true" />
|
<CheckIcon className="h-5 w-5" aria-hidden="true" />
|
||||||
|
|||||||
@ -1,10 +1,10 @@
|
|||||||
import React from "react";
|
import React from "react";
|
||||||
import { Product } from "@/_models/Product";
|
|
||||||
import ModelRow from "../ModelRow";
|
import ModelRow from "../ModelRow";
|
||||||
import ModelTableHeader from "../ModelTableHeader";
|
import ModelTableHeader from "../ModelTableHeader";
|
||||||
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
models: Product[];
|
models: AssistantModel[];
|
||||||
};
|
};
|
||||||
|
|
||||||
const tableHeaders = ["MODEL", "FORMAT", "SIZE", "STATUS", "ACTIONS"];
|
const tableHeaders = ["MODEL", "FORMAT", "SIZE", "STATUS", "ACTIONS"];
|
||||||
|
|||||||
@ -1,26 +1,39 @@
|
|||||||
import React, { useMemo } from "react";
|
import React, { useMemo } from "react";
|
||||||
import { formatDownloadPercentage, toGigabytes } from "@/_utils/converter";
|
import { formatDownloadPercentage, toGigabytes } from "@/_utils/converter";
|
||||||
import Image from "next/image";
|
import Image from "next/image";
|
||||||
import { ModelVersion, Product } from "@/_models/Product";
|
import { Product } from "@/_models/Product";
|
||||||
import useDownloadModel from "@/_hooks/useDownloadModel";
|
import useDownloadModel from "@/_hooks/useDownloadModel";
|
||||||
import { modelDownloadStateAtom } from "@/_helpers/atoms/DownloadState.atom";
|
import { modelDownloadStateAtom } from "@/_helpers/atoms/DownloadState.atom";
|
||||||
import { atom, useAtomValue } from "jotai";
|
import { atom, useAtomValue } from "jotai";
|
||||||
|
import { ModelVersion } from "@/_models/ModelVersion";
|
||||||
|
import { useGetDownloadedModels } from "@/_hooks/useGetDownloadedModels";
|
||||||
|
import SimpleTag from "../SimpleTag";
|
||||||
|
import { RamRequired, UsecaseTag } from "../SimpleTag/TagType";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
model: Product;
|
model: Product;
|
||||||
modelVersion: ModelVersion;
|
modelVersion: ModelVersion;
|
||||||
|
isRecommended: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
const ModelVersionItem: React.FC<Props> = ({ model, modelVersion }) => {
|
const ModelVersionItem: React.FC<Props> = ({
|
||||||
const { downloadHfModel } = useDownloadModel();
|
model,
|
||||||
|
modelVersion,
|
||||||
|
isRecommended,
|
||||||
|
}) => {
|
||||||
|
const { downloadModel } = useDownloadModel();
|
||||||
|
const { downloadedModels } = useGetDownloadedModels();
|
||||||
|
const isDownloaded =
|
||||||
|
downloadedModels.find((model) => model.id === modelVersion.id) != null;
|
||||||
|
|
||||||
const downloadAtom = useMemo(
|
const downloadAtom = useMemo(
|
||||||
() => atom((get) => get(modelDownloadStateAtom)[modelVersion.path ?? ""]),
|
() => atom((get) => get(modelDownloadStateAtom)[modelVersion.id ?? ""]),
|
||||||
[modelVersion.path ?? ""]
|
[modelVersion.id ?? ""]
|
||||||
);
|
);
|
||||||
const downloadState = useAtomValue(downloadAtom);
|
const downloadState = useAtomValue(downloadAtom);
|
||||||
|
|
||||||
const onDownloadClick = () => {
|
const onDownloadClick = () => {
|
||||||
downloadHfModel(model, modelVersion);
|
downloadModel(model, modelVersion);
|
||||||
};
|
};
|
||||||
|
|
||||||
let downloadButton = (
|
let downloadButton = (
|
||||||
@ -36,15 +49,33 @@ const ModelVersionItem: React.FC<Props> = ({ model, modelVersion }) => {
|
|||||||
downloadButton = (
|
downloadButton = (
|
||||||
<div>{formatDownloadPercentage(downloadState.percent)}</div>
|
<div>{formatDownloadPercentage(downloadState.percent)}</div>
|
||||||
);
|
);
|
||||||
|
} else if (isDownloaded) {
|
||||||
|
downloadButton = <div>Downloaded</div>;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const { maxRamRequired, usecase } = modelVersion;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex justify-between items-center gap-4 pl-3 pt-3 pr-4 pb-3 border-t border-gray-200 first:border-t-0">
|
<div className="flex justify-between items-center gap-4 pl-3 pt-3 pr-4 pb-3 border-t border-gray-200 first:border-t-0">
|
||||||
<div className="flex items-center gap-4">
|
<div className="flex items-center gap-2">
|
||||||
<Image src={"/icons/app_icon.svg"} width={14} height={20} alt="" />
|
<Image src={"/icons/app_icon.svg"} width={14} height={20} alt="" />
|
||||||
<span className="font-sm text-gray-900">{modelVersion.path}</span>
|
<span className="font-sm text-gray-900 flex-1">
|
||||||
|
{modelVersion.name}
|
||||||
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex items-center gap-4">
|
<div className="flex items-center gap-4">
|
||||||
|
<div className="flex gap-2 justify-end">
|
||||||
|
<SimpleTag
|
||||||
|
title={usecase}
|
||||||
|
type={UsecaseTag.UsecaseDefault}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
<SimpleTag
|
||||||
|
title={`${toGigabytes(maxRamRequired)} RAM required`}
|
||||||
|
type={RamRequired.RamDefault}
|
||||||
|
clickable={false}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
<div className="px-2.5 py-0.5 bg-gray-200 text-xs font-medium rounded">
|
<div className="px-2.5 py-0.5 bg-gray-200 text-xs font-medium rounded">
|
||||||
{toGigabytes(modelVersion.size)}
|
{toGigabytes(modelVersion.size)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -1,21 +1,36 @@
|
|||||||
import React from "react";
|
import React from "react";
|
||||||
import ModelVersionItem from "../ModelVersionItem";
|
import ModelVersionItem from "../ModelVersionItem";
|
||||||
import { ModelVersion, Product } from "@/_models/Product";
|
import { Product } from "@/_models/Product";
|
||||||
|
import { ModelVersion } from "@/_models/ModelVersion";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
model: Product;
|
model: Product;
|
||||||
versions: ModelVersion[];
|
versions: ModelVersion[];
|
||||||
|
recommendedVersion: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
const ModelVersionList: React.FC<Props> = ({ model, versions }) => (
|
const ModelVersionList: React.FC<Props> = ({
|
||||||
<div className="px-4 py-5 border-t border-gray-200">
|
model,
|
||||||
<div className="text-sm font-medium text-gray-500">Available Versions</div>
|
versions,
|
||||||
<div className="border border-gray-200 rounded-lg overflow-hidden">
|
recommendedVersion,
|
||||||
{versions.map((item) => (
|
}) => {
|
||||||
<ModelVersionItem key={item.path} model={model} modelVersion={item} />
|
return (
|
||||||
))}
|
<div className="px-4 py-5 border-t border-gray-200">
|
||||||
|
<div className="text-sm font-medium text-gray-500">
|
||||||
|
Available Versions
|
||||||
|
</div>
|
||||||
|
<div className="border border-gray-200 rounded-lg overflow-hidden">
|
||||||
|
{versions.map((item) => (
|
||||||
|
<ModelVersionItem
|
||||||
|
key={item.id}
|
||||||
|
model={model}
|
||||||
|
modelVersion={item}
|
||||||
|
isRecommended={item.id === recommendedVersion}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
);
|
||||||
);
|
};
|
||||||
|
|
||||||
export default ModelVersionList;
|
export default ModelVersionList;
|
||||||
|
|||||||
@ -2,16 +2,16 @@ import ProgressBar from "../ProgressBar";
|
|||||||
import SystemItem from "../SystemItem";
|
import SystemItem from "../SystemItem";
|
||||||
import { useAtomValue } from "jotai";
|
import { useAtomValue } from "jotai";
|
||||||
import { appDownloadProgress } from "@/_helpers/JotaiWrapper";
|
import { appDownloadProgress } from "@/_helpers/JotaiWrapper";
|
||||||
import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
|
|
||||||
import useGetAppVersion from "@/_hooks/useGetAppVersion";
|
import useGetAppVersion from "@/_hooks/useGetAppVersion";
|
||||||
import useGetSystemResources from "@/_hooks/useGetSystemResources";
|
import useGetSystemResources from "@/_hooks/useGetSystemResources";
|
||||||
import { modelDownloadStateAtom } from "@/_helpers/atoms/DownloadState.atom";
|
import { modelDownloadStateAtom } from "@/_helpers/atoms/DownloadState.atom";
|
||||||
import { DownloadState } from "@/_models/DownloadState";
|
import { DownloadState } from "@/_models/DownloadState";
|
||||||
import { formatDownloadPercentage } from "@/_utils/converter";
|
import { formatDownloadPercentage } from "@/_utils/converter";
|
||||||
|
import { activeAssistantModelAtom } from "@/_helpers/atoms/Model.atom";
|
||||||
|
|
||||||
const MonitorBar: React.FC = () => {
|
const MonitorBar: React.FC = () => {
|
||||||
const progress = useAtomValue(appDownloadProgress);
|
const progress = useAtomValue(appDownloadProgress);
|
||||||
const activeModel = useAtomValue(currentProductAtom);
|
const activeModel = useAtomValue(activeAssistantModelAtom);
|
||||||
const { version } = useGetAppVersion();
|
const { version } = useGetAppVersion();
|
||||||
const { ram, cpu } = useGetSystemResources();
|
const { ram, cpu } = useGetSystemResources();
|
||||||
const modelDownloadStates = useAtomValue(modelDownloadStateAtom);
|
const modelDownloadStates = useAtomValue(modelDownloadStateAtom);
|
||||||
|
|||||||
@ -7,14 +7,14 @@ import {
|
|||||||
MainViewState,
|
MainViewState,
|
||||||
setMainViewStateAtom,
|
setMainViewStateAtom,
|
||||||
} from "@/_helpers/atoms/MainView.atom";
|
} from "@/_helpers/atoms/MainView.atom";
|
||||||
import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
|
|
||||||
import useCreateConversation from "@/_hooks/useCreateConversation";
|
import useCreateConversation from "@/_hooks/useCreateConversation";
|
||||||
import useInitModel from "@/_hooks/useInitModel";
|
import useInitModel from "@/_hooks/useInitModel";
|
||||||
import { Product } from "@/_models/Product";
|
|
||||||
import { PlusIcon } from "@heroicons/react/24/outline";
|
import { PlusIcon } from "@heroicons/react/24/outline";
|
||||||
|
import { activeAssistantModelAtom } from "@/_helpers/atoms/Model.atom";
|
||||||
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
|
|
||||||
const NewChatButton: React.FC = () => {
|
const NewChatButton: React.FC = () => {
|
||||||
const activeModel = useAtomValue(currentProductAtom);
|
const activeModel = useAtomValue(activeAssistantModelAtom);
|
||||||
const setMainView = useSetAtom(setMainViewStateAtom);
|
const setMainView = useSetAtom(setMainViewStateAtom);
|
||||||
const { requestCreateConvo } = useCreateConversation();
|
const { requestCreateConvo } = useCreateConversation();
|
||||||
const { initModel } = useInitModel();
|
const { initModel } = useInitModel();
|
||||||
@ -27,7 +27,7 @@ const NewChatButton: React.FC = () => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const createConversationAndInitModel = async (model: Product) => {
|
const createConversationAndInitModel = async (model: AssistantModel) => {
|
||||||
await requestCreateConvo(model);
|
await requestCreateConvo(model);
|
||||||
await initModel(model);
|
await initModel(model);
|
||||||
};
|
};
|
||||||
|
|||||||
@ -16,7 +16,7 @@ const PrimaryButton: React.FC<Props> = ({
|
|||||||
<button
|
<button
|
||||||
onClick={onClick}
|
onClick={onClick}
|
||||||
type="button"
|
type="button"
|
||||||
className={`rounded-md bg-indigo-500 px-3.5 py-2.5 text-sm font-semibold text-white shadow-sm hover:bg-indigo-400 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-50 line-clamp-1 flex-shrink-0 ${className} ${
|
className={`rounded-md bg-blue-500 px-3.5 py-2.5 text-sm font-semibold text-white shadow-sm hover:bg-blue-400 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-50 line-clamp-1 flex-shrink-0 ${className} ${
|
||||||
fullWidth ? "flex-1 " : ""
|
fullWidth ? "flex-1 " : ""
|
||||||
}}`}
|
}}`}
|
||||||
>
|
>
|
||||||
|
|||||||
@ -12,10 +12,6 @@ const SendButton: React.FC = () => {
|
|||||||
const isWaitingForResponse = currentConvoState?.waitingForResponse ?? false;
|
const isWaitingForResponse = currentConvoState?.waitingForResponse ?? false;
|
||||||
const disabled = currentPrompt.trim().length === 0 || isWaitingForResponse;
|
const disabled = currentPrompt.trim().length === 0 || isWaitingForResponse;
|
||||||
|
|
||||||
const enabledStyle = {
|
|
||||||
backgroundColor: "#FACA15",
|
|
||||||
};
|
|
||||||
|
|
||||||
const disabledStyle = {
|
const disabledStyle = {
|
||||||
backgroundColor: "#F3F4F6",
|
backgroundColor: "#F3F4F6",
|
||||||
};
|
};
|
||||||
@ -23,11 +19,11 @@ const SendButton: React.FC = () => {
|
|||||||
return (
|
return (
|
||||||
<button
|
<button
|
||||||
onClick={sendChatMessage}
|
onClick={sendChatMessage}
|
||||||
style={disabled ? disabledStyle : enabledStyle}
|
style={disabled ? disabledStyle : {}}
|
||||||
type="submit"
|
type="submit"
|
||||||
className="p-2 gap-2.5 inline-flex items-center rounded-xl text-sm font-semibold shadow-sm hover:bg-indigo-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600"
|
className="inline-flex items-center rounded-md bg-indigo-600 px-3 py-2 text-sm font-semibold text-white shadow-sm hover:bg-indigo-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600"
|
||||||
>
|
>
|
||||||
<ArrowRightIcon width={16} height={16} />
|
Send
|
||||||
</button>
|
</button>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@ -7,10 +7,10 @@ import {
|
|||||||
MainViewState,
|
MainViewState,
|
||||||
setMainViewStateAtom,
|
setMainViewStateAtom,
|
||||||
} from "@/_helpers/atoms/MainView.atom";
|
} from "@/_helpers/atoms/MainView.atom";
|
||||||
import { currentProductAtom } from "@/_helpers/atoms/Model.atom";
|
import { activeAssistantModelAtom } from "@/_helpers/atoms/Model.atom";
|
||||||
import useInitModel from "@/_hooks/useInitModel";
|
import useInitModel from "@/_hooks/useInitModel";
|
||||||
import { Product } from "@/_models/Product";
|
|
||||||
import { useGetDownloadedModels } from "@/_hooks/useGetDownloadedModels";
|
import { useGetDownloadedModels } from "@/_hooks/useGetDownloadedModels";
|
||||||
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
|
|
||||||
enum ActionButton {
|
enum ActionButton {
|
||||||
DownloadModel = "Download a Model",
|
DownloadModel = "Download a Model",
|
||||||
@ -19,7 +19,7 @@ enum ActionButton {
|
|||||||
|
|
||||||
const SidebarEmptyHistory: React.FC = () => {
|
const SidebarEmptyHistory: React.FC = () => {
|
||||||
const { downloadedModels } = useGetDownloadedModels();
|
const { downloadedModels } = useGetDownloadedModels();
|
||||||
const activeModel = useAtomValue(currentProductAtom);
|
const activeModel = useAtomValue(activeAssistantModelAtom);
|
||||||
const setMainView = useSetAtom(setMainViewStateAtom);
|
const setMainView = useSetAtom(setMainViewStateAtom);
|
||||||
const { requestCreateConvo } = useCreateConversation();
|
const { requestCreateConvo } = useCreateConversation();
|
||||||
const [action, setAction] = useState(ActionButton.DownloadModel);
|
const [action, setAction] = useState(ActionButton.DownloadModel);
|
||||||
@ -46,7 +46,7 @@ const SidebarEmptyHistory: React.FC = () => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const createConversationAndInitModel = async (model: Product) => {
|
const createConversationAndInitModel = async (model: AssistantModel) => {
|
||||||
await requestCreateConvo(model);
|
await requestCreateConvo(model);
|
||||||
await initModel(model);
|
await initModel(model);
|
||||||
};
|
};
|
||||||
|
|||||||
21
web/app/_components/SimpleTag/TagStyleMapper.ts
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import { TagType } from "./TagType";
|
||||||
|
|
||||||
|
export const tagStyleMapper: Record<TagType, string> = {
|
||||||
|
GGUF: "bg-yellow-100 text-yellow-800",
|
||||||
|
PerformancePositive:
|
||||||
|
"text-green-700 ring-1 ring-inset ring-green-600/20 bg-green-50",
|
||||||
|
PerformanceNeutral:
|
||||||
|
"bg-yellow-50 text-yellow-800 ring-1 ring-inset ring-yellow-600/20",
|
||||||
|
PerformanceNegative:
|
||||||
|
"bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
||||||
|
HardwareCompatible: "bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
||||||
|
HardwareIncompatible:
|
||||||
|
"bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
||||||
|
FreeStyle: "bg-gray-100 text-gray-800",
|
||||||
|
ExpectPerformanceMedium: "bg-yellow-100 text-yellow-800",
|
||||||
|
Version: "bg-red-100 text-yellow-800",
|
||||||
|
Default: "bg-blue-100 text-blue-800",
|
||||||
|
RamDefault: "bg-green-50 text-green-700",
|
||||||
|
UsecaseDefault: "bg-orange-100 text-yellow-800",
|
||||||
|
MiscellanousDefault: "bg-blue-100 text-blue-800",
|
||||||
|
};
|
||||||
62
web/app/_components/SimpleTag/TagType.ts
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
export enum ModelPerformance {
|
||||||
|
PerformancePositive = "PerformancePositive",
|
||||||
|
|
||||||
|
PerformanceNeutral = "PerformanceNeutral",
|
||||||
|
|
||||||
|
PerformanceNegative = "PerformanceNegative",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum HardwareCompatibility {
|
||||||
|
HardwareCompatible = "HardwareCompatible",
|
||||||
|
|
||||||
|
HardwareIncompatible = "HardwareIncompatible",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum ExpectedPerformance {
|
||||||
|
ExpectPerformanceMedium = "ExpectPerformanceMedium",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum ModelFormat {
|
||||||
|
GGUF = "GGUF",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum FreestyleTag {
|
||||||
|
FreeStyle = "FreeStyle",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum VersionTag {
|
||||||
|
Version = "Version",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum QuantMethodTag {
|
||||||
|
Default = "Default",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum NumOfBit {
|
||||||
|
Default = "Default",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum RamRequired {
|
||||||
|
RamDefault = "RamDefault",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum UsecaseTag {
|
||||||
|
UsecaseDefault = "UsecaseDefault",
|
||||||
|
}
|
||||||
|
|
||||||
|
export enum MiscellanousTag {
|
||||||
|
MiscellanousDefault = "MiscellanousDefault",
|
||||||
|
}
|
||||||
|
|
||||||
|
export type TagType =
|
||||||
|
| ModelPerformance
|
||||||
|
| HardwareCompatibility
|
||||||
|
| ExpectedPerformance
|
||||||
|
| ModelFormat
|
||||||
|
| FreestyleTag
|
||||||
|
| VersionTag
|
||||||
|
| QuantMethodTag
|
||||||
|
| NumOfBit
|
||||||
|
| RamRequired
|
||||||
|
| UsecaseTag
|
||||||
|
| MiscellanousTag;
|
||||||
@ -1,57 +1,6 @@
|
|||||||
import React from "react";
|
import React from "react";
|
||||||
|
import { TagType } from "./TagType";
|
||||||
export enum TagType {
|
import { tagStyleMapper } from "./TagStyleMapper";
|
||||||
Roleplay = "Roleplay",
|
|
||||||
Llama = "Llama",
|
|
||||||
Story = "Story",
|
|
||||||
Casual = "Casual",
|
|
||||||
Professional = "Professional",
|
|
||||||
CodeLlama = "CodeLlama",
|
|
||||||
Coding = "Coding",
|
|
||||||
|
|
||||||
// Positive
|
|
||||||
Recommended = "Recommended",
|
|
||||||
Compatible = "Compatible",
|
|
||||||
|
|
||||||
// Neutral
|
|
||||||
SlowOnDevice = "This model will be slow on your device",
|
|
||||||
|
|
||||||
// Negative
|
|
||||||
InsufficientRam = "Insufficient RAM",
|
|
||||||
Incompatible = "Incompatible with your device",
|
|
||||||
TooLarge = "This model is too large for your device",
|
|
||||||
|
|
||||||
// Performance
|
|
||||||
Medium = "Medium",
|
|
||||||
BalancedQuality = "Balanced Quality",
|
|
||||||
}
|
|
||||||
|
|
||||||
const tagStyleMapper: Record<TagType, string> = {
|
|
||||||
[TagType.Roleplay]: "bg-red-100 text-red-800",
|
|
||||||
[TagType.Llama]: "bg-green-100 text-green-800",
|
|
||||||
[TagType.Story]: "bg-blue-100 text-blue-800",
|
|
||||||
[TagType.Casual]: "bg-yellow-100 text-yellow-800",
|
|
||||||
[TagType.Professional]: "text-indigo-800 bg-indigo-100",
|
|
||||||
[TagType.CodeLlama]: "bg-pink-100 text-pink-800",
|
|
||||||
[TagType.Coding]: "text-purple-800 bg-purple-100",
|
|
||||||
|
|
||||||
[TagType.Recommended]:
|
|
||||||
"text-green-700 ring-1 ring-inset ring-green-600/20 bg-green-50",
|
|
||||||
[TagType.Compatible]:
|
|
||||||
"bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
|
||||||
|
|
||||||
[TagType.SlowOnDevice]:
|
|
||||||
"bg-yellow-50 text-yellow-800 ring-1 ring-inset ring-yellow-600/20",
|
|
||||||
|
|
||||||
[TagType.Incompatible]:
|
|
||||||
"bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
|
||||||
[TagType.InsufficientRam]:
|
|
||||||
"bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
|
||||||
[TagType.TooLarge]: "bg-red-50 ext-red-700 ring-1 ring-inset ring-red-600/10",
|
|
||||||
|
|
||||||
[TagType.Medium]: "bg-yellow-100 text-yellow-800",
|
|
||||||
[TagType.BalancedQuality]: "bg-yellow-100 text-yellow-800",
|
|
||||||
};
|
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
title: string;
|
title: string;
|
||||||
@ -66,10 +15,11 @@ const SimpleTag: React.FC<Props> = ({
|
|||||||
title,
|
title,
|
||||||
type,
|
type,
|
||||||
}) => {
|
}) => {
|
||||||
|
if (!title || title.length === 0) return null;
|
||||||
if (!clickable) {
|
if (!clickable) {
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className={`px-2.5 py-0.5 rounded text-xs font-medium ${tagStyleMapper[type]}`}
|
className={`px-2.5 py-0.5 rounded text-xs font-medium items-center line-clamp-1 max-w-[40%] ${tagStyleMapper[type]}`}
|
||||||
>
|
>
|
||||||
{title}
|
{title}
|
||||||
</div>
|
</div>
|
||||||
@ -79,7 +29,7 @@ const SimpleTag: React.FC<Props> = ({
|
|||||||
return (
|
return (
|
||||||
<button
|
<button
|
||||||
onClick={onClick}
|
onClick={onClick}
|
||||||
className={`px-2.5 py-0.5 rounded text-xs font-medium ${tagStyleMapper[type]}`}
|
className={`px-2.5 py-0.5 rounded text-xs font-medium items-center line-clamp-1 max-w-[40%] ${tagStyleMapper[type]}`}
|
||||||
>
|
>
|
||||||
{title} x
|
{title} x
|
||||||
</button>
|
</button>
|
||||||
|
|||||||
@ -4,6 +4,7 @@ import { TextCode } from "../TextCode";
|
|||||||
import { getMessageCode } from "@/_utils/message";
|
import { getMessageCode } from "@/_utils/message";
|
||||||
import Image from "next/image";
|
import Image from "next/image";
|
||||||
import { MessageSenderType } from "@/_models/ChatMessage";
|
import { MessageSenderType } from "@/_models/ChatMessage";
|
||||||
|
import LoadingIndicator from "../LoadingIndicator";
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
avatarUrl: string;
|
avatarUrl: string;
|
||||||
@ -13,6 +14,17 @@ type Props = {
|
|||||||
text?: string;
|
text?: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const renderMessageCode = (text: string) => {
|
||||||
|
return getMessageCode(text).map((item, i) => (
|
||||||
|
<div className="flex gap-1 flex-col" key={i}>
|
||||||
|
<p className="leading-[20px] whitespace-break-spaces text-sm font-normal dark:text-[#d1d5db]">
|
||||||
|
{item.text}
|
||||||
|
</p>
|
||||||
|
{item.code.trim().length > 0 && <TextCode text={item.code} />}
|
||||||
|
</div>
|
||||||
|
));
|
||||||
|
};
|
||||||
|
|
||||||
const SimpleTextMessage: React.FC<Props> = ({
|
const SimpleTextMessage: React.FC<Props> = ({
|
||||||
senderName,
|
senderName,
|
||||||
createdAt,
|
createdAt,
|
||||||
@ -25,7 +37,7 @@ const SimpleTextMessage: React.FC<Props> = ({
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className={`flex items-start gap-2 px-[148px] ${backgroundColor} py-5`}
|
className={`flex items-start gap-2 px-12 md:px-32 2xl:px-64 ${backgroundColor} py-5`}
|
||||||
>
|
>
|
||||||
<Image
|
<Image
|
||||||
className="rounded-full"
|
className="rounded-full"
|
||||||
@ -43,17 +55,12 @@ const SimpleTextMessage: React.FC<Props> = ({
|
|||||||
{displayDate(createdAt)}
|
{displayDate(createdAt)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{text.includes("```") ? (
|
{text === "" ? (
|
||||||
getMessageCode(text).map((item, i) => (
|
<LoadingIndicator />
|
||||||
<div className="flex gap-1 flex-col" key={i}>
|
) : text.includes("```") ? (
|
||||||
<p className="leading-[20px] whitespace-break-spaces text-sm font-normal dark:text-[#d1d5db]">
|
renderMessageCode(text)
|
||||||
{item.text}
|
|
||||||
</p>
|
|
||||||
{item.code.trim().length > 0 && <TextCode text={item.code} />}
|
|
||||||
</div>
|
|
||||||
))
|
|
||||||
) : (
|
) : (
|
||||||
<span className="text-sm">{text}</span>
|
<span className="text-sm leading-loose font-normal">{text}</span>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -13,8 +13,6 @@ export default function JotaiWrapper({ children }: Props) {
|
|||||||
|
|
||||||
export const currentPromptAtom = atom<string>("");
|
export const currentPromptAtom = atom<string>("");
|
||||||
|
|
||||||
export const showingTyping = atom<boolean>(false);
|
|
||||||
|
|
||||||
export const appDownloadProgress = atom<number>(-1);
|
export const appDownloadProgress = atom<number>(-1);
|
||||||
export const searchingModelText = atom<string>("");
|
export const searchingModelText = atom<string>("");
|
||||||
|
|
||||||
|
|||||||
@ -55,6 +55,18 @@ export const updateConversationWaitingForResponseAtom = atom(
|
|||||||
currentState[conversationId] = {
|
currentState[conversationId] = {
|
||||||
...currentState[conversationId],
|
...currentState[conversationId],
|
||||||
waitingForResponse,
|
waitingForResponse,
|
||||||
|
error: undefined,
|
||||||
|
};
|
||||||
|
set(conversationStatesAtom, currentState);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
export const updateConversationErrorAtom = atom(
|
||||||
|
null,
|
||||||
|
(get, set, conversationId: string, error?: Error) => {
|
||||||
|
const currentState = { ...get(conversationStatesAtom) };
|
||||||
|
currentState[conversationId] = {
|
||||||
|
...currentState[conversationId],
|
||||||
|
error,
|
||||||
};
|
};
|
||||||
set(conversationStatesAtom, currentState);
|
set(conversationStatesAtom, currentState);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
import { Product } from "@/_models/Product";
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
import { atom } from "jotai";
|
import { atom } from "jotai";
|
||||||
|
|
||||||
export const downloadedModelAtom = atom<Product[]>([]);
|
/**
|
||||||
|
* @description: This atom is used to store the downloaded models
|
||||||
|
*/
|
||||||
|
export const downloadedModelAtom = atom<AssistantModel[]>([]);
|
||||||
|
|||||||
@ -1,6 +1,8 @@
|
|||||||
import { Product } from "@/_models/Product";
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
import { atom } from "jotai";
|
import { atom } from "jotai";
|
||||||
|
|
||||||
export const currentProductAtom = atom<Product | undefined>(undefined);
|
export const selectedModelAtom = atom<AssistantModel | undefined>(undefined);
|
||||||
|
|
||||||
export const selectedModelAtom = atom<Product | undefined>(undefined);
|
export const activeAssistantModelAtom = atom<AssistantModel | undefined>(
|
||||||
|
undefined
|
||||||
|
);
|
||||||
|
|||||||
@ -2,13 +2,15 @@ import { useAtom, useSetAtom } from "jotai";
|
|||||||
import { Conversation } from "@/_models/Conversation";
|
import { Conversation } from "@/_models/Conversation";
|
||||||
import { executeSerial } from "@/_services/pluginService";
|
import { executeSerial } from "@/_services/pluginService";
|
||||||
import { DataService } from "../../shared/coreService";
|
import { DataService } from "../../shared/coreService";
|
||||||
import { Product } from "@/_models/Product";
|
|
||||||
import {
|
import {
|
||||||
userConversationsAtom,
|
userConversationsAtom,
|
||||||
setActiveConvoIdAtom,
|
setActiveConvoIdAtom,
|
||||||
addNewConversationStateAtom,
|
addNewConversationStateAtom,
|
||||||
|
updateConversationWaitingForResponseAtom,
|
||||||
|
updateConversationErrorAtom,
|
||||||
} from "@/_helpers/atoms/Conversation.atom";
|
} from "@/_helpers/atoms/Conversation.atom";
|
||||||
import useInitModel from "./useInitModel";
|
import useInitModel from "./useInitModel";
|
||||||
|
import { AssistantModel } from "@/_models/AssistantModel";
|
||||||
|
|
||||||
const useCreateConversation = () => {
|
const useCreateConversation = () => {
|
||||||
const { initModel } = useInitModel();
|
const { initModel } = useInitModel();
|
||||||
@ -17,8 +19,12 @@ const useCreateConversation = () => {
|
|||||||
);
|
);
|
||||||
const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
|
const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
|
||||||
const addNewConvoState = useSetAtom(addNewConversationStateAtom);
|
const addNewConvoState = useSetAtom(addNewConversationStateAtom);
|
||||||
|
const updateConvWaiting = useSetAtom(
|
||||||
|
updateConversationWaitingForResponseAtom
|
||||||
|
);
|
||||||
|
const updateConvError = useSetAtom(updateConversationErrorAtom);
|
||||||
|
|
||||||
const requestCreateConvo = async (model: Product) => {
|
const requestCreateConvo = async (model: AssistantModel) => {
|
||||||
const conversationName = model.name;
|
const conversationName = model.name;
|
||||||
const conv: Conversation = {
|
const conv: Conversation = {
|
||||||
model_id: model.id,
|
model_id: model.id,
|
||||||
@ -27,7 +33,14 @@ const useCreateConversation = () => {
|
|||||||
name: conversationName,
|
name: conversationName,
|
||||||
};
|
};
|
||||||
const id = await executeSerial(DataService.CREATE_CONVERSATION, conv);
|
const id = await executeSerial(DataService.CREATE_CONVERSATION, conv);
|
||||||
await initModel(model);
|
|
||||||
|
if (id) updateConvWaiting(id, true);
|
||||||
|
initModel(model).then((res: any) => {
|
||||||
|
if (id) updateConvWaiting(id, false);
|
||||||
|
if (res?.error) {
|
||||||
|
updateConvError(id, res.error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
const mappedConvo: Conversation = {
|
const mappedConvo: Conversation = {
|
||||||
id,
|
id,
|
||||||
@@ -19,12 +19,13 @@ import {

 export default function useDeleteConversation() {
   const [userConversations, setUserConversations] = useAtom(
-    userConversationsAtom
+    userConversationsAtom,
   );
   const setCurrentPrompt = useSetAtom(currentPromptAtom);
   const setShowingProductDetail = useSetAtom(showingProductDetailAtom);
   const setShowingAdvancedPrompt = useSetAtom(showingAdvancedPromptAtom);
   const activeConvoId = useAtomValue(getActiveConvoIdAtom);
+
   const setActiveConvoId = useSetAtom(setActiveConvoIdAtom);
   const deleteMessages = useSetAtom(deleteConversationMessage);
   const setMainViewState = useSetAtom(setMainViewStateAtom);
@@ -34,14 +35,17 @@ export default function useDeleteConversation() {
     try {
       await execute(DataService.DELETE_CONVERSATION, activeConvoId);
       const currentConversations = userConversations.filter(
-        (c) => c.id !== activeConvoId
+        (c) => c.id !== activeConvoId,
       );
       setUserConversations(currentConversations);
-      if (currentConversations.length === 0) {
-        setMainViewState(MainViewState.Welcome);
-      }
       deleteMessages(activeConvoId);
-      setActiveConvoId(undefined);
+
+      if (currentConversations.length > 0) {
+        setActiveConvoId(currentConversations[0].id);
+      } else {
+        setMainViewState(MainViewState.Welcome);
+        setActiveConvoId(undefined);
+      }
       setCurrentPrompt("");
       setShowingProductDetail(false);
       setShowingAdvancedPrompt(false);
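
The new post-delete behaviour, isolated as a standalone sketch for clarity; the helper name and minimal types are illustrative only.

// Standalone sketch of the new fallback (illustrative names and types).
type PostDeleteTarget =
  | { view: "conversation"; conversationId: string }
  | { view: "welcome" };

export function targetAfterDelete(
  remaining: { id: string }[],
): PostDeleteTarget {
  // Jump to the first remaining conversation; otherwise clear the selection
  // and return to the Welcome view, as the updated hook now does.
  return remaining.length > 0
    ? { view: "conversation", conversationId: remaining[0].id }
    : { view: "welcome" };
}
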
@@ -1,16 +1,16 @@
 import { execute, executeSerial } from "@/_services/pluginService";
 import { DataService, ModelManagementService } from "../../shared/coreService";
-import { Product } from "@/_models/Product";
 import { useSetAtom } from "jotai";
 import { downloadedModelAtom } from "@/_helpers/atoms/DownloadedModel.atom";
 import { getDownloadedModels } from "./useGetDownloadedModels";
+import { AssistantModel } from "@/_models/AssistantModel";

 export default function useDeleteModel() {
   const setDownloadedModels = useSetAtom(downloadedModelAtom);

-  const deleteModel = async (model: Product) => {
+  const deleteModel = async (model: AssistantModel) => {
     execute(DataService.DELETE_DOWNLOAD_MODEL, model.id);
-    await executeSerial(ModelManagementService.DELETE_MODEL, model.fileName);
+    await executeSerial(ModelManagementService.DELETE_MODEL, model.id);

     // reload models
     const downloadedModels = await getDownloadedModels();
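
A usage sketch, assuming the hook returns `{ deleteModel }` and lives under an `@/_hooks/...` path (neither is shown in this hunk).

// Usage sketch; hook path and return shape ({ deleteModel }) are assumed.
import useDeleteModel from "@/_hooks/useDeleteModel"; // path assumed
import { AssistantModel } from "@/_models/AssistantModel";

export function useRemoveDownloadedModel() {
  const { deleteModel } = useDeleteModel();

  // DELETE_MODEL is now keyed by model.id instead of model.fileName, so the
  // caller only needs the AssistantModel record.
  return (model: AssistantModel) => deleteModel(model);
}
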
@@ -1,38 +1,20 @@
 import { executeSerial } from "@/_services/pluginService";
 import { DataService, ModelManagementService } from "../../shared/coreService";
-import { ModelVersion, Product } from "@/_models/Product";
+import { Product } from "@/_models/Product";
+import { ModelVersion } from "@/_models/ModelVersion";

 export default function useDownloadModel() {
-  const downloadModel = async (model: Product) => {
-    await executeSerial(DataService.STORE_MODEL, model);
-    await executeSerial(ModelManagementService.DOWNLOAD_MODEL, {
-      downloadUrl: model.downloadUrl,
-      fileName: model.fileName,
-    });
-  };
-
-  const downloadHfModel = async (
-    model: Product,
-    modelVersion: ModelVersion
-  ) => {
-    const hfModel: Product = {
-      ...model,
-      id: `${model.author}.${modelVersion.path}`,
-      slug: `${model.author}.${modelVersion.path}`,
-      name: `${model.name} - ${modelVersion.path}`,
-      fileName: modelVersion.path,
-      totalSize: modelVersion.size,
-      downloadUrl: modelVersion.downloadUrl,
-    };
-    await executeSerial(DataService.STORE_MODEL, hfModel);
+  const downloadModel = async (model: Product, modelVersion: ModelVersion) => {
+    modelVersion.startDownloadAt = Date.now();
+    await executeSerial(DataService.STORE_MODEL, { model, modelVersion });
+
     await executeSerial(ModelManagementService.DOWNLOAD_MODEL, {
-      downloadUrl: hfModel.downloadUrl,
-      fileName: hfModel.fileName,
+      downloadUrl: modelVersion.downloadLink,
+      fileName: modelVersion.id,
     });
   };

   return {
     downloadModel,
-    downloadHfModel,
   };
 }
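
A usage sketch for the new signature; the hook path is assumed, while `downloadModel`, `downloadLink`, `id`, and `startDownloadAt` come from the diff above.

// Usage sketch; the hook path is assumed, the rest mirrors the diff above.
import useDownloadModel from "@/_hooks/useDownloadModel"; // path assumed
import { Product } from "@/_models/Product";
import { ModelVersion } from "@/_models/ModelVersion";

export function useStartDownload() {
  const { downloadModel } = useDownloadModel();

  // Callers now pick a concrete ModelVersion; the hook stamps startDownloadAt,
  // stores { model, modelVersion }, and downloads version.downloadLink under
  // version.id.
  return (product: Product, version: ModelVersion) =>
    downloadModel(product, version);
}
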