fix(dolphin phi2): resolved conflict

This commit is contained in:
hahuyhoang411 2024-02-19 09:56:44 +07:00
commit f7a3c921fd
114 changed files with 2183 additions and 1365 deletions

View File

@ -1,6 +1,12 @@
name: Jan Build Electron App Nightly or Manual name: Jan Build Electron App Nightly or Manual
on: on:
push:
branches:
- main
paths-ignore:
- 'README.md'
- 'docs/**'
schedule: schedule:
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday - cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
workflow_dispatch: workflow_dispatch:
@ -23,12 +29,20 @@ jobs:
- name: Set public provider - name: Set public provider
id: set-public-provider id: set-public-provider
run: | run: |
if [ ${{ github.event == 'workflow_dispatch' }} ]; then if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}" echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
echo "::set-output name=ref::${{ github.ref }}" echo "::set-output name=ref::${{ github.ref }}"
else else
echo "::set-output name=public_provider::cloudflare-r2" if [ "${{ github.event_name }}" == "schedule" ]; then
echo "::set-output name=ref::refs/heads/dev" echo "::set-output name=public_provider::cloudflare-r2"
echo "::set-output name=ref::refs/heads/dev"
elif [ "${{ github.event_name }}" == "push" ]; then
echo "::set-output name=public_provider::cloudflare-r2"
echo "::set-output name=ref::${{ github.ref }}"
else
echo "::set-output name=public_provider::none"
echo "::set-output name=ref::${{ github.ref }}"
fi
fi fi
# Job create Update app version based on latest release tag with build number and save to output # Job create Update app version based on latest release tag with build number and save to output
get-update-version: get-update-version:
@ -73,6 +87,17 @@ jobs:
push_to_branch: dev push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }} new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-pre-release-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider]
secrets: inherit
if: github.event_name == 'push'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Pre-release
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-manual-and-update-url-readme: noti-discord-manual-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider] needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version, set-public-provider]
secrets: inherit secrets: inherit

View File

@ -1,52 +0,0 @@
name: Jan Build Electron Pre Release
on:
push:
branches:
- main
paths:
- "!README.md"
jobs:
# Job create Update app version based on latest release tag with build number and save to output
get-update-version:
uses: ./.github/workflows/template-get-update-version.yml
build-macos:
uses: ./.github/workflows/template-build-macos.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: cloudflare-r2
new_version: ${{ needs.get-update-version.outputs.new_version }}
build-windows-x64:
uses: ./.github/workflows/template-build-windows-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: cloudflare-r2
new_version: ${{ needs.get-update-version.outputs.new_version }}
build-linux-x64:
uses: ./.github/workflows/template-build-linux-x64.yml
secrets: inherit
needs: [get-update-version]
with:
ref: ${{ github.ref }}
public_provider: cloudflare-r2
new_version: ${{ needs.get-update-version.outputs.new_version }}
noti-discord-nightly-and-update-url-readme:
needs: [build-macos, build-windows-x64, build-linux-x64, get-update-version]
secrets: inherit
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
with:
ref: refs/heads/dev
build_reason: Nightly
push_to_branch: dev
new_version: ${{ needs.get-update-version.outputs.new_version }}

View File

@ -98,8 +98,8 @@ jobs:
make build-and-publish make build-and-publish
env: env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ANALYTICS_ID: ${{ secrets.JAN_APP_POSTHOG_PROJECT_API_KEY }} ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
ANALYTICS_HOST: ${{ secrets.JAN_APP_POSTHOG_URL }} ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
- name: Upload Artifact .deb file - name: Upload Artifact .deb file
if: inputs.public_provider != 'github' if: inputs.public_provider != 'github'

View File

@ -137,8 +137,8 @@ jobs:
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }} APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APP_PATH: "." APP_PATH: "."
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }} DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
ANALYTICS_ID: ${{ secrets.JAN_APP_POSTHOG_PROJECT_API_KEY }} ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
ANALYTICS_HOST: ${{ secrets.JAN_APP_POSTHOG_URL }} ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
- name: Upload Artifact - name: Upload Artifact
if: inputs.public_provider != 'github' if: inputs.public_provider != 'github'

View File

@ -127,8 +127,8 @@ jobs:
make build-and-publish make build-and-publish
env: env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ANALYTICS_ID: ${{ secrets.JAN_APP_POSTHOG_PROJECT_API_KEY }} ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
ANALYTICS_HOST: ${{ secrets.JAN_APP_POSTHOG_URL }} ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }} AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}

View File

@ -17,7 +17,7 @@ jobs:
with: with:
fetch-depth: "0" fetch-depth: "0"
token: ${{ secrets.PAT_SERVICE_ACCOUNT }} token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
ref: main ref: dev
- name: Get Latest Release - name: Get Latest Release
uses: pozetroninc/github-action-get-latest-release@v0.7.0 uses: pozetroninc/github-action-get-latest-release@v0.7.0
@ -46,4 +46,4 @@ jobs:
git config --global user.name "Service Account" git config --global user.name "Service Account"
git add README.md git add README.md
git commit -m "Update README.md with Stable Download URLs" git commit -m "Update README.md with Stable Download URLs"
git -c http.extraheader="AUTHORIZATION: bearer ${{ secrets.PAT_SERVICE_ACCOUNT }}" push origin HEAD:main git -c http.extraheader="AUTHORIZATION: bearer ${{ secrets.PAT_SERVICE_ACCOUNT }}" push origin HEAD:dev

View File

@ -43,31 +43,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
<tr style="text-align:center"> <tr style="text-align:center">
<td style="text-align:center"><b>Stable (Recommended)</b></td> <td style="text-align:center"><b>Stable (Recommended)</b></td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://github.com/janhq/jan/releases/download/v0.4.4/jan-win-x64-0.4.4.exe'> <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-win-x64-0.4.5.exe'>
<img src='./docs/static/img/windows.png' style="height:14px; width: 14px" /> <img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
<b>jan.exe</b> <b>jan.exe</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-x64-0.4.4.dmg'> <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-x64-0.4.5.dmg'>
<img src='./docs/static/img/mac.png' style="height:15px; width: 15px" /> <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>Intel</b> <b>Intel</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://github.com/janhq/jan/releases/download/v0.4.4/jan-mac-arm64-0.4.4.dmg'> <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-mac-arm64-0.4.5.dmg'>
<img src='./docs/static/img/mac.png' style="height:15px; width: 15px" /> <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>M1/M2</b> <b>M1/M2</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://github.com/janhq/jan/releases/download/v0.4.4/jan-linux-amd64-0.4.4.deb'> <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-linux-amd64-0.4.5.deb'>
<img src='./docs/static/img/linux.png' style="height:14px; width: 14px" /> <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.deb</b> <b>jan.deb</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://github.com/janhq/jan/releases/download/v0.4.4/jan-linux-x86_64-0.4.4.AppImage'> <a href='https://github.com/janhq/jan/releases/download/v0.4.5/jan-linux-x86_64-0.4.5.AppImage'>
<img src='./docs/static/img/linux.png' style="height:14px; width: 14px" /> <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.AppImage</b> <b>jan.AppImage</b>
</a> </a>
@ -76,31 +76,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
<tr style="text-align:center"> <tr style="text-align:center">
<td style="text-align:center"><b>Experimental (Nightly Build)</b></td> <td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-win-x64-0.4.4-194.exe'> <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.5-224.exe'>
<img src='./docs/static/img/windows.png' style="height:14px; width: 14px" /> <img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
<b>jan.exe</b> <b>jan.exe</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.4-194.dmg'> <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.5-224.dmg'>
<img src='./docs/static/img/mac.png' style="height:15px; width: 15px" /> <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>Intel</b> <b>Intel</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.4-194.dmg'> <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.5-224.dmg'>
<img src='./docs/static/img/mac.png' style="height:15px; width: 15px" /> <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
<b>M1/M2</b> <b>M1/M2</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.4-194.deb'> <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.5-224.deb'>
<img src='./docs/static/img/linux.png' style="height:14px; width: 14px" /> <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.deb</b> <b>jan.deb</b>
</a> </a>
</td> </td>
<td style="text-align:center"> <td style="text-align:center">
<a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.4-194.AppImage'> <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.5-224.AppImage'>
<img src='./docs/static/img/linux.png' style="height:14px; width: 14px" /> <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
<b>jan.AppImage</b> <b>jan.AppImage</b>
</a> </a>

View File

@ -3,7 +3,6 @@
* @description Enum of all the routes exposed by the app * @description Enum of all the routes exposed by the app
*/ */
export enum AppRoute { export enum AppRoute {
appDataPath = 'appDataPath',
openExternalUrl = 'openExternalUrl', openExternalUrl = 'openExternalUrl',
openAppDirectory = 'openAppDirectory', openAppDirectory = 'openAppDirectory',
openFileExplore = 'openFileExplorer', openFileExplore = 'openFileExplorer',
@ -12,6 +11,7 @@ export enum AppRoute {
updateAppConfiguration = 'updateAppConfiguration', updateAppConfiguration = 'updateAppConfiguration',
relaunch = 'relaunch', relaunch = 'relaunch',
joinPath = 'joinPath', joinPath = 'joinPath',
isSubdirectory = 'isSubdirectory',
baseName = 'baseName', baseName = 'baseName',
startServer = 'startServer', startServer = 'startServer',
stopServer = 'stopServer', stopServer = 'stopServer',
@ -61,6 +61,7 @@ export enum FileManagerRoute {
syncFile = 'syncFile', syncFile = 'syncFile',
getJanDataFolderPath = 'getJanDataFolderPath', getJanDataFolderPath = 'getJanDataFolderPath',
getResourcePath = 'getResourcePath', getResourcePath = 'getResourcePath',
getUserHomePath = 'getUserHomePath',
fileStat = 'fileStat', fileStat = 'fileStat',
writeBlob = 'writeBlob', writeBlob = 'writeBlob',
} }

View File

@ -22,7 +22,11 @@ const executeOnMain: (extension: string, method: string, ...args: any[]) => Prom
* @param {object} network - Optional object to specify proxy/whether to ignore SSL certificates. * @param {object} network - Optional object to specify proxy/whether to ignore SSL certificates.
* @returns {Promise<any>} A promise that resolves when the file is downloaded. * @returns {Promise<any>} A promise that resolves when the file is downloaded.
*/ */
const downloadFile: (url: string, fileName: string, network?: { proxy?: string, ignoreSSL?: boolean }) => Promise<any> = (url, fileName, network) => { const downloadFile: (
url: string,
fileName: string,
network?: { proxy?: string; ignoreSSL?: boolean }
) => Promise<any> = (url, fileName, network) => {
return global.core?.api?.downloadFile(url, fileName, network) return global.core?.api?.downloadFile(url, fileName, network)
} }
@ -79,6 +83,12 @@ const openExternalUrl: (url: string) => Promise<any> = (url) =>
*/ */
const getResourcePath: () => Promise<string> = () => global.core.api?.getResourcePath() const getResourcePath: () => Promise<string> = () => global.core.api?.getResourcePath()
/**
* Gets the user's home path.
* @returns return user's home path
*/
const getUserHomePath = (): Promise<string> => global.core.api?.getUserHomePath()
/** /**
* Log to file from browser processes. * Log to file from browser processes.
* *
@ -87,6 +97,17 @@ const getResourcePath: () => Promise<string> = () => global.core.api?.getResourc
const log: (message: string, fileName?: string) => void = (message, fileName) => const log: (message: string, fileName?: string) => void = (message, fileName) =>
global.core.api?.log(message, fileName) global.core.api?.log(message, fileName)
/**
* Check whether the path is a subdirectory of another path.
*
* @param from - The path to check.
* @param to - The path to check against.
*
* @returns {Promise<boolean>} - A promise that resolves with a boolean indicating whether the path is a subdirectory.
*/
const isSubdirectory: (from: string, to: string) => Promise<boolean> = (from: string, to: string) =>
global.core.api?.isSubdirectory(from, to)
/** /**
* Register extension point function type definition * Register extension point function type definition
*/ */
@ -94,7 +115,7 @@ export type RegisterExtensionPoint = (
extensionName: string, extensionName: string,
extensionId: string, extensionId: string,
method: Function, method: Function,
priority?: number, priority?: number
) => void ) => void
/** /**
@ -111,5 +132,7 @@ export {
openExternalUrl, openExternalUrl,
baseName, baseName,
log, log,
isSubdirectory,
getUserHomePath,
FileStat, FileStat,
} }

View File

@ -2,7 +2,8 @@ import fs from 'fs'
import { JanApiRouteConfiguration, RouteConfiguration } from './configuration' import { JanApiRouteConfiguration, RouteConfiguration } from './configuration'
import { join } from 'path' import { join } from 'path'
import { ContentType, MessageStatus, Model, ThreadMessage } from './../../../index' import { ContentType, MessageStatus, Model, ThreadMessage } from './../../../index'
import { getJanDataFolderPath } from '../../utils' import { getEngineConfiguration, getJanDataFolderPath } from '../../utils'
import { DEFAULT_CHAT_COMPLETION_URL } from './consts'
export const getBuilder = async (configuration: RouteConfiguration) => { export const getBuilder = async (configuration: RouteConfiguration) => {
const directoryPath = join(getJanDataFolderPath(), configuration.dirName) const directoryPath = join(getJanDataFolderPath(), configuration.dirName)
@ -309,7 +310,7 @@ export const chatCompletions = async (request: any, reply: any) => {
const engineConfiguration = await getEngineConfiguration(requestedModel.engine) const engineConfiguration = await getEngineConfiguration(requestedModel.engine)
let apiKey: string | undefined = undefined let apiKey: string | undefined = undefined
let apiUrl: string = 'http://127.0.0.1:3928/inferences/llamacpp/chat_completion' // default nitro url let apiUrl: string = DEFAULT_CHAT_COMPLETION_URL
if (engineConfiguration) { if (engineConfiguration) {
apiKey = engineConfiguration.api_key apiKey = engineConfiguration.api_key
@ -320,7 +321,7 @@ export const chatCompletions = async (request: any, reply: any) => {
'Content-Type': 'text/event-stream', 'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache', 'Cache-Control': 'no-cache',
'Connection': 'keep-alive', 'Connection': 'keep-alive',
"Access-Control-Allow-Origin": "*" 'Access-Control-Allow-Origin': '*',
}) })
const headers: Record<string, any> = { const headers: Record<string, any> = {
@ -346,13 +347,3 @@ export const chatCompletions = async (request: any, reply: any) => {
response.body.pipe(reply.raw) response.body.pipe(reply.raw)
} }
} }
const getEngineConfiguration = async (engineId: string) => {
if (engineId !== 'openai') {
return undefined
}
const directoryPath = join(getJanDataFolderPath(), 'engines')
const filePath = join(directoryPath, `${engineId}.json`)
const data = await fs.readFileSync(filePath, 'utf-8')
return JSON.parse(data)
}

View File

@ -0,0 +1,19 @@
// The PORT to use for the Nitro subprocess
export const NITRO_DEFAULT_PORT = 3928
// The HOST address to use for the Nitro subprocess
export const LOCAL_HOST = '127.0.0.1'
// File extension of model binaries Nitro can load
export const SUPPORTED_MODEL_FORMAT = '.gguf'

// The base URL of the Nitro subprocess; all endpoint URLs derive from it
const NITRO_HTTP_SERVER_URL = `http://${LOCAL_HOST}:${NITRO_DEFAULT_PORT}`

// The URL for the Nitro subprocess to load a model
export const NITRO_HTTP_LOAD_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/loadmodel`
// The URL for the Nitro subprocess to validate a model
export const NITRO_HTTP_VALIDATE_MODEL_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/modelstatus`
// The URL for the Nitro subprocess to kill itself
export const NITRO_HTTP_KILL_URL = `${NITRO_HTTP_SERVER_URL}/processmanager/destroy`
// Default chat-completion endpoint (the local Nitro server); was previously
// rebuilt from host/port literals instead of reusing NITRO_HTTP_SERVER_URL
export const DEFAULT_CHAT_COMPLETION_URL = `${NITRO_HTTP_SERVER_URL}/inferences/llamacpp/chat_completion`

View File

@ -0,0 +1,351 @@
import fs from 'fs'
import { join } from 'path'
import { getJanDataFolderPath, getJanExtensionsPath, getSystemResourceInfo } from '../../utils'
import { logServer } from '../../log'
import { ChildProcessWithoutNullStreams, spawn } from 'child_process'
import { Model, ModelSettingParams, PromptTemplate } from '../../../types'
import {
LOCAL_HOST,
NITRO_DEFAULT_PORT,
NITRO_HTTP_KILL_URL,
NITRO_HTTP_LOAD_MODEL_URL,
NITRO_HTTP_VALIDATE_MODEL_URL,
SUPPORTED_MODEL_FORMAT,
} from './consts'
// The subprocess instance for Nitro; undefined whenever no server is running
// (cleared on process 'close' and after a successful stopModel()).
let subprocess: ChildProcessWithoutNullStreams | undefined = undefined

// TODO: move this to core type
// Settings payload POSTed to Nitro's /loadmodel endpoint.
interface NitroModelSettings extends ModelSettingParams {
  // Absolute path to the .gguf model binary.
  llama_model_path: string
  // Physical (performance) CPU core count to use; clamped to >= 1 by runModel.
  cpu_threads: number
}
/**
 * Starts the model `modelId` on the Nitro subprocess.
 *
 * Never rejects: any failure is surfaced through the returned `error` field.
 *
 * @param modelId - Folder name of the model under the Jan data folder.
 * @param settingParams - Optional overrides merged over the model's settings.
 * @returns `{ message }` on success, `{ error }` on failure.
 */
export const startModel = async (modelId: string, settingParams?: ModelSettingParams) => {
  try {
    await runModel(modelId, settingParams)
  } catch (e) {
    return { error: e }
  }
  return { message: `Model ${modelId} started` }
}
/**
 * Resolves the GGUF binary and metadata for `modelId` under
 * `<janDataFolder>/models/<modelId>`, builds the Nitro settings payload,
 * then (re)starts Nitro and loads the model into it.
 *
 * @param modelId - Folder name of the model in the Jan data folder.
 * @param settingParams - Optional overrides merged over model.json settings.
 * @throws A plain string when the model folder or a .gguf file is missing.
 */
const runModel = async (modelId: string, settingParams?: ModelSettingParams): Promise<void> => {
  const janDataFolderPath = getJanDataFolderPath()
  const modelFolderFullPath = join(janDataFolderPath, 'models', modelId)

  if (!fs.existsSync(modelFolderFullPath)) {
    throw `Model not found: ${modelId}`
  }

  const files: string[] = fs.readdirSync(modelFolderFullPath)

  // Look for GGUF model file (first file whose name contains '.gguf').
  const ggufBinFile = files.find((file) => file.toLowerCase().includes(SUPPORTED_MODEL_FORMAT))

  // NOTE(review): model.json is read before the GGUF check, so a missing
  // metadata file throws a JSON/fs error rather than the message below.
  const modelMetadataPath = join(modelFolderFullPath, 'model.json')
  const modelMetadata: Model = JSON.parse(fs.readFileSync(modelMetadataPath, 'utf-8'))

  if (!ggufBinFile) {
    throw 'No GGUF model file found'
  }
  const modelBinaryPath = join(modelFolderFullPath, ggufBinFile)

  const nitroResourceProbe = await getSystemResourceInfo()
  const nitroModelSettings: NitroModelSettings = {
    ...modelMetadata.settings,
    ...settingParams,
    llama_model_path: modelBinaryPath,
    // This is critical and requires real CPU physical core count (or performance core)
    cpu_threads: Math.max(1, nitroResourceProbe.numCpuPhysicalCore),
    // Multimodal models ship a projector file; resolve it against the model folder.
    ...(modelMetadata.settings.mmproj && {
      mmproj: join(modelFolderFullPath, modelMetadata.settings.mmproj),
    }),
  }

  logServer(`[NITRO]::Debug: Nitro model settings: ${JSON.stringify(nitroModelSettings)}`)

  // Convert settings.prompt_template to system_prompt, user_prompt, ai_prompt
  if (modelMetadata.settings.prompt_template) {
    const promptTemplate = modelMetadata.settings.prompt_template
    const prompt = promptTemplateConverter(promptTemplate)
    if (prompt?.error) {
      return Promise.reject(prompt.error)
    }
    nitroModelSettings.system_prompt = prompt.system_prompt
    nitroModelSettings.user_prompt = prompt.user_prompt
    nitroModelSettings.ai_prompt = prompt.ai_prompt
  }

  await runNitroAndLoadModel(modelId, nitroModelSettings)
}
// TODO: move to util
/**
 * Splits a prompt template into its system / user / AI segments using the
 * '{system_message}' and '{prompt}' markers.
 *
 * @param promptTemplate - Template string, e.g. "SYS {system_message} USER {prompt} AI".
 * @returns The split segments, or `{ error }` when no '{prompt}' marker exists.
 */
const promptTemplateConverter = (promptTemplate: string): PromptTemplate => {
  const SYSTEM_MARKER = '{system_message}'
  const PROMPT_MARKER = '{prompt}'

  const systemPos = promptTemplate.indexOf(SYSTEM_MARKER)
  const promptPos = promptTemplate.indexOf(PROMPT_MARKER)

  // Neither marker arrangement we understand: must at least contain '{prompt}'.
  if (promptPos === -1) {
    return { error: 'Cannot split prompt template' }
  }

  // Only the prompt marker is present: no system segment.
  // (substring keeps the original swap-on-inverted-range semantics.)
  if (systemPos === -1) {
    return {
      user_prompt: promptTemplate.substring(0, promptPos),
      ai_prompt: promptTemplate.substring(promptPos + PROMPT_MARKER.length),
    }
  }

  // Both markers present: system / user / ai segments in template order.
  return {
    system_prompt: promptTemplate.substring(0, systemPos),
    user_prompt: promptTemplate.substring(systemPos + SYSTEM_MARKER.length, promptPos),
    ai_prompt: promptTemplate.substring(promptPos + PROMPT_MARKER.length),
  }
}
/**
 * Restarts the Nitro server and loads the model into it:
 * stop any running instance, wait for the port to free, spawn a fresh
 * process, POST the model settings, then verify the model is loaded.
 *
 * @param modelId - Model being (re)started; forwarded to stopModel.
 * @param modelSettings - Fully-resolved settings built by runModel.
 */
const runNitroAndLoadModel = async (modelId: string, modelSettings: NitroModelSettings) => {
  const tcpPortUsed = require('tcp-port-used')

  // Make sure any previous Nitro instance is fully gone before respawning.
  await stopModel(modelId)
  await tcpPortUsed.waitUntilFree(NITRO_DEFAULT_PORT, 300, 5000)

  /**
   * There is a problem with Windows process manager
   * Should wait for awhile to make sure the port is free and subprocess is killed
   * The tested threshold is 500ms
   **/
  if (process.platform === 'win32') {
    await new Promise((resolve) => setTimeout(resolve, 500))
  }

  await spawnNitroProcess()
  await loadLLMModel(modelSettings)
  await validateModelStatus()
}
/**
 * Spawns the Nitro binary as a child process and resolves once the server
 * is accepting TCP connections on its port.
 *
 * @returns Resolves when Nitro is reachable; rejects if the port is still
 *          free after 30 seconds (polled every 300 ms).
 */
const spawnNitroProcess = async (): Promise<void> => {
  logServer(`[NITRO]::Debug: Spawning Nitro subprocess...`)

  const binaryFolder = join(
    getJanExtensionsPath(),
    '@janhq',
    'inference-nitro-extension',
    'dist',
    'bin'
  )
  const executableOptions = executableNitroFile()
  const tcpPortUsed = require('tcp-port-used')

  // argv for the nitro binary: <thread_count> <host> <port>
  const args: string[] = ['1', LOCAL_HOST, NITRO_DEFAULT_PORT.toString()]

  // Execute the binary (reuse `args` — it was previously computed but a
  // duplicate array literal was passed to spawn instead).
  logServer(
    `[NITRO]::Debug: Spawn nitro at path: ${executableOptions.executablePath}, and args: ${args}`
  )
  subprocess = spawn(executableOptions.executablePath, args, {
    cwd: binaryFolder,
    env: {
      ...process.env,
      // Restrict which GPU Nitro may use ('' when running on CPU).
      CUDA_VISIBLE_DEVICES: executableOptions.cudaVisibleDevices,
    },
  })

  // Forward subprocess output into the server log.
  subprocess.stdout.on('data', (data: any) => {
    logServer(`[NITRO]::Debug: ${data}`)
  })
  subprocess.stderr.on('data', (data: any) => {
    logServer(`[NITRO]::Error: ${data}`)
  })
  subprocess.on('close', (code: any) => {
    logServer(`[NITRO]::Debug: Nitro exited with code: ${code}`)
    subprocess = undefined
  })

  // Await readiness instead of resolving immediately: the previous fire-and-
  // forget .then() let callers race the server start-up and left a potential
  // unhandled rejection if the 30 s wait timed out.
  await tcpPortUsed.waitUntilUsed(NITRO_DEFAULT_PORT, 300, 30000)
  logServer(`[NITRO]::Debug: Nitro is ready`)
}
// Resolved launch options for the Nitro binary.
type NitroExecutableOptions = {
  // Absolute path to the platform/GPU-specific nitro executable.
  executablePath: string
  // Value for the CUDA_VISIBLE_DEVICES env var; '' when running on CPU.
  cudaVisibleDevices: string
}
/**
 * Picks the Nitro executable matching the current platform and the GPU
 * run mode recorded in `<janDataFolder>/settings/settings.json`.
 *
 * Windows/Linux choose between cpu, cuda-11-7 and cuda-12-0 builds based on
 * that settings file; macOS chooses by CPU architecture only.
 *
 * @returns The executable path plus the CUDA_VISIBLE_DEVICES value to export.
 */
const executableNitroFile = (): NitroExecutableOptions => {
  const nvidiaInfoFilePath = join(getJanDataFolderPath(), 'settings', 'settings.json')
  let binaryFolder = join(
    getJanExtensionsPath(),
    '@janhq',
    'inference-nitro-extension',
    'dist',
    'bin'
  )
  let cudaVisibleDevices = ''
  let binaryName = 'nitro'

  // Shared win32/linux logic (previously duplicated verbatim in both
  // branches): pick <prefix>-cpu or the matching cuda build from the saved
  // GPU settings, and record the GPU with the most VRAM to expose.
  const resolveGpuFolder = (prefix: string): string => {
    const nvidiaInfo = JSON.parse(fs.readFileSync(nvidiaInfoFilePath, 'utf-8'))
    if (nvidiaInfo['run_mode'] === 'cpu') {
      return join(binaryFolder, `${prefix}-cpu`)
    }
    cudaVisibleDevices = nvidiaInfo['gpu_highest_vram']
    return join(
      binaryFolder,
      nvidiaInfo['cuda'].version === '12' ? `${prefix}-cuda-12-0` : `${prefix}-cuda-11-7`
    )
  }

  if (process.platform === 'win32') {
    // For Windows: win-cpu, win-cuda-11-7, win-cuda-12-0
    binaryFolder = resolveGpuFolder('win')
    binaryName = 'nitro.exe'
  } else if (process.platform === 'darwin') {
    // For MacOS: mac-arm64 (Silicon), mac-x64 (Intel)
    binaryFolder = join(binaryFolder, process.arch === 'arm64' ? 'mac-arm64' : 'mac-x64')
  } else {
    // For Linux: linux-cpu, linux-cuda-11-7, linux-cuda-12-0
    binaryFolder = resolveGpuFolder('linux')
  }

  return {
    executablePath: join(binaryFolder, binaryName),
    cudaVisibleDevices,
  }
}
/**
 * Asks the running Nitro server whether the model finished loading.
 *
 * @returns Resolves when the server answers OK with `model_loaded: true`;
 *          rejects with 'Validate model status failed' otherwise.
 */
const validateModelStatus = async (): Promise<void> => {
  // Send a GET request to the validation URL.
  // Retry the request up to 5 times if it fails, with a delay of 500 milliseconds between retries.
  const fetchRT = require('fetch-retry')
  const fetchRetry = fetchRT(fetch)

  return fetchRetry(NITRO_HTTP_VALIDATE_MODEL_URL, {
    method: 'GET',
    headers: {
      'Content-Type': 'application/json',
    },
    retries: 5,
    retryDelay: 500,
  }).then(async (res: Response) => {
    // NOTE(review): JSON.stringify on a fetch Response serializes to '{}';
    // logging res.status would be more informative — confirm before changing.
    logServer(`[NITRO]::Debug: Validate model state success with response ${JSON.stringify(res)}`)
    // If the response is OK, check model_loaded status.
    if (res.ok) {
      const body = await res.json()
      // If the model is loaded, return an empty object.
      // Otherwise, return an object with an error message.
      if (body.model_loaded) {
        return Promise.resolve()
      }
    }
    return Promise.reject('Validate model status failed')
  })
}
/**
 * POSTs the resolved model settings to Nitro's /loadmodel endpoint.
 *
 * Retries up to 3 times with a 500 ms delay between attempts.
 *
 * @param settings - Nitro settings payload (binary path, threads, prompts).
 * @returns The fetch response on success; rejects with the underlying error.
 */
const loadLLMModel = async (settings: NitroModelSettings): Promise<Response> => {
  logServer(`[NITRO]::Debug: Loading model with params ${JSON.stringify(settings)}`)
  const fetchRT = require('fetch-retry')
  const fetchRetry = fetchRT(fetch)

  return fetchRetry(NITRO_HTTP_LOAD_MODEL_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(settings),
    retries: 3,
    retryDelay: 500,
  })
    .then((res: any) => {
      logServer(`[NITRO]::Debug: Load model success with response ${JSON.stringify(res)}`)
      return Promise.resolve(res)
    })
    .catch((err: any) => {
      logServer(`[NITRO]::Error: Load model failed with error ${err}`)
      return Promise.reject(err)
    })
}
/**
 * Stop model and kill nitro process.
 *
 * Sends a DELETE to Nitro's kill endpoint, then kills the subprocess and
 * waits for the port to free up, with a 5-second overall timeout.
 *
 * @param _modelId - Unused; Nitro hosts one model per process.
 * @returns `{ message }` on success, `{ error }` when nothing was running;
 *          rejects `{ error }` on timeout.
 */
export const stopModel = async (_modelId: string) => {
  if (!subprocess) {
    return {
      error: "Model isn't running",
    }
  }
  return new Promise((resolve, reject) => {
    const controller = new AbortController()
    // Safety net: abort the kill request and reject if shutdown hangs.
    // The handle is kept so a successful stop can cancel it — previously the
    // timer always fired, aborting the finished fetch and calling reject()
    // after resolve().
    const timeout = setTimeout(() => {
      controller.abort()
      reject({
        error: 'Failed to stop model: Timedout',
      })
    }, 5000)
    const tcpPortUsed = require('tcp-port-used')
    logServer(`[NITRO]::Debug: Request to kill Nitro`)
    fetch(NITRO_HTTP_KILL_URL, {
      method: 'DELETE',
      signal: controller.signal,
    })
      .then(() => {
        subprocess?.kill()
        subprocess = undefined
      })
      .catch(() => {
        // don't need to do anything, we still kill the subprocess
      })
      .then(() => tcpPortUsed.waitUntilFree(NITRO_DEFAULT_PORT, 300, 5000))
      .then(() => {
        clearTimeout(timeout)
        logServer(`[NITRO]::Debug: Nitro process is terminated`)
        resolve({
          message: 'Model stopped',
        })
      })
  })
}

View File

@ -10,6 +10,8 @@ import {
} from '../common/builder' } from '../common/builder'
import { JanApiRouteConfiguration } from '../common/configuration' import { JanApiRouteConfiguration } from '../common/configuration'
import { startModel, stopModel } from '../common/startStopModel'
import { ModelSettingParams } from '../../../types'
export const commonRouter = async (app: HttpServer) => { export const commonRouter = async (app: HttpServer) => {
// Common Routes // Common Routes
@ -17,19 +19,33 @@ export const commonRouter = async (app: HttpServer) => {
app.get(`/${key}`, async (_request) => getBuilder(JanApiRouteConfiguration[key])) app.get(`/${key}`, async (_request) => getBuilder(JanApiRouteConfiguration[key]))
app.get(`/${key}/:id`, async (request: any) => app.get(`/${key}/:id`, async (request: any) =>
retrieveBuilder(JanApiRouteConfiguration[key], request.params.id), retrieveBuilder(JanApiRouteConfiguration[key], request.params.id)
) )
app.delete(`/${key}/:id`, async (request: any) => app.delete(`/${key}/:id`, async (request: any) =>
deleteBuilder(JanApiRouteConfiguration[key], request.params.id), deleteBuilder(JanApiRouteConfiguration[key], request.params.id)
) )
}) })
// Download Model Routes // Download Model Routes
app.get(`/models/download/:modelId`, async (request: any) => app.get(`/models/download/:modelId`, async (request: any) =>
downloadModel(request.params.modelId, { ignoreSSL: request.query.ignoreSSL === 'true', proxy: request.query.proxy }), downloadModel(request.params.modelId, {
ignoreSSL: request.query.ignoreSSL === 'true',
proxy: request.query.proxy,
})
) )
app.put(`/models/:modelId/start`, async (request: any) => {
let settingParams: ModelSettingParams | undefined = undefined
if (Object.keys(request.body).length !== 0) {
settingParams = JSON.parse(request.body) as ModelSettingParams
}
return startModel(request.params.modelId, settingParams)
})
app.put(`/models/:modelId/stop`, async (request: any) => stopModel(request.params.modelId))
// Chat Completion Routes // Chat Completion Routes
app.post(`/chat/completions`, async (request: any, reply: any) => chatCompletions(request, reply)) app.post(`/chat/completions`, async (request: any, reply: any) => chatCompletions(request, reply))

View File

@ -8,5 +8,7 @@ export const fsRouter = async (app: HttpServer) => {
app.post(`/app/${FileManagerRoute.getResourcePath}`, async (request: any, reply: any) => {}) app.post(`/app/${FileManagerRoute.getResourcePath}`, async (request: any, reply: any) => {})
app.post(`/app/${FileManagerRoute.getUserHomePath}`, async (request: any, reply: any) => {})
app.post(`/app/${FileManagerRoute.fileStat}`, async (request: any, reply: any) => {}) app.post(`/app/${FileManagerRoute.fileStat}`, async (request: any, reply: any) => {})
} }

View File

@ -2,38 +2,36 @@ import fs from 'fs'
import util from 'util' import util from 'util'
import { getAppLogPath, getServerLogPath } from './utils' import { getAppLogPath, getServerLogPath } from './utils'
export const log = function (message: string) { export const log = (message: string) => {
const appLogPath = getAppLogPath() const path = getAppLogPath()
if (!message.startsWith('[')) { if (!message.startsWith('[')) {
message = `[APP]::${message}` message = `[APP]::${message}`
} }
message = `${new Date().toISOString()} ${message}` message = `${new Date().toISOString()} ${message}`
if (fs.existsSync(appLogPath)) { writeLog(message, path)
var log_file = fs.createWriteStream(appLogPath, {
flags: 'a',
})
log_file.write(util.format(message) + '\n')
log_file.close()
console.debug(message)
}
} }
export const logServer = function (message: string) { export const logServer = (message: string) => {
const serverLogPath = getServerLogPath() const path = getServerLogPath()
if (!message.startsWith('[')) { if (!message.startsWith('[')) {
message = `[SERVER]::${message}` message = `[SERVER]::${message}`
} }
message = `${new Date().toISOString()} ${message}` message = `${new Date().toISOString()} ${message}`
writeLog(message, path)
}
if (fs.existsSync(serverLogPath)) { const writeLog = (message: string, logPath: string) => {
var log_file = fs.createWriteStream(serverLogPath, { if (!fs.existsSync(logPath)) {
fs.writeFileSync(logPath, message)
} else {
const logFile = fs.createWriteStream(logPath, {
flags: 'a', flags: 'a',
}) })
log_file.write(util.format(message) + '\n') logFile.write(util.format(message) + '\n')
log_file.close() logFile.close()
console.debug(message) console.debug(message)
} }
} }

View File

@ -1,16 +1,18 @@
import { AppConfiguration } from "../../types"; import { AppConfiguration, SystemResourceInfo } from '../../types'
import { join } from "path"; import { join } from 'path'
import fs from "fs"; import fs from 'fs'
import os from "os"; import os from 'os'
import { log, logServer } from '../log'
import childProcess from 'child_process'
// TODO: move this to core // TODO: move this to core
const configurationFileName = "settings.json"; const configurationFileName = 'settings.json'
// TODO: do no specify app name in framework module // TODO: do no specify app name in framework module
const defaultJanDataFolder = join(os.homedir(), "jan"); const defaultJanDataFolder = join(os.homedir(), 'jan')
const defaultAppConfig: AppConfiguration = { const defaultAppConfig: AppConfiguration = {
data_folder: defaultJanDataFolder, data_folder: defaultJanDataFolder,
}; }
/** /**
* Getting App Configurations. * Getting App Configurations.
@ -20,39 +22,39 @@ const defaultAppConfig: AppConfiguration = {
export const getAppConfigurations = (): AppConfiguration => { export const getAppConfigurations = (): AppConfiguration => {
// Retrieve Application Support folder path // Retrieve Application Support folder path
// Fallback to user home directory if not found // Fallback to user home directory if not found
const configurationFile = getConfigurationFilePath(); const configurationFile = getConfigurationFilePath()
if (!fs.existsSync(configurationFile)) { if (!fs.existsSync(configurationFile)) {
// create default app config if we don't have one // create default app config if we don't have one
console.debug(`App config not found, creating default config at ${configurationFile}`); console.debug(`App config not found, creating default config at ${configurationFile}`)
fs.writeFileSync(configurationFile, JSON.stringify(defaultAppConfig)); fs.writeFileSync(configurationFile, JSON.stringify(defaultAppConfig))
return defaultAppConfig; return defaultAppConfig
} }
try { try {
const appConfigurations: AppConfiguration = JSON.parse( const appConfigurations: AppConfiguration = JSON.parse(
fs.readFileSync(configurationFile, "utf-8"), fs.readFileSync(configurationFile, 'utf-8')
); )
return appConfigurations; return appConfigurations
} catch (err) { } catch (err) {
console.error(`Failed to read app config, return default config instead! Err: ${err}`); console.error(`Failed to read app config, return default config instead! Err: ${err}`)
return defaultAppConfig; return defaultAppConfig
} }
}; }
const getConfigurationFilePath = () => const getConfigurationFilePath = () =>
join( join(
global.core?.appPath() || process.env[process.platform == "win32" ? "USERPROFILE" : "HOME"], global.core?.appPath() || process.env[process.platform == 'win32' ? 'USERPROFILE' : 'HOME'],
configurationFileName, configurationFileName
); )
export const updateAppConfiguration = (configuration: AppConfiguration): Promise<void> => { export const updateAppConfiguration = (configuration: AppConfiguration): Promise<void> => {
const configurationFile = getConfigurationFilePath(); const configurationFile = getConfigurationFilePath()
console.debug("updateAppConfiguration, configurationFile: ", configurationFile); console.debug('updateAppConfiguration, configurationFile: ', configurationFile)
fs.writeFileSync(configurationFile, JSON.stringify(configuration)); fs.writeFileSync(configurationFile, JSON.stringify(configuration))
return Promise.resolve(); return Promise.resolve()
}; }
/** /**
* Utility function to get server log path * Utility function to get server log path
@ -60,13 +62,13 @@ export const updateAppConfiguration = (configuration: AppConfiguration): Promise
* @returns {string} The log path. * @returns {string} The log path.
*/ */
export const getServerLogPath = (): string => { export const getServerLogPath = (): string => {
const appConfigurations = getAppConfigurations(); const appConfigurations = getAppConfigurations()
const logFolderPath = join(appConfigurations.data_folder, "logs"); const logFolderPath = join(appConfigurations.data_folder, 'logs')
if (!fs.existsSync(logFolderPath)) { if (!fs.existsSync(logFolderPath)) {
fs.mkdirSync(logFolderPath, { recursive: true }); fs.mkdirSync(logFolderPath, { recursive: true })
} }
return join(logFolderPath, "server.log"); return join(logFolderPath, 'server.log')
}; }
/** /**
* Utility function to get app log path * Utility function to get app log path
@ -74,13 +76,13 @@ export const getServerLogPath = (): string => {
* @returns {string} The log path. * @returns {string} The log path.
*/ */
export const getAppLogPath = (): string => { export const getAppLogPath = (): string => {
const appConfigurations = getAppConfigurations(); const appConfigurations = getAppConfigurations()
const logFolderPath = join(appConfigurations.data_folder, "logs"); const logFolderPath = join(appConfigurations.data_folder, 'logs')
if (!fs.existsSync(logFolderPath)) { if (!fs.existsSync(logFolderPath)) {
fs.mkdirSync(logFolderPath, { recursive: true }); fs.mkdirSync(logFolderPath, { recursive: true })
} }
return join(logFolderPath, "app.log"); return join(logFolderPath, 'app.log')
}; }
/** /**
* Utility function to get data folder path * Utility function to get data folder path
@ -88,9 +90,9 @@ export const getAppLogPath = (): string => {
* @returns {string} The data folder path. * @returns {string} The data folder path.
*/ */
export const getJanDataFolderPath = (): string => { export const getJanDataFolderPath = (): string => {
const appConfigurations = getAppConfigurations(); const appConfigurations = getAppConfigurations()
return appConfigurations.data_folder; return appConfigurations.data_folder
}; }
/** /**
* Utility function to get extension path * Utility function to get extension path
@ -98,6 +100,70 @@ export const getJanDataFolderPath = (): string => {
* @returns {string} The extensions path. * @returns {string} The extensions path.
*/ */
export const getJanExtensionsPath = (): string => { export const getJanExtensionsPath = (): string => {
const appConfigurations = getAppConfigurations(); const appConfigurations = getAppConfigurations()
return join(appConfigurations.data_folder, "extensions"); return join(appConfigurations.data_folder, 'extensions')
}; }
/**
* Utility function to physical cpu count
*
* @returns {number} The physical cpu count.
*/
export const physicalCpuCount = async (): Promise<number> => {
const platform = os.platform()
if (platform === 'linux') {
const output = await exec('lscpu -p | egrep -v "^#" | sort -u -t, -k 2,4 | wc -l')
return parseInt(output.trim(), 10)
} else if (platform === 'darwin') {
const output = await exec('sysctl -n hw.physicalcpu_max')
return parseInt(output.trim(), 10)
} else if (platform === 'win32') {
const output = await exec('WMIC CPU Get NumberOfCores')
return output
.split(os.EOL)
.map((line: string) => parseInt(line))
.filter((value: number) => !isNaN(value))
.reduce((sum: number, number: number) => sum + number, 1)
} else {
const cores = os.cpus().filter((cpu: any, index: number) => {
const hasHyperthreading = cpu.model.includes('Intel')
const isOdd = index % 2 === 1
return !hasHyperthreading || isOdd
})
return cores.length
}
}
const exec = async (command: string): Promise<string> => {
return new Promise((resolve, reject) => {
childProcess.exec(command, { encoding: 'utf8' }, (error, stdout) => {
if (error) {
reject(error)
} else {
resolve(stdout)
}
})
})
}
export const getSystemResourceInfo = async (): Promise<SystemResourceInfo> => {
const cpu = await physicalCpuCount()
const message = `[NITRO]::CPU informations - ${cpu}`
log(message)
logServer(message)
return {
numCpuPhysicalCore: cpu,
memAvailable: 0, // TODO: this should not be 0
}
}
export const getEngineConfiguration = async (engineId: string) => {
if (engineId !== 'openai') {
return undefined
}
const directoryPath = join(getJanDataFolderPath(), 'engines')
const filePath = join(directoryPath, `${engineId}.json`)
const data = fs.readFileSync(filePath, 'utf-8')
return JSON.parse(data)
}

View File

@ -0,0 +1,6 @@
/**
* App configuration event name
*/
export enum AppConfigurationEventName {
OnConfigurationUpdate = 'OnConfigurationUpdate',
}

View File

@ -1 +1,2 @@
export * from './appConfigEntity' export * from './appConfigEntity'
export * from './appConfigEvent'

View File

@ -6,3 +6,4 @@ export * from './inference'
export * from './monitoring' export * from './monitoring'
export * from './file' export * from './file'
export * from './config' export * from './config'
export * from './miscellaneous'

View File

@ -0,0 +1,2 @@
export * from './systemResourceInfo'
export * from './promptTemplate'

View File

@ -0,0 +1,6 @@
export type PromptTemplate = {
system_prompt?: string
ai_prompt?: string
user_prompt?: string
error?: string
}

View File

@ -0,0 +1,4 @@
export type SystemResourceInfo = {
numCpuPhysicalCore: number
memAvailable: number
}

View File

@ -123,6 +123,7 @@ export type ModelSettingParams = {
user_prompt?: string user_prompt?: string
llama_model_path?: string llama_model_path?: string
mmproj?: string mmproj?: string
cont_batching?: boolean
} }
/** /**

View File

@ -43,5 +43,4 @@ export type ThreadState = {
waitingForResponse: boolean waitingForResponse: boolean
error?: Error error?: Error
lastMessage?: string lastMessage?: string
isFinishInit?: boolean
} }

View File

@ -1,5 +1,5 @@
GTM_ID=xxxx GTM_ID=xxxx
POSTHOG_PROJECT_API_KEY=xxxx UMAMI_PROJECT_API_KEY=xxxx
POSTHOG_APP_URL=xxxx UMAMI_APP_URL=xxxx
ALGOLIA_API_KEY=xxxx ALGOLIA_API_KEY=xxxx
ALGOLIA_APP_ID=xxxx ALGOLIA_APP_ID=xxxx

View File

@ -1,5 +1,5 @@
import { app, ipcMain, dialog, shell } from 'electron' import { app, ipcMain, dialog, shell } from 'electron'
import { join, basename } from 'path' import { join, basename, relative as getRelative, isAbsolute } from 'path'
import { WindowManager } from './../managers/window' import { WindowManager } from './../managers/window'
import { getResourcePath } from './../utils/path' import { getResourcePath } from './../utils/path'
import { AppRoute, AppConfiguration } from '@janhq/core' import { AppRoute, AppConfiguration } from '@janhq/core'
@ -50,6 +50,27 @@ export function handleAppIPCs() {
join(...paths) join(...paths)
) )
/**
* Checks if the given path is a subdirectory of the given directory.
*
* @param _event - The IPC event object.
* @param from - The path to check.
* @param to - The directory to check against.
*
* @returns {Promise<boolean>} - A promise that resolves with the result.
*/
ipcMain.handle(
AppRoute.isSubdirectory,
async (_event, from: string, to: string) => {
const relative = getRelative(from, to)
const isSubdir =
relative && !relative.startsWith('..') && !isAbsolute(relative)
if (isSubdir === '') return false
else return isSubdir
}
)
/** /**
* Retrieve basename from given path, respect to the current OS. * Retrieve basename from given path, respect to the current OS.
*/ */

View File

@ -1,4 +1,4 @@
import { ipcMain } from 'electron' import { ipcMain, app } from 'electron'
// @ts-ignore // @ts-ignore
import reflect from '@alumna/reflect' import reflect from '@alumna/reflect'
@ -38,6 +38,10 @@ export function handleFileMangerIPCs() {
getResourcePath() getResourcePath()
) )
ipcMain.handle(FileManagerRoute.getUserHomePath, async (_event) =>
app.getPath('home')
)
// handle fs is directory here // handle fs is directory here
ipcMain.handle( ipcMain.handle(
FileManagerRoute.fileStat, FileManagerRoute.fileStat,

View File

@ -28,6 +28,22 @@ import { setupCore } from './utils/setup'
app app
.whenReady() .whenReady()
.then(async () => {
if (!app.isPackaged) {
// Which means you're running from source code
const { default: installExtension, REACT_DEVELOPER_TOOLS } = await import(
'electron-devtools-installer'
) // Don't use import on top level, since the installer package is dev-only
try {
const name = installExtension(REACT_DEVELOPER_TOOLS)
console.log(`Added Extension: ${name}`)
} catch (err) {
console.log('An error occurred while installing devtools:')
console.error(err)
// Only log the error and don't throw it because it's not critical
}
}
})
.then(setupCore) .then(setupCore)
.then(createUserSpace) .then(createUserSpace)
.then(migrateExtensions) .then(migrateExtensions)

View File

@ -86,7 +86,7 @@
"request": "^2.88.2", "request": "^2.88.2",
"request-progress": "^3.0.0", "request-progress": "^3.0.0",
"rimraf": "^5.0.5", "rimraf": "^5.0.5",
"typescript": "^5.3.3", "typescript": "^5.2.2",
"ulid": "^2.3.0", "ulid": "^2.3.0",
"use-debounce": "^9.0.4" "use-debounce": "^9.0.4"
}, },
@ -99,6 +99,7 @@
"@typescript-eslint/parser": "^6.7.3", "@typescript-eslint/parser": "^6.7.3",
"electron": "28.0.0", "electron": "28.0.0",
"electron-builder": "^24.9.1", "electron-builder": "^24.9.1",
"electron-devtools-installer": "^3.2.0",
"electron-playwright-helpers": "^1.6.0", "electron-playwright-helpers": "^1.6.0",
"eslint-plugin-react": "^7.33.2", "eslint-plugin-react": "^7.33.2",
"run-script-os": "^1.1.6" "run-script-os": "^1.1.6"

View File

@ -1,9 +1,9 @@
import { PlaywrightTestConfig } from "@playwright/test"; import { PlaywrightTestConfig } from '@playwright/test'
const config: PlaywrightTestConfig = { const config: PlaywrightTestConfig = {
testDir: "./tests", testDir: './tests',
retries: 0, retries: 0,
timeout: 120000, globalTimeout: 300000,
}; }
export default config; export default config

View File

@ -9,6 +9,7 @@ import {
let electronApp: ElectronApplication let electronApp: ElectronApplication
let page: Page let page: Page
const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
test.beforeAll(async () => { test.beforeAll(async () => {
process.env.CI = 'e2e' process.env.CI = 'e2e'
@ -26,7 +27,9 @@ test.beforeAll(async () => {
}) })
await stubDialog(electronApp, 'showMessageBox', { response: 1 }) await stubDialog(electronApp, 'showMessageBox', { response: 1 })
page = await electronApp.firstWindow() page = await electronApp.firstWindow({
timeout: TIMEOUT,
})
}) })
test.afterAll(async () => { test.afterAll(async () => {
@ -34,8 +37,12 @@ test.afterAll(async () => {
await page.close() await page.close()
}) })
test('explores models', async () => { test('explores hub', async () => {
await page.getByTestId('Hub').first().click() test.setTimeout(TIMEOUT)
await page.getByTestId('testid-explore-models').isVisible() await page.getByTestId('Hub').first().click({
// More test cases here... timeout: TIMEOUT,
})
await page.getByTestId('hub-container-test-id').isVisible({
timeout: TIMEOUT,
})
}) })

View File

@ -1,55 +0,0 @@
import { _electron as electron } from 'playwright'
import { ElectronApplication, Page, expect, test } from '@playwright/test'
import {
findLatestBuild,
parseElectronApp,
stubDialog,
} from 'electron-playwright-helpers'
let electronApp: ElectronApplication
let page: Page
test.beforeAll(async () => {
process.env.CI = 'e2e'
const latestBuild = findLatestBuild('dist')
expect(latestBuild).toBeTruthy()
// parse the packaged Electron app and find paths and other info
const appInfo = parseElectronApp(latestBuild)
expect(appInfo).toBeTruthy()
expect(appInfo.asar).toBe(true)
expect(appInfo.executable).toBeTruthy()
expect(appInfo.main).toBeTruthy()
expect(appInfo.name).toBe('jan')
expect(appInfo.packageJson).toBeTruthy()
expect(appInfo.packageJson.name).toBe('jan')
expect(appInfo.platform).toBeTruthy()
expect(appInfo.platform).toBe(process.platform)
expect(appInfo.resourcesDir).toBeTruthy()
electronApp = await electron.launch({
args: [appInfo.main], // main file from package.json
executablePath: appInfo.executable, // path to the Electron executable
})
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
page = await electronApp.firstWindow()
})
test.afterAll(async () => {
await electronApp.close()
await page.close()
})
test('renders the home page', async () => {
expect(page).toBeDefined()
// Welcome text is available
const welcomeText = await page
.getByTestId('testid-welcome-title')
.first()
.isVisible()
expect(welcomeText).toBe(false)
})

View File

@ -9,6 +9,7 @@ import {
let electronApp: ElectronApplication let electronApp: ElectronApplication
let page: Page let page: Page
const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
test.beforeAll(async () => { test.beforeAll(async () => {
process.env.CI = 'e2e' process.env.CI = 'e2e'
@ -26,7 +27,9 @@ test.beforeAll(async () => {
}) })
await stubDialog(electronApp, 'showMessageBox', { response: 1 }) await stubDialog(electronApp, 'showMessageBox', { response: 1 })
page = await electronApp.firstWindow() page = await electronApp.firstWindow({
timeout: TIMEOUT,
})
}) })
test.afterAll(async () => { test.afterAll(async () => {
@ -35,20 +38,24 @@ test.afterAll(async () => {
}) })
test('renders left navigation panel', async () => { test('renders left navigation panel', async () => {
// Chat section should be there test.setTimeout(TIMEOUT)
const chatSection = await page.getByTestId('Chat').first().isVisible()
expect(chatSection).toBe(false)
// Home actions
/* Disable unstable feature tests
** const botBtn = await page.getByTestId("Bot").first().isEnabled();
** Enable back when it is whitelisted
*/
const systemMonitorBtn = await page const systemMonitorBtn = await page
.getByTestId('System Monitor') .getByTestId('System Monitor')
.first() .first()
.isEnabled() .isEnabled({
const settingsBtn = await page.getByTestId('Settings').first().isEnabled() timeout: TIMEOUT,
})
const settingsBtn = await page
.getByTestId('Thread')
.first()
.isEnabled({ timeout: TIMEOUT })
expect([systemMonitorBtn, settingsBtn].filter((e) => !e).length).toBe(0) expect([systemMonitorBtn, settingsBtn].filter((e) => !e).length).toBe(0)
// Chat section should be there
await page.getByTestId('Local API Server').first().click({
timeout: TIMEOUT,
})
const localServer = await page.getByTestId('local-server-testid').first()
await expect(localServer).toBeVisible({
timeout: TIMEOUT,
})
}) })

View File

@ -9,6 +9,7 @@ import {
let electronApp: ElectronApplication let electronApp: ElectronApplication
let page: Page let page: Page
const TIMEOUT: number = parseInt(process.env.TEST_TIMEOUT || '300000')
test.beforeAll(async () => { test.beforeAll(async () => {
process.env.CI = 'e2e' process.env.CI = 'e2e'
@ -26,7 +27,9 @@ test.beforeAll(async () => {
}) })
await stubDialog(electronApp, 'showMessageBox', { response: 1 }) await stubDialog(electronApp, 'showMessageBox', { response: 1 })
page = await electronApp.firstWindow() page = await electronApp.firstWindow({
timeout: TIMEOUT,
})
}) })
test.afterAll(async () => { test.afterAll(async () => {
@ -35,6 +38,8 @@ test.afterAll(async () => {
}) })
test('shows settings', async () => { test('shows settings', async () => {
await page.getByTestId('Settings').first().click() test.setTimeout(TIMEOUT)
await page.getByTestId('testid-setting-description').isVisible() await page.getByTestId('Settings').first().click({ timeout: TIMEOUT })
const settingDescription = page.getByTestId('testid-setting-description')
await expect(settingDescription).toBeVisible({ timeout: TIMEOUT })
}) })

View File

@ -1,41 +0,0 @@
import { _electron as electron } from 'playwright'
import { ElectronApplication, Page, expect, test } from '@playwright/test'
import {
findLatestBuild,
parseElectronApp,
stubDialog,
} from 'electron-playwright-helpers'
let electronApp: ElectronApplication
let page: Page
test.beforeAll(async () => {
process.env.CI = 'e2e'
const latestBuild = findLatestBuild('dist')
expect(latestBuild).toBeTruthy()
// parse the packaged Electron app and find paths and other info
const appInfo = parseElectronApp(latestBuild)
expect(appInfo).toBeTruthy()
electronApp = await electron.launch({
args: [appInfo.main], // main file from package.json
executablePath: appInfo.executable, // path to the Electron executable
})
await stubDialog(electronApp, 'showMessageBox', { response: 1 })
page = await electronApp.firstWindow()
})
test.afterAll(async () => {
await electronApp.close()
await page.close()
})
test('shows system monitor', async () => {
await page.getByTestId('System Monitor').first().click()
await page.getByTestId('testid-system-monitor').isVisible()
// More test cases here...
})

View File

@ -8,7 +8,10 @@
"license": "AGPL-3.0", "license": "AGPL-3.0",
"scripts": { "scripts": {
"build": "tsc --module commonjs && rollup -c rollup.config.ts", "build": "tsc --module commonjs && rollup -c rollup.config.ts",
"build:publish": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install" "build:publish:linux": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install",
"build:publish:darwin": "rimraf *.tgz --glob && npm run build && ../../.github/scripts/auto-sign.sh && npm pack && cpx *.tgz ../../electron/pre-install",
"build:publish:win32": "rimraf *.tgz --glob && npm run build && npm pack && cpx *.tgz ../../electron/pre-install",
"build:publish": "run-script-os"
}, },
"devDependencies": { "devDependencies": {
"@rollup/plugin-commonjs": "^25.0.7", "@rollup/plugin-commonjs": "^25.0.7",
@ -22,7 +25,8 @@
"rollup-plugin-define": "^1.0.1", "rollup-plugin-define": "^1.0.1",
"rollup-plugin-sourcemaps": "^0.6.3", "rollup-plugin-sourcemaps": "^0.6.3",
"rollup-plugin-typescript2": "^0.36.0", "rollup-plugin-typescript2": "^0.36.0",
"typescript": "^5.3.3" "typescript": "^5.3.3",
"run-script-os": "^1.1.6"
}, },
"dependencies": { "dependencies": {
"@janhq/core": "file:../../core", "@janhq/core": "file:../../core",

View File

@ -12,12 +12,11 @@ export class Retrieval {
public chunkOverlap?: number = 0; public chunkOverlap?: number = 0;
private retriever: any; private retriever: any;
private embeddingModel: any = undefined; private embeddingModel?: OpenAIEmbeddings = undefined;
private textSplitter?: RecursiveCharacterTextSplitter; private textSplitter?: RecursiveCharacterTextSplitter;
constructor(chunkSize: number = 4000, chunkOverlap: number = 200) { constructor(chunkSize: number = 4000, chunkOverlap: number = 200) {
this.updateTextSplitter(chunkSize, chunkOverlap); this.updateTextSplitter(chunkSize, chunkOverlap);
this.embeddingModel = new OpenAIEmbeddings({});
} }
public updateTextSplitter(chunkSize: number, chunkOverlap: number): void { public updateTextSplitter(chunkSize: number, chunkOverlap: number): void {
@ -36,7 +35,7 @@ export class Retrieval {
if (engine === "nitro") { if (engine === "nitro") {
this.embeddingModel = new OpenAIEmbeddings( this.embeddingModel = new OpenAIEmbeddings(
{ openAIApiKey: "nitro-embedding" }, { openAIApiKey: "nitro-embedding" },
{ basePath: "http://127.0.0.1:3928/v1" }, { basePath: "http://127.0.0.1:3928/v1" }
); );
} else { } else {
// Fallback to OpenAI Settings // Fallback to OpenAI Settings
@ -50,11 +49,12 @@ export class Retrieval {
public ingestAgentKnowledge = async ( public ingestAgentKnowledge = async (
filePath: string, filePath: string,
memoryPath: string, memoryPath: string
): Promise<any> => { ): Promise<any> => {
const loader = new PDFLoader(filePath, { const loader = new PDFLoader(filePath, {
splitPages: true, splitPages: true,
}); });
if (!this.embeddingModel) return Promise.reject();
const doc = await loader.load(); const doc = await loader.load();
const docs = await this.textSplitter!.splitDocuments(doc); const docs = await this.textSplitter!.splitDocuments(doc);
const vectorStore = await HNSWLib.fromDocuments(docs, this.embeddingModel); const vectorStore = await HNSWLib.fromDocuments(docs, this.embeddingModel);
@ -62,6 +62,7 @@ export class Retrieval {
}; };
public loadRetrievalAgent = async (memoryPath: string): Promise<void> => { public loadRetrievalAgent = async (memoryPath: string): Promise<void> => {
if (!this.embeddingModel) return Promise.reject();
const vectorStore = await HNSWLib.load(memoryPath, this.embeddingModel); const vectorStore = await HNSWLib.load(memoryPath, this.embeddingModel);
this.retriever = vectorStore.asRetriever(2); this.retriever = vectorStore.asRetriever(2);
return Promise.resolve(); return Promise.resolve();

View File

@ -119,19 +119,20 @@ export default class JSONConversationalExtension extends ConversationalExtension
if (!(await fs.existsSync(threadDirPath))) if (!(await fs.existsSync(threadDirPath)))
await fs.mkdirSync(threadDirPath) await fs.mkdirSync(threadDirPath)
if (message.content[0].type === 'image') { if (message.content[0]?.type === 'image') {
const filesPath = await joinPath([threadDirPath, 'files']) const filesPath = await joinPath([threadDirPath, 'files'])
if (!(await fs.existsSync(filesPath))) await fs.mkdirSync(filesPath) if (!(await fs.existsSync(filesPath))) await fs.mkdirSync(filesPath)
const imagePath = await joinPath([filesPath, `${message.id}.png`]) const imagePath = await joinPath([filesPath, `${message.id}.png`])
const base64 = message.content[0].text.annotations[0] const base64 = message.content[0].text.annotations[0]
await this.storeImage(base64, imagePath) await this.storeImage(base64, imagePath)
// if (fs.existsSync(imagePath)) { if ((await fs.existsSync(imagePath)) && message.content?.length) {
// message.content[0].text.annotations[0] = imagePath // Use file path instead of blob
// } message.content[0].text.annotations[0] = `threads/${message.thread_id}/files/${message.id}.png`
}
} }
if (message.content[0].type === 'pdf') { if (message.content[0]?.type === 'pdf') {
const filesPath = await joinPath([threadDirPath, 'files']) const filesPath = await joinPath([threadDirPath, 'files'])
if (!(await fs.existsSync(filesPath))) await fs.mkdirSync(filesPath) if (!(await fs.existsSync(filesPath))) await fs.mkdirSync(filesPath)
@ -139,7 +140,7 @@ export default class JSONConversationalExtension extends ConversationalExtension
const blob = message.content[0].text.annotations[0] const blob = message.content[0].text.annotations[0]
await this.storeFile(blob, filePath) await this.storeFile(blob, filePath)
if (await fs.existsSync(filePath)) { if ((await fs.existsSync(filePath)) && message.content?.length) {
// Use file path instead of blob // Use file path instead of blob
message.content[0].text.annotations[0] = `threads/${message.thread_id}/files/${message.id}.pdf` message.content[0].text.annotations[0] = `threads/${message.thread_id}/files/${message.id}.pdf`
} }

View File

@ -1 +1 @@
0.2.14 0.3.5

View File

@ -35,7 +35,7 @@
"rollup-plugin-sourcemaps": "^0.6.3", "rollup-plugin-sourcemaps": "^0.6.3",
"rollup-plugin-typescript2": "^0.36.0", "rollup-plugin-typescript2": "^0.36.0",
"run-script-os": "^1.1.6", "run-script-os": "^1.1.6",
"typescript": "^5.3.3" "typescript": "^5.2.2"
}, },
"dependencies": { "dependencies": {
"@janhq/core": "file:../../core", "@janhq/core": "file:../../core",

View File

@ -27,6 +27,9 @@ export default [
TROUBLESHOOTING_URL: JSON.stringify( TROUBLESHOOTING_URL: JSON.stringify(
"https://jan.ai/guides/troubleshooting" "https://jan.ai/guides/troubleshooting"
), ),
JAN_SERVER_INFERENCE_URL: JSON.stringify(
"http://localhost:1337/v1/chat/completions"
),
}), }),
// Allow json resolution // Allow json resolution
json(), json(),

View File

@ -1,22 +1,7 @@
declare const NODE: string; declare const NODE: string;
declare const INFERENCE_URL: string; declare const INFERENCE_URL: string;
declare const TROUBLESHOOTING_URL: string; declare const TROUBLESHOOTING_URL: string;
declare const JAN_SERVER_INFERENCE_URL: string;
/**
* The parameters for the initModel function.
* @property settings - The settings for the machine learning model.
* @property settings.ctx_len - The context length.
* @property settings.ngl - The number of generated tokens.
* @property settings.cont_batching - Whether to use continuous batching.
* @property settings.embedding - Whether to use embedding.
*/
interface EngineSettings {
ctx_len: number;
ngl: number;
cpu_threads: number;
cont_batching: boolean;
embedding: boolean;
}
/** /**
* The response from the initModel function. * The response from the initModel function.
@ -26,8 +11,3 @@ interface ModelOperationResponse {
error?: any; error?: any;
modelFile?: string; modelFile?: string;
} }
interface ResourcesInfo {
numCpuPhysicalCore: number;
memAvailable: number;
}

View File

@ -6,6 +6,7 @@ import { Observable } from "rxjs";
* @returns An Observable that emits the generated response as a string. * @returns An Observable that emits the generated response as a string.
*/ */
export function requestInference( export function requestInference(
inferenceUrl: string,
recentMessages: any[], recentMessages: any[],
model: Model, model: Model,
controller?: AbortController controller?: AbortController
@ -17,7 +18,7 @@ export function requestInference(
stream: true, stream: true,
...model.parameters, ...model.parameters,
}); });
fetch(INFERENCE_URL, { fetch(inferenceUrl, {
method: "POST", method: "POST",
headers: { headers: {
"Content-Type": "application/json", "Content-Type": "application/json",

View File

@ -24,6 +24,7 @@ import {
MessageEvent, MessageEvent,
ModelEvent, ModelEvent,
InferenceEvent, InferenceEvent,
ModelSettingParams,
} from "@janhq/core"; } from "@janhq/core";
import { requestInference } from "./helpers/sse"; import { requestInference } from "./helpers/sse";
import { ulid } from "ulid"; import { ulid } from "ulid";
@ -45,7 +46,7 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
private _currentModel: Model | undefined; private _currentModel: Model | undefined;
private _engineSettings: EngineSettings = { private _engineSettings: ModelSettingParams = {
ctx_len: 2048, ctx_len: 2048,
ngl: 100, ngl: 100,
cpu_threads: 1, cpu_threads: 1,
@ -67,35 +68,48 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
*/ */
private nitroProcessInfo: any = undefined; private nitroProcessInfo: any = undefined;
private inferenceUrl = "";
/** /**
* Subscribes to events emitted by the @janhq/core package. * Subscribes to events emitted by the @janhq/core package.
*/ */
async onLoad() { async onLoad() {
if (!(await fs.existsSync(JanInferenceNitroExtension._homeDir))) { if (!(await fs.existsSync(JanInferenceNitroExtension._homeDir))) {
await fs try {
.mkdirSync(JanInferenceNitroExtension._homeDir) await fs.mkdirSync(JanInferenceNitroExtension._homeDir);
.catch((err: Error) => console.debug(err)); } catch (e) {
console.debug(e);
}
} }
// init inference url
// @ts-ignore
const electronApi = window?.electronAPI;
this.inferenceUrl = INFERENCE_URL;
if (!electronApi) {
this.inferenceUrl = JAN_SERVER_INFERENCE_URL;
}
console.debug("Inference url: ", this.inferenceUrl);
if (!(await fs.existsSync(JanInferenceNitroExtension._settingsDir))) if (!(await fs.existsSync(JanInferenceNitroExtension._settingsDir)))
await fs.mkdirSync(JanInferenceNitroExtension._settingsDir); await fs.mkdirSync(JanInferenceNitroExtension._settingsDir);
this.writeDefaultEngineSettings(); this.writeDefaultEngineSettings();
// Events subscription // Events subscription
events.on(MessageEvent.OnMessageSent, (data: MessageRequest) => events.on(MessageEvent.OnMessageSent, (data: MessageRequest) =>
this.onMessageRequest(data), this.onMessageRequest(data)
); );
events.on(ModelEvent.OnModelInit, (model: Model) => events.on(ModelEvent.OnModelInit, (model: Model) =>
this.onModelInit(model), this.onModelInit(model)
); );
events.on(ModelEvent.OnModelStop, (model: Model) => events.on(ModelEvent.OnModelStop, (model: Model) =>
this.onModelStop(model), this.onModelStop(model)
); );
events.on(InferenceEvent.OnInferenceStopped, () => events.on(InferenceEvent.OnInferenceStopped, () =>
this.onInferenceStopped(), this.onInferenceStopped()
); );
// Attempt to fetch nvidia info // Attempt to fetch nvidia info
@ -120,7 +134,7 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
} else { } else {
await fs.writeFileSync( await fs.writeFileSync(
engineFile, engineFile,
JSON.stringify(this._engineSettings, null, 2), JSON.stringify(this._engineSettings, null, 2)
); );
} }
} catch (err) { } catch (err) {
@ -133,6 +147,7 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
const modelFullPath = await joinPath(["models", model.id]); const modelFullPath = await joinPath(["models", model.id]);
this._currentModel = model;
const nitroInitResult = await executeOnMain(NODE, "runModel", { const nitroInitResult = await executeOnMain(NODE, "runModel", {
modelFullPath, modelFullPath,
model, model,
@ -143,12 +158,11 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
return; return;
} }
this._currentModel = model;
events.emit(ModelEvent.OnModelReady, model); events.emit(ModelEvent.OnModelReady, model);
this.getNitroProcesHealthIntervalId = setInterval( this.getNitroProcesHealthIntervalId = setInterval(
() => this.periodicallyGetNitroHealth(), () => this.periodicallyGetNitroHealth(),
JanInferenceNitroExtension._intervalHealthCheck, JanInferenceNitroExtension._intervalHealthCheck
); );
} }
@ -205,7 +219,11 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
if (!this._currentModel) return Promise.reject("No model loaded"); if (!this._currentModel) return Promise.reject("No model loaded");
requestInference(data.messages ?? [], this._currentModel).subscribe({ requestInference(
this.inferenceUrl,
data.messages ?? [],
this._currentModel
).subscribe({
next: (_content: any) => {}, next: (_content: any) => {},
complete: async () => { complete: async () => {
resolve(message); resolve(message);
@ -225,6 +243,9 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
*/ */
private async onMessageRequest(data: MessageRequest) { private async onMessageRequest(data: MessageRequest) {
if (data.model?.engine !== InferenceEngine.nitro || !this._currentModel) { if (data.model?.engine !== InferenceEngine.nitro || !this._currentModel) {
console.log(
`Model is not nitro or no model loaded ${data.model?.engine} ${this._currentModel}`
);
return; return;
} }
@ -250,7 +271,12 @@ export default class JanInferenceNitroExtension extends InferenceExtension {
...(this._currentModel || {}), ...(this._currentModel || {}),
...(data.model || {}), ...(data.model || {}),
}; };
requestInference(data.messages ?? [], model, this.controller).subscribe({ requestInference(
this.inferenceUrl,
data.messages ?? [],
model,
this.controller
).subscribe({
next: (content: any) => { next: (content: any) => {
const messageContent: ThreadContent = { const messageContent: ThreadContent = {
type: ContentType.Text, type: ContentType.Text,

View File

@ -3,11 +3,19 @@ import path from "path";
import { ChildProcessWithoutNullStreams, spawn } from "child_process"; import { ChildProcessWithoutNullStreams, spawn } from "child_process";
import tcpPortUsed from "tcp-port-used"; import tcpPortUsed from "tcp-port-used";
import fetchRT from "fetch-retry"; import fetchRT from "fetch-retry";
import { log, getJanDataFolderPath } from "@janhq/core/node"; import {
log,
getJanDataFolderPath,
getSystemResourceInfo,
} from "@janhq/core/node";
import { getNitroProcessInfo, updateNvidiaInfo } from "./nvidia"; import { getNitroProcessInfo, updateNvidiaInfo } from "./nvidia";
import { Model, InferenceEngine, ModelSettingParams } from "@janhq/core"; import {
Model,
InferenceEngine,
ModelSettingParams,
PromptTemplate,
} from "@janhq/core";
import { executableNitroFile } from "./execute"; import { executableNitroFile } from "./execute";
import { physicalCpuCount } from "./utils";
// Polyfill fetch with retry // Polyfill fetch with retry
const fetchRetry = fetchRT(fetch); const fetchRetry = fetchRT(fetch);
@ -19,25 +27,6 @@ interface ModelInitOptions {
modelFullPath: string; modelFullPath: string;
model: Model; model: Model;
} }
/**
* The response object of Prompt Template parsing.
*/
interface PromptTemplate {
system_prompt?: string;
ai_prompt?: string;
user_prompt?: string;
error?: string;
}
/**
* Model setting args for Nitro model load.
*/
interface ModelSettingArgs extends ModelSettingParams {
llama_model_path: string;
cpu_threads: number;
}
// The PORT to use for the Nitro subprocess // The PORT to use for the Nitro subprocess
const PORT = 3928; const PORT = 3928;
// The HOST address to use for the Nitro subprocess // The HOST address to use for the Nitro subprocess
@ -60,7 +49,7 @@ let subprocess: ChildProcessWithoutNullStreams | undefined = undefined;
// The current model file url // The current model file url
let currentModelFile: string = ""; let currentModelFile: string = "";
// The current model settings // The current model settings
let currentSettings: ModelSettingArgs | undefined = undefined; let currentSettings: ModelSettingParams | undefined = undefined;
/** /**
* Stops a Nitro subprocess. * Stops a Nitro subprocess.
@ -106,7 +95,7 @@ async function runModel(
if (wrapper.model.engine !== InferenceEngine.nitro) { if (wrapper.model.engine !== InferenceEngine.nitro) {
return Promise.reject("Not a nitro model"); return Promise.reject("Not a nitro model");
} else { } else {
const nitroResourceProbe = await getResourcesInfo(); const nitroResourceProbe = await getSystemResourceInfo();
// Convert settings.prompt_template to system_prompt, user_prompt, ai_prompt // Convert settings.prompt_template to system_prompt, user_prompt, ai_prompt
if (wrapper.model.settings.prompt_template) { if (wrapper.model.settings.prompt_template) {
const promptTemplate = wrapper.model.settings.prompt_template; const promptTemplate = wrapper.model.settings.prompt_template;
@ -220,6 +209,9 @@ function promptTemplateConverter(promptTemplate: string): PromptTemplate {
* @returns A Promise that resolves when the model is loaded successfully, or rejects with an error message if the model is not found or fails to load. * @returns A Promise that resolves when the model is loaded successfully, or rejects with an error message if the model is not found or fails to load.
*/ */
function loadLLMModel(settings: any): Promise<Response> { function loadLLMModel(settings: any): Promise<Response> {
if (!settings?.ngl) {
settings.ngl = 100;
}
log(`[NITRO]::Debug: Loading model with params ${JSON.stringify(settings)}`); log(`[NITRO]::Debug: Loading model with params ${JSON.stringify(settings)}`);
return fetchRetry(NITRO_HTTP_LOAD_MODEL_URL, { return fetchRetry(NITRO_HTTP_LOAD_MODEL_URL, {
method: "POST", method: "POST",
@ -240,7 +232,7 @@ function loadLLMModel(settings: any): Promise<Response> {
}) })
.catch((err) => { .catch((err) => {
log(`[NITRO]::Error: Load model failed with error ${err}`); log(`[NITRO]::Error: Load model failed with error ${err}`);
return Promise.reject(); return Promise.reject(err);
}); });
} }
@ -262,9 +254,9 @@ async function validateModelStatus(): Promise<void> {
retryDelay: 500, retryDelay: 500,
}).then(async (res: Response) => { }).then(async (res: Response) => {
log( log(
`[NITRO]::Debug: Validate model state success with response ${JSON.stringify( `[NITRO]::Debug: Validate model state with response ${JSON.stringify(
res, res.status
)}`, )}`
); );
// If the response is OK, check model_loaded status. // If the response is OK, check model_loaded status.
if (res.ok) { if (res.ok) {
@ -272,9 +264,19 @@ async function validateModelStatus(): Promise<void> {
// If the model is loaded, return an empty object. // If the model is loaded, return an empty object.
// Otherwise, return an object with an error message. // Otherwise, return an object with an error message.
if (body.model_loaded) { if (body.model_loaded) {
log(
`[NITRO]::Debug: Validate model state success with response ${JSON.stringify(
body
)}`
);
return Promise.resolve(); return Promise.resolve();
} }
} }
log(
`[NITRO]::Debug: Validate model state failed with response ${JSON.stringify(
res.statusText
)}`
);
return Promise.reject("Validate model status failed"); return Promise.reject("Validate model status failed");
}); });
} }
@ -351,22 +353,6 @@ function spawnNitroProcess(): Promise<any> {
}); });
} }
/**
* Get the system resources information
* TODO: Move to Core so that it can be reused
*/
function getResourcesInfo(): Promise<ResourcesInfo> {
return new Promise(async (resolve) => {
const cpu = await physicalCpuCount();
log(`[NITRO]::CPU informations - ${cpu}`);
const response: ResourcesInfo = {
numCpuPhysicalCore: cpu,
memAvailable: 0,
};
resolve(response);
});
}
/** /**
* Every module should have a dispose function * Every module should have a dispose function
* This will be called when the extension is unloaded and should clean up any resources * This will be called when the extension is unloaded and should clean up any resources

View File

@ -1,56 +0,0 @@
import os from "os";
import childProcess from "child_process";
function exec(command: string): Promise<string> {
return new Promise((resolve, reject) => {
childProcess.exec(command, { encoding: "utf8" }, (error, stdout) => {
if (error) {
reject(error);
} else {
resolve(stdout);
}
});
});
}
let amount: number;
const platform = os.platform();
export async function physicalCpuCount(): Promise<number> {
return new Promise((resolve, reject) => {
if (platform === "linux") {
exec('lscpu -p | egrep -v "^#" | sort -u -t, -k 2,4 | wc -l')
.then((output) => {
amount = parseInt(output.trim(), 10);
resolve(amount);
})
.catch(reject);
} else if (platform === "darwin") {
exec("sysctl -n hw.physicalcpu_max")
.then((output) => {
amount = parseInt(output.trim(), 10);
resolve(amount);
})
.catch(reject);
} else if (platform === "win32") {
exec("WMIC CPU Get NumberOfCores")
.then((output) => {
amount = output
.split(os.EOL)
.map((line: string) => parseInt(line))
.filter((value: number) => !isNaN(value))
.reduce((sum: number, number: number) => sum + number, 1);
resolve(amount);
})
.catch(reject);
} else {
const cores = os.cpus().filter((cpu: any, index: number) => {
const hasHyperthreading = cpu.model.includes("Intel");
const isOdd = index % 2 === 1;
return !hasHyperthreading || isOdd;
});
amount = cores.length;
resolve(amount);
}
});
}

View File

@ -20,6 +20,8 @@ import {
MessageEvent, MessageEvent,
ModelEvent, ModelEvent,
InferenceEvent, InferenceEvent,
AppConfigurationEventName,
joinPath,
} from "@janhq/core"; } from "@janhq/core";
import { requestInference } from "./helpers/sse"; import { requestInference } from "./helpers/sse";
import { ulid } from "ulid"; import { ulid } from "ulid";
@ -31,7 +33,7 @@ import { join } from "path";
* It also subscribes to events emitted by the @janhq/core package and handles new message requests. * It also subscribes to events emitted by the @janhq/core package and handles new message requests.
*/ */
export default class JanInferenceOpenAIExtension extends BaseExtension { export default class JanInferenceOpenAIExtension extends BaseExtension {
private static readonly _homeDir = "file://engines"; private static readonly _engineDir = "file://engines";
private static readonly _engineMetadataFileName = "openai.json"; private static readonly _engineMetadataFileName = "openai.json";
private static _currentModel: OpenAIModel; private static _currentModel: OpenAIModel;
@ -48,9 +50,9 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
* Subscribes to events emitted by the @janhq/core package. * Subscribes to events emitted by the @janhq/core package.
*/ */
async onLoad() { async onLoad() {
if (!(await fs.existsSync(JanInferenceOpenAIExtension._homeDir))) { if (!(await fs.existsSync(JanInferenceOpenAIExtension._engineDir))) {
await fs await fs
.mkdirSync(JanInferenceOpenAIExtension._homeDir) .mkdirSync(JanInferenceOpenAIExtension._engineDir)
.catch((err) => console.debug(err)); .catch((err) => console.debug(err));
} }
@ -71,6 +73,20 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
events.on(InferenceEvent.OnInferenceStopped, () => { events.on(InferenceEvent.OnInferenceStopped, () => {
JanInferenceOpenAIExtension.handleInferenceStopped(this); JanInferenceOpenAIExtension.handleInferenceStopped(this);
}); });
const settingsFilePath = await joinPath([
JanInferenceOpenAIExtension._engineDir,
JanInferenceOpenAIExtension._engineMetadataFileName,
]);
events.on(
AppConfigurationEventName.OnConfigurationUpdate,
(settingsKey: string) => {
// Update settings on changes
if (settingsKey === settingsFilePath)
JanInferenceOpenAIExtension.writeDefaultEngineSettings();
},
);
} }
/** /**
@ -81,7 +97,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
static async writeDefaultEngineSettings() { static async writeDefaultEngineSettings() {
try { try {
const engineFile = join( const engineFile = join(
JanInferenceOpenAIExtension._homeDir, JanInferenceOpenAIExtension._engineDir,
JanInferenceOpenAIExtension._engineMetadataFileName, JanInferenceOpenAIExtension._engineMetadataFileName,
); );
if (await fs.existsSync(engineFile)) { if (await fs.existsSync(engineFile)) {
@ -182,7 +198,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
}, },
error: async (err) => { error: async (err) => {
if (instance.isCancelled || message.content.length > 0) { if (instance.isCancelled || message.content.length > 0) {
message.status = MessageStatus.Error; message.status = MessageStatus.Stopped;
events.emit(MessageEvent.OnMessageUpdate, message); events.emit(MessageEvent.OnMessageUpdate, message);
return; return;
} }
@ -194,7 +210,7 @@ export default class JanInferenceOpenAIExtension extends BaseExtension {
}, },
}; };
message.content = [messageContent]; message.content = [messageContent];
message.status = MessageStatus.Ready; message.status = MessageStatus.Error;
events.emit(MessageEvent.OnMessageUpdate, message); events.emit(MessageEvent.OnMessageUpdate, message);
}, },
}); });

View File

@ -13,7 +13,7 @@
"format": "gguf", "format": "gguf",
"settings": { "settings": {
"ctx_len": 4096, "ctx_len": 4096,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant\n", "prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "dolphin-2_6-phi-2.Q8_0.gguf" "llama_model_path": "dolphin-2_6-phi-2.Q8_0.gguf"
}, },
"parameters": { "parameters": {

View File

@ -26,6 +26,8 @@
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"fastify": "^4.24.3", "fastify": "^4.24.3",
"request": "^2.88.2", "request": "^2.88.2",
"fetch-retry": "^5.0.6",
"tcp-port-used": "^1.0.2",
"request-progress": "^3.0.0" "request-progress": "^3.0.0"
}, },
"devDependencies": { "devDependencies": {
@ -35,6 +37,7 @@
"@typescript-eslint/parser": "^6.7.3", "@typescript-eslint/parser": "^6.7.3",
"eslint-plugin-react": "^7.33.2", "eslint-plugin-react": "^7.33.2",
"run-script-os": "^1.1.6", "run-script-os": "^1.1.6",
"@types/tcp-port-used": "^1.0.4",
"typescript": "^5.2.2" "typescript": "^5.2.2"
} }
} }

View File

@ -18,6 +18,7 @@
}, },
"dependencies": { "dependencies": {
"@radix-ui/react-avatar": "^1.0.4", "@radix-ui/react-avatar": "^1.0.4",
"@radix-ui/react-checkbox": "^1.0.4",
"@radix-ui/react-context": "^1.0.1", "@radix-ui/react-context": "^1.0.1",
"@radix-ui/react-dialog": "^1.0.5", "@radix-ui/react-dialog": "^1.0.5",
"@radix-ui/react-icons": "^1.3.0", "@radix-ui/react-icons": "^1.3.0",

View File

@ -9,7 +9,7 @@
} }
&-secondary-blue { &-secondary-blue {
@apply bg-blue-200 text-blue-600 hover:bg-blue-500/50; @apply bg-blue-200 text-blue-600 hover:bg-blue-300/50 dark:hover:bg-blue-200/80;
} }
&-danger { &-danger {
@ -17,7 +17,7 @@
} }
&-secondary-danger { &-secondary-danger {
@apply bg-red-200 text-red-600 hover:bg-red-500/50; @apply bg-red-200 text-red-600 hover:bg-red-300/50 dark:hover:bg-red-200/80;
} }
&-outline { &-outline {
@ -67,14 +67,18 @@
[type='submit'] { [type='submit'] {
&.btn-primary { &.btn-primary {
@apply bg-primary hover:bg-primary/90; @apply bg-primary hover:bg-primary/90;
@apply disabled:pointer-events-none disabled:bg-zinc-100 disabled:text-zinc-400;
} }
&.btn-secondary { &.btn-secondary {
@apply bg-secondary hover:bg-secondary/80; @apply bg-secondary hover:bg-secondary/80;
@apply disabled:pointer-events-none disabled:bg-zinc-100 disabled:text-zinc-400;
} }
&.btn-secondary-blue { &.btn-secondary-blue {
@apply bg-blue-200 text-blue-900 hover:bg-blue-200/80; @apply bg-blue-200 text-blue-900 hover:bg-blue-200/80;
@apply disabled:pointer-events-none disabled:bg-zinc-100 disabled:text-zinc-400;
} }
&.btn-danger { &.btn-danger {
@apply bg-danger hover:bg-danger/90; @apply bg-danger hover:bg-danger/90;
@apply disabled:pointer-events-none disabled:bg-zinc-100 disabled:text-zinc-400;
} }
} }

View File

@ -0,0 +1,29 @@
'use client'
import * as React from 'react'
import * as CheckboxPrimitive from '@radix-ui/react-checkbox'
import { CheckIcon } from '@radix-ui/react-icons'
import { twMerge } from 'tailwind-merge'
const Checkbox = React.forwardRef<
React.ElementRef<typeof CheckboxPrimitive.Root>,
React.ComponentPropsWithoutRef<typeof CheckboxPrimitive.Root>
>(({ className, ...props }, ref) => (
<CheckboxPrimitive.Root
ref={ref}
className={twMerge('checkbox', className)}
{...props}
>
<CheckboxPrimitive.Indicator
className={twMerge(
'flex flex-shrink-0 items-center justify-center text-current'
)}
>
<CheckIcon className="checkbox--icon" />
</CheckboxPrimitive.Indicator>
</CheckboxPrimitive.Root>
))
Checkbox.displayName = CheckboxPrimitive.Root.displayName
export { Checkbox }

View File

@ -0,0 +1,7 @@
.checkbox {
@apply border-border data-[state=checked]:bg-primary h-5 w-5 flex-shrink-0 rounded-md border data-[state=checked]:text-white;
&--icon {
@apply h-4 w-4;
}
}

View File

@ -12,3 +12,4 @@ export * from './command'
export * from './textarea' export * from './textarea'
export * from './select' export * from './select'
export * from './slider' export * from './slider'
export * from './checkbox'

View File

@ -1,6 +1,6 @@
.input { .input {
@apply border-border placeholder:text-muted-foreground flex h-9 w-full rounded-lg border bg-transparent px-3 py-1 transition-colors; @apply border-border placeholder:text-muted-foreground flex h-9 w-full rounded-lg border bg-transparent px-3 py-1 transition-colors;
@apply disabled:cursor-not-allowed disabled:bg-zinc-100; @apply disabled:cursor-not-allowed disabled:bg-zinc-100 disabled:dark:bg-zinc-800 disabled:dark:text-zinc-600;
@apply focus-within:outline-none focus-visible:outline-0 focus-visible:ring-2 focus-visible:ring-blue-500 focus-visible:ring-offset-1; @apply focus-within:outline-none focus-visible:outline-0 focus-visible:ring-2 focus-visible:ring-blue-500 focus-visible:ring-offset-1;
@apply file:border-0 file:bg-transparent file:font-medium; @apply file:border-0 file:bg-transparent file:font-medium;
} }

View File

@ -16,6 +16,7 @@
@import './textarea/styles.scss'; @import './textarea/styles.scss';
@import './select/styles.scss'; @import './select/styles.scss';
@import './slider/styles.scss'; @import './slider/styles.scss';
@import './checkbox/styles.scss';
.animate-spin { .animate-spin {
animation: spin 1s linear infinite; animation: spin 1s linear infinite;

View File

@ -1,5 +1,6 @@
.select { .select {
@apply placeholder:text-muted-foreground border-border flex h-9 w-full items-center justify-between whitespace-nowrap rounded-md border bg-transparent px-3 py-2 text-sm shadow-sm disabled:cursor-not-allowed disabled:opacity-50 [&>span]:line-clamp-1; @apply placeholder:text-muted-foreground border-border flex h-9 w-full items-center justify-between whitespace-nowrap rounded-md border bg-transparent px-3 py-2 text-sm shadow-sm disabled:cursor-not-allowed [&>span]:line-clamp-1;
@apply disabled:cursor-not-allowed disabled:bg-zinc-100 disabled:dark:bg-zinc-800 disabled:dark:text-zinc-600;
@apply focus-within:outline-none focus-visible:outline-0 focus-visible:ring-2 focus-visible:ring-blue-500 focus-visible:ring-offset-1; @apply focus-within:outline-none focus-visible:outline-0 focus-visible:ring-2 focus-visible:ring-blue-500 focus-visible:ring-offset-1;
&-caret { &-caret {

View File

@ -13,6 +13,8 @@ import { useClickOutside } from '@/hooks/useClickOutside'
import { usePath } from '@/hooks/usePath' import { usePath } from '@/hooks/usePath'
import { openFileTitle } from '@/utils/titleUtils'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom' import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
interface Props { interface Props {
@ -38,13 +40,6 @@ export default function CardSidebar({
useClickOutside(() => setMore(false), null, [menu, toggle]) useClickOutside(() => setMore(false), null, [menu, toggle])
let openFolderTitle: string = 'Open Containing Folder'
if (isMac) {
openFolderTitle = 'Show in Finder'
} else if (isWindows) {
openFolderTitle = 'Show in File Explorer'
}
return ( return (
<div <div
className={twMerge( className={twMerge(
@ -118,7 +113,7 @@ export default function CardSidebar({
{title === 'Model' ? ( {title === 'Model' ? (
<div className="flex flex-col"> <div className="flex flex-col">
<span className="font-medium text-black dark:text-muted-foreground"> <span className="font-medium text-black dark:text-muted-foreground">
{openFolderTitle} {openFileTitle()}
</span> </span>
<span className="mt-1 text-muted-foreground"> <span className="mt-1 text-muted-foreground">
Opens thread.json. Changes affect this thread only. Opens thread.json. Changes affect this thread only.
@ -126,7 +121,7 @@ export default function CardSidebar({
</div> </div>
) : ( ) : (
<span className="text-bold text-black dark:text-muted-foreground"> <span className="text-bold text-black dark:text-muted-foreground">
Show in Finder {openFileTitle()}
</span> </span>
)} )}
</> </>

View File

@ -26,6 +26,8 @@ import { useMainViewState } from '@/hooks/useMainViewState'
import useRecommendedModel from '@/hooks/useRecommendedModel' import useRecommendedModel from '@/hooks/useRecommendedModel'
import useUpdateModelParameters from '@/hooks/useUpdateModelParameters'
import { toGibibytes } from '@/utils/converter' import { toGibibytes } from '@/utils/converter'
import ModelLabel from '../ModelLabel' import ModelLabel from '../ModelLabel'
@ -34,68 +36,40 @@ import OpenAiKeyInput from '../OpenAiKeyInput'
import { serverEnabledAtom } from '@/helpers/atoms/LocalServer.atom' import { serverEnabledAtom } from '@/helpers/atoms/LocalServer.atom'
import { import {
ModelParams,
activeThreadAtom, activeThreadAtom,
getActiveThreadIdAtom,
setThreadModelParamsAtom, setThreadModelParamsAtom,
threadStatesAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
export const selectedModelAtom = atom<Model | undefined>(undefined) export const selectedModelAtom = atom<Model | undefined>(undefined)
export default function DropdownListSidebar() { // TODO: Move all of the unscoped logics outside of the component
const activeThreadId = useAtomValue(getActiveThreadIdAtom) const DropdownListSidebar = ({
strictedThread = true,
}: {
strictedThread?: boolean
}) => {
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const threadStates = useAtomValue(threadStatesAtom)
const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom) const [selectedModel, setSelectedModel] = useAtom(selectedModelAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom) const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const { activeModel, stateModel } = useActiveModel()
const { stateModel } = useActiveModel()
const [serverEnabled, setServerEnabled] = useAtom(serverEnabledAtom) const [serverEnabled, setServerEnabled] = useAtom(serverEnabledAtom)
const { setMainViewState } = useMainViewState() const { setMainViewState } = useMainViewState()
const [loader, setLoader] = useState(0)
const { recommendedModel, downloadedModels } = useRecommendedModel() const { recommendedModel, downloadedModels } = useRecommendedModel()
const { updateModelParameter } = useUpdateModelParameters()
/**
* Default value for max_tokens and ctx_len
* Its to avoid OOM issue since a model can set a big number for these settings
*/
const defaultValue = (value?: number) => {
if (value && value < 4096) return value
return 4096
}
useEffect(() => { useEffect(() => {
setSelectedModel(selectedModel || activeModel || recommendedModel) if (!activeThread) return
if (activeThread) { let model = downloadedModels.find(
const finishInit = threadStates[activeThread.id].isFinishInit ?? true (model) => model.id === activeThread.assistants[0].model.id
if (finishInit) return )
const modelParams: ModelParams = { if (!model) {
...recommendedModel?.parameters, model = recommendedModel
...recommendedModel?.settings,
/**
* This is to set default value for these settings instead of maximum value
* Should only apply when model.json has these settings
*/
...(recommendedModel?.parameters.max_tokens && {
max_tokens: defaultValue(recommendedModel?.parameters.max_tokens),
}),
...(recommendedModel?.settings.ctx_len && {
ctx_len: defaultValue(recommendedModel?.settings.ctx_len),
}),
}
setThreadModelParams(activeThread.id, modelParams)
} }
// eslint-disable-next-line react-hooks/exhaustive-deps setSelectedModel(model)
}, [ }, [recommendedModel, activeThread, downloadedModels, setSelectedModel])
recommendedModel,
activeThread,
setSelectedModel,
setThreadModelParams,
threadStates,
])
const [loader, setLoader] = useState(0)
// This is fake loader please fix this when we have realtime percentage when load model // This is fake loader please fix this when we have realtime percentage when load model
useEffect(() => { useEffect(() => {
@ -132,25 +106,35 @@ export default function DropdownListSidebar() {
setServerEnabled(false) setServerEnabled(false)
} }
if (activeThreadId) { if (activeThread) {
const modelParams = { const modelParams = {
...model?.parameters, ...model?.parameters,
...model?.settings, ...model?.settings,
} }
setThreadModelParams(activeThreadId, modelParams) // Update model paramter to the thread state
setThreadModelParams(activeThread.id, modelParams)
// Update model parameter to the thread file
if (model)
updateModelParameter(activeThread.id, {
params: modelParams,
modelId: model.id,
engine: model.engine,
})
} }
}, },
// eslint-disable-next-line react-hooks/exhaustive-deps
[ [
downloadedModels, downloadedModels,
serverEnabled, serverEnabled,
activeThreadId, activeThread,
activeModel, setSelectedModel,
setServerEnabled,
setThreadModelParams, setThreadModelParams,
updateModelParameter,
] ]
) )
if (!activeThread) { if (strictedThread && !activeThread) {
return null return null
} }
@ -236,10 +220,9 @@ export default function DropdownListSidebar() {
</Select> </Select>
</div> </div>
<OpenAiKeyInput <OpenAiKeyInput />
selectedModel={selectedModel}
serverEnabled={serverEnabled}
/>
</> </>
) )
} }
export default DropdownListSidebar

View File

@ -27,6 +27,8 @@ import { usePath } from '@/hooks/usePath'
import { showRightSideBarAtom } from '@/screens/Chat/Sidebar' import { showRightSideBarAtom } from '@/screens/Chat/Sidebar'
import { openFileTitle } from '@/utils/titleUtils'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom' import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
const TopBar = () => { const TopBar = () => {
@ -120,13 +122,14 @@ const TopBar = () => {
</span> </span>
</div> </div>
</div> </div>
{activeThread && ( <div
<div className={twMerge(
className={twMerge( 'absolute right-0 h-full w-80',
'absolute right-0 h-full w-80', showing && 'border-l border-border'
showing && 'border-l border-border' )}
)} >
> {((activeThread && mainViewState === MainViewState.Thread) ||
mainViewState === MainViewState.LocalServer) && (
<div className="flex h-full w-52 items-center justify-between px-4"> <div className="flex h-full w-52 items-center justify-between px-4">
{showing && ( {showing && (
<div className="relative flex h-full items-center"> <div className="relative flex h-full items-center">
@ -161,7 +164,7 @@ const TopBar = () => {
className="text-muted-foreground" className="text-muted-foreground"
/> />
<span className="font-medium text-black dark:text-muted-foreground"> <span className="font-medium text-black dark:text-muted-foreground">
Show in Finder {openFileTitle()}
</span> </span>
</div> </div>
<div <div
@ -206,7 +209,7 @@ const TopBar = () => {
/> />
<div className="flex flex-col"> <div className="flex flex-col">
<span className="font-medium text-black dark:text-muted-foreground"> <span className="font-medium text-black dark:text-muted-foreground">
Show in Finder {openFileTitle()}
</span> </span>
</div> </div>
</div> </div>
@ -227,8 +230,8 @@ const TopBar = () => {
/> />
</div> </div>
</div> </div>
</div> )}
)} </div>
</div> </div>
)} )}
<CommandSearch /> <CommandSearch />

View File

@ -12,7 +12,8 @@ import TopBar from '@/containers/Layout/TopBar'
import { MainViewState } from '@/constants/screens' import { MainViewState } from '@/constants/screens'
import { useMainViewState } from '@/hooks/useMainViewState' import { useMainViewState } from '@/hooks/useMainViewState'
import { SUCCESS_SET_NEW_DESTINATION } from '@/hooks/useVaultDirectory'
import { SUCCESS_SET_NEW_DESTINATION } from '@/screens/Settings/Advanced/DataFolder'
const BaseLayout = (props: PropsWithChildren) => { const BaseLayout = (props: PropsWithChildren) => {
const { children } = props const { children } = props

View File

@ -1,16 +1,19 @@
import React, { useEffect, useState } from 'react' import React, { useEffect, useState } from 'react'
import { InferenceEngine, Model } from '@janhq/core' import { InferenceEngine } from '@janhq/core'
import { Input } from '@janhq/uikit' import { Input } from '@janhq/uikit'
import { useAtomValue } from 'jotai'
import { useEngineSettings } from '@/hooks/useEngineSettings' import { useEngineSettings } from '@/hooks/useEngineSettings'
type Props = { import { selectedModelAtom } from '../DropdownListSidebar'
selectedModel?: Model
serverEnabled: boolean
}
const OpenAiKeyInput: React.FC<Props> = ({ selectedModel, serverEnabled }) => { import { serverEnabledAtom } from '@/helpers/atoms/LocalServer.atom'
const OpenAiKeyInput: React.FC = () => {
const selectedModel = useAtomValue(selectedModelAtom)
const serverEnabled = useAtomValue(serverEnabledAtom)
const [openAISettings, setOpenAISettings] = useState< const [openAISettings, setOpenAISettings] = useState<
{ api_key: string } | undefined { api_key: string } | undefined
>(undefined) >(undefined)
@ -20,8 +23,7 @@ const OpenAiKeyInput: React.FC<Props> = ({ selectedModel, serverEnabled }) => {
readOpenAISettings().then((settings) => { readOpenAISettings().then((settings) => {
setOpenAISettings(settings) setOpenAISettings(settings)
}) })
// eslint-disable-next-line react-hooks/exhaustive-deps }, [readOpenAISettings])
}, [])
if (!selectedModel || selectedModel.engine !== InferenceEngine.openai) { if (!selectedModel || selectedModel.engine !== InferenceEngine.openai) {
return null return null

View File

@ -13,20 +13,26 @@ import {
} from '@janhq/core' } from '@janhq/core'
import { useAtomValue, useSetAtom } from 'jotai' import { useAtomValue, useSetAtom } from 'jotai'
import { activeModelAtom, stateModelAtom } from '@/hooks/useActiveModel' import {
activeModelAtom,
loadModelErrorAtom,
stateModelAtom,
} from '@/hooks/useActiveModel'
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels' import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'
import { queuedMessageAtom } from '@/hooks/useSendChatMessage'
import { toaster } from '../Toast' import { toaster } from '../Toast'
import { extensionManager } from '@/extension' import { extensionManager } from '@/extension'
import { import {
addNewMessageAtom, addNewMessageAtom,
updateMessageAtom, updateMessageAtom,
generateResponseAtom,
} from '@/helpers/atoms/ChatMessage.atom' } from '@/helpers/atoms/ChatMessage.atom'
import { import {
updateThreadWaitingForResponseAtom, updateThreadWaitingForResponseAtom,
threadsAtom, threadsAtom,
isGeneratingResponseAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
export default function EventHandler({ children }: { children: ReactNode }) { export default function EventHandler({ children }: { children: ReactNode }) {
@ -35,12 +41,14 @@ export default function EventHandler({ children }: { children: ReactNode }) {
const { downloadedModels } = useGetDownloadedModels() const { downloadedModels } = useGetDownloadedModels()
const setActiveModel = useSetAtom(activeModelAtom) const setActiveModel = useSetAtom(activeModelAtom)
const setStateModel = useSetAtom(stateModelAtom) const setStateModel = useSetAtom(stateModelAtom)
const setGenerateResponse = useSetAtom(generateResponseAtom) const setQueuedMessage = useSetAtom(queuedMessageAtom)
const setLoadModelError = useSetAtom(loadModelErrorAtom)
const updateThreadWaiting = useSetAtom(updateThreadWaitingForResponseAtom) const updateThreadWaiting = useSetAtom(updateThreadWaitingForResponseAtom)
const threads = useAtomValue(threadsAtom) const threads = useAtomValue(threadsAtom)
const modelsRef = useRef(downloadedModels) const modelsRef = useRef(downloadedModels)
const threadsRef = useRef(threads) const threadsRef = useRef(threads)
const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
useEffect(() => { useEffect(() => {
threadsRef.current = threads threadsRef.current = threads
@ -52,7 +60,6 @@ export default function EventHandler({ children }: { children: ReactNode }) {
const onNewMessageResponse = useCallback( const onNewMessageResponse = useCallback(
(message: ThreadMessage) => { (message: ThreadMessage) => {
setGenerateResponse(false)
addNewMessage(message) addNewMessage(message)
}, },
[addNewMessage] [addNewMessage]
@ -64,6 +71,7 @@ export default function EventHandler({ children }: { children: ReactNode }) {
toaster({ toaster({
title: 'Success!', title: 'Success!',
description: `Model ${model.id} has been started.`, description: `Model ${model.id} has been started.`,
type: 'success',
}) })
setStateModel(() => ({ setStateModel(() => ({
state: 'stop', state: 'stop',
@ -85,18 +93,19 @@ export default function EventHandler({ children }: { children: ReactNode }) {
(res: any) => { (res: any) => {
const errorMessage = `${res.error}` const errorMessage = `${res.error}`
console.error('Failed to load model: ' + errorMessage) console.error('Failed to load model: ' + errorMessage)
setLoadModelError(errorMessage)
setStateModel(() => ({ setStateModel(() => ({
state: 'start', state: 'start',
loading: false, loading: false,
model: res.modelId, model: res.modelId,
})) }))
setQueuedMessage(false)
}, },
[setStateModel] [setStateModel, setQueuedMessage, setLoadModelError]
) )
const onMessageResponseUpdate = useCallback( const onMessageResponseUpdate = useCallback(
(message: ThreadMessage) => { (message: ThreadMessage) => {
setGenerateResponse(false)
updateMessage( updateMessage(
message.id, message.id,
message.thread_id, message.thread_id,
@ -104,11 +113,17 @@ export default function EventHandler({ children }: { children: ReactNode }) {
message.status message.status
) )
if (message.status === MessageStatus.Pending) { if (message.status === MessageStatus.Pending) {
if (message.content.length) {
updateThreadWaiting(message.thread_id, false)
setIsGeneratingResponse(false)
}
return return
} }
// Mark the thread as not waiting for response // Mark the thread as not waiting for response
updateThreadWaiting(message.thread_id, false) updateThreadWaiting(message.thread_id, false)
setIsGeneratingResponse(false)
const thread = threadsRef.current?.find((e) => e.id == message.thread_id) const thread = threadsRef.current?.find((e) => e.id == message.thread_id)
if (thread) { if (thread) {
const messageContent = message.content[0]?.text.value ?? '' const messageContent = message.content[0]?.text.value ?? ''

View File

@ -6,8 +6,6 @@ import { Toaster } from 'react-hot-toast'
import { TooltipProvider } from '@janhq/uikit' import { TooltipProvider } from '@janhq/uikit'
import { PostHogProvider } from 'posthog-js/react'
import GPUDriverPrompt from '@/containers/GPUDriverPromptModal' import GPUDriverPrompt from '@/containers/GPUDriverPromptModal'
import EventListenerWrapper from '@/containers/Providers/EventListener' import EventListenerWrapper from '@/containers/Providers/EventListener'
import JotaiWrapper from '@/containers/Providers/Jotai' import JotaiWrapper from '@/containers/Providers/Jotai'
@ -21,7 +19,7 @@ import {
setupBaseExtensions, setupBaseExtensions,
} from '@/services/extensionService' } from '@/services/extensionService'
import { instance } from '@/utils/posthog' import Umami from '@/utils/umami'
import KeyListener from './KeyListener' import KeyListener from './KeyListener'
@ -70,25 +68,22 @@ const Providers = (props: PropsWithChildren) => {
}, [setupCore]) }, [setupCore])
return ( return (
<PostHogProvider client={instance}> <JotaiWrapper>
<JotaiWrapper> <ThemeWrapper>
<ThemeWrapper> <Umami />
{setupCore && activated && ( {setupCore && activated && (
<KeyListener> <KeyListener>
<FeatureToggleWrapper> <FeatureToggleWrapper>
<EventListenerWrapper> <EventListenerWrapper>
<TooltipProvider delayDuration={0}> <TooltipProvider delayDuration={0}>{children}</TooltipProvider>
{children} {!isMac && <GPUDriverPrompt />}
</TooltipProvider> </EventListenerWrapper>
{!isMac && <GPUDriverPrompt />} <Toaster />
</EventListenerWrapper> </FeatureToggleWrapper>
<Toaster position="top-right" /> </KeyListener>
</FeatureToggleWrapper> )}
</KeyListener> </ThemeWrapper>
)} </JotaiWrapper>
</ThemeWrapper>
</JotaiWrapper>
</PostHogProvider>
) )
} }

View File

@ -6,7 +6,99 @@ import { twMerge } from 'tailwind-merge'
type Props = { type Props = {
title?: string title?: string
description?: string description?: string
type?: 'default' | 'error' | 'success' type?: 'default' | 'error' | 'success' | 'warning'
}
const ErrorIcon = () => {
return (
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M20 10C20 15.5228 15.5228 20 10 20H0.993697C0.110179 20 -0.332289 18.9229 0.292453 18.2929L2.2495 16.3195C0.843343 14.597 1.21409e-08 12.397 1.21409e-08 10C1.21409e-08 4.47715 4.47715 0 10 0C15.5228 0 20 4.47715 20 10ZM13.2071 6.79289C13.5976 7.18342 13.5976 7.81658 13.2071 8.20711L11.4142 10L13.2071 11.7929C13.5976 12.1834 13.5976 12.8166 13.2071 13.2071C12.8166 13.5976 12.1834 13.5976 11.7929 13.2071L10 11.4142L8.20711 13.2071C7.81658 13.5976 7.18342 13.5976 6.79289 13.2071C6.40237 12.8166 6.40237 12.1834 6.79289 11.7929L8.58579 10L6.79289 8.20711C6.40237 7.81658 6.40237 7.18342 6.79289 6.79289C7.18342 6.40237 7.81658 6.40237 8.20711 6.79289L10 8.58579L11.7929 6.79289C12.1834 6.40237 12.8166 6.40237 13.2071 6.79289Z"
fill="#EA2E4E"
/>
</svg>
)
}
const WarningIcon = () => {
return (
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M20 10C20 15.5228 15.5228 20 10 20H0.993697C0.110179 20 -0.332289 18.9229 0.292453 18.2929L2.2495 16.3195C0.843343 14.597 1.21409e-08 12.397 1.21409e-08 10C1.21409e-08 4.47715 4.47715 0 10 0C15.5228 0 20 4.47715 20 10ZM10.99 6C10.99 5.44772 10.5446 5 9.99502 5C9.44549 5 9 5.44772 9 6V10C9 10.5523 9.44549 11 9.99502 11C10.5446 11 10.99 10.5523 10.99 10V6ZM9.99502 13C9.44549 13 9 13.4477 9 14C9 14.5523 9.44549 15 9.99502 15H10.005C10.5545 15 11 14.5523 11 14C11 13.4477 10.5545 13 10.005 13H9.99502Z"
fill="#FACC15"
/>
</svg>
)
}
const SuccessIcon = () => {
return (
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M20 10C20 15.5228 15.5228 20 10 20H0.993697C0.110179 20 -0.332289 18.9229 0.292453 18.2929L2.2495 16.3195C0.843343 14.597 1.21409e-08 12.397 1.21409e-08 10C1.21409e-08 4.47715 4.47715 0 10 0C15.5228 0 20 4.47715 20 10ZM13.7071 8.70711C14.0976 8.31658 14.0976 7.68342 13.7071 7.29289C13.3166 6.90237 12.6834 6.90237 12.2929 7.29289L9 10.5858L7.70711 9.2929C7.31658 8.90237 6.68342 8.90237 6.29289 9.2929C5.90237 9.68342 5.90237 10.3166 6.29289 10.7071L8.29289 12.7071C8.48043 12.8946 8.73478 13 9 13C9.26522 13 9.51957 12.8946 9.70711 12.7071L13.7071 8.70711Z"
fill="#34D399"
/>
</svg>
)
}
const DefaultIcon = () => {
return (
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M10 20C15.5228 20 20 15.5228 20 10C20 4.47715 15.5228 0 10 0C4.47715 0 2.11188e-08 4.47715 2.11188e-08 10C2.11188e-08 12.397 0.843343 14.597 2.2495 16.3195L0.292453 18.2929C-0.332289 18.9229 0.110179 20 0.993697 20H10ZM5.5 8C5.5 7.44772 5.94772 7 6.5 7H13.5C14.0523 7 14.5 7.44772 14.5 8C14.5 8.55229 14.0523 9 13.5 9H6.5C5.94772 9 5.5 8.55229 5.5 8ZM6.5 11C5.94772 11 5.5 11.4477 5.5 12C5.5 12.5523 5.94772 13 6.5 13H9.5C10.0523 13 10.5 12.5523 10.5 12C10.5 11.4477 10.0523 11 9.5 11H6.5Z"
fill="#60A5FA"
/>
</svg>
)
}
const renderIcon = (type: string) => {
switch (type) {
case 'warning':
return <WarningIcon />
case 'error':
return <ErrorIcon />
case 'success':
return <SuccessIcon />
default:
return <DefaultIcon />
}
} }
export function toaster(props: Props) { export function toaster(props: Props) {
@ -16,37 +108,52 @@ export function toaster(props: Props) {
return ( return (
<div <div
className={twMerge( className={twMerge(
'unset-drag relative flex min-w-[200px] max-w-[350px] gap-x-4 rounded-lg border border-border bg-background px-4 py-3', 'unset-drag dark:bg-zinc-white relative flex animate-enter items-center gap-x-4 rounded-lg bg-foreground px-4 py-2 text-white dark:border dark:border-border',
t.visible ? 'animate-enter' : 'animate-leave', t.visible ? 'animate-enter' : 'animate-leave'
type === 'success' && 'bg-primary text-primary-foreground'
)} )}
> >
<div> <div className="flex items-start gap-x-3 dark:text-black">
<h1 <div className="mt-1">{renderIcon(type)}</div>
className={twMerge( <div className="pr-4">
'font-medium', <h1 className="font-bold">{title}</h1>
type === 'success' && 'font-medium text-primary-foreground' <p>{description}</p>
)} </div>
> <XIcon
{title} size={24}
</h1> className="absolute right-2 top-2 w-4 cursor-pointer dark:text-black"
<p onClick={() => toast.dismiss(t.id)}
className={twMerge( />
'mt-1 text-muted-foreground',
type === 'success' && 'text-primary-foreground/80'
)}
>
{description}
</p>
</div> </div>
<XIcon
size={24}
className="absolute right-2 top-2 w-4 cursor-pointer text-muted-foreground"
onClick={() => toast.dismiss(t.id)}
/>
</div> </div>
) )
}, },
{ id: 'toast', duration: 3000 } { id: 'toast', duration: 2000, position: 'top-right' }
)
}
export function snackbar(props: Props) {
const { description, type = 'default' } = props
return toast.custom(
(t) => {
return (
<div
className={twMerge(
'unset-drag dark:bg-zinc-white relative bottom-2 flex animate-enter items-center gap-x-4 rounded-lg bg-foreground px-4 py-2 text-white dark:border dark:border-border',
t.visible ? 'animate-enter' : 'animate-leave'
)}
>
<div className="flex items-start gap-x-3 dark:text-black">
<div>{renderIcon(type)}</div>
<p className="pr-4">{description}</p>
<XIcon
size={24}
className="absolute right-2 top-1/2 w-4 -translate-y-1/2 cursor-pointer dark:text-black"
onClick={() => toast.dismiss(t.id)}
/>
</div>
</div>
)
},
{ id: 'snackbar', duration: 2000, position: 'bottom-center' }
) )
} }

View File

@ -14,8 +14,6 @@ import {
/** /**
* Stores all chat messages for all threads * Stores all chat messages for all threads
*/ */
export const generateResponseAtom = atom<boolean>(false)
export const chatMessages = atom<Record<string, ThreadMessage[]>>({}) export const chatMessages = atom<Record<string, ThreadMessage[]>>({})
/** /**

View File

@ -2,5 +2,6 @@ import { atom } from 'jotai'
export const totalRamAtom = atom<number>(0) export const totalRamAtom = atom<number>(0)
export const usedRamAtom = atom<number>(0) export const usedRamAtom = atom<number>(0)
export const availableRamAtom = atom<number>(0)
export const cpuUsageAtom = atom<number>(0) export const cpuUsageAtom = atom<number>(0)

View File

@ -23,6 +23,7 @@ export const setActiveThreadIdAtom = atom(
export const waitingToSendMessage = atom<boolean | undefined>(undefined) export const waitingToSendMessage = atom<boolean | undefined>(undefined)
export const isGeneratingResponseAtom = atom<boolean | undefined>(undefined)
/** /**
* Stores all thread states for the current user * Stores all thread states for the current user
*/ */
@ -46,18 +47,6 @@ export const deleteThreadStateAtom = atom(
} }
) )
export const updateThreadInitSuccessAtom = atom(
null,
(get, set, threadId: string) => {
const currentState = { ...get(threadStatesAtom) }
currentState[threadId] = {
...currentState[threadId],
isFinishInit: true,
}
set(threadStatesAtom, currentState)
}
)
export const updateThreadWaitingForResponseAtom = atom( export const updateThreadWaitingForResponseAtom = atom(
null, null,
(get, set, threadId: string, waitingForResponse: boolean) => { (get, set, threadId: string, waitingForResponse: boolean) => {

View File

@ -1,5 +1,5 @@
import { events, Model, ModelEvent } from '@janhq/core' import { events, Model, ModelEvent } from '@janhq/core'
import { atom, useAtom, useAtomValue } from 'jotai' import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import { toaster } from '@/containers/Toast' import { toaster } from '@/containers/Toast'
@ -9,6 +9,7 @@ import { LAST_USED_MODEL_ID } from './useRecommendedModel'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom' import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
export const activeModelAtom = atom<Model | undefined>(undefined) export const activeModelAtom = atom<Model | undefined>(undefined)
export const loadModelErrorAtom = atom<string | undefined>(undefined)
export const stateModelAtom = atom({ export const stateModelAtom = atom({
state: 'start', state: 'start',
@ -21,6 +22,7 @@ export function useActiveModel() {
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const [stateModel, setStateModel] = useAtom(stateModelAtom) const [stateModel, setStateModel] = useAtom(stateModelAtom)
const { downloadedModels } = useGetDownloadedModels() const { downloadedModels } = useGetDownloadedModels()
const setLoadModelError = useSetAtom(loadModelErrorAtom)
const startModel = async (modelId: string) => { const startModel = async (modelId: string) => {
if ( if (
@ -31,6 +33,7 @@ export function useActiveModel() {
return return
} }
// TODO: incase we have multiple assistants, the configuration will be from assistant // TODO: incase we have multiple assistants, the configuration will be from assistant
setLoadModelError(undefined)
setActiveModel(undefined) setActiveModel(undefined)
@ -42,6 +45,7 @@ export function useActiveModel() {
toaster({ toaster({
title: `Model ${modelId} not found!`, title: `Model ${modelId} not found!`,
description: `Please download the model first.`, description: `Please download the model first.`,
type: 'warning',
}) })
setStateModel(() => ({ setStateModel(() => ({
state: 'start', state: 'start',

View File

@ -7,21 +7,23 @@ import {
ThreadState, ThreadState,
Model, Model,
} from '@janhq/core' } from '@janhq/core'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai' import { atom, useSetAtom } from 'jotai'
import { selectedModelAtom } from '@/containers/DropdownListSidebar'
import { fileUploadAtom } from '@/containers/Providers/Jotai' import { fileUploadAtom } from '@/containers/Providers/Jotai'
import { generateThreadId } from '@/utils/thread' import { generateThreadId } from '@/utils/thread'
import useDeleteThread from './useDeleteThread' import useRecommendedModel from './useRecommendedModel'
import useSetActiveThread from './useSetActiveThread'
import { extensionManager } from '@/extension' import { extensionManager } from '@/extension'
import { import {
threadsAtom, threadsAtom,
setActiveThreadIdAtom,
threadStatesAtom, threadStatesAtom,
updateThreadAtom, updateThreadAtom,
updateThreadInitSuccessAtom, setThreadModelParamsAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
const createNewThreadAtom = atom(null, (get, set, newThread: Thread) => { const createNewThreadAtom = atom(null, (get, set, newThread: Thread) => {
@ -32,7 +34,6 @@ const createNewThreadAtom = atom(null, (get, set, newThread: Thread) => {
hasMore: false, hasMore: false,
waitingForResponse: false, waitingForResponse: false,
lastMessage: undefined, lastMessage: undefined,
isFinishInit: false,
} }
currentState[newThread.id] = threadState currentState[newThread.id] = threadState
set(threadStatesAtom, currentState) set(threadStatesAtom, currentState)
@ -43,46 +44,35 @@ const createNewThreadAtom = atom(null, (get, set, newThread: Thread) => {
}) })
export const useCreateNewThread = () => { export const useCreateNewThread = () => {
const threadStates = useAtomValue(threadStatesAtom)
const updateThreadFinishInit = useSetAtom(updateThreadInitSuccessAtom)
const createNewThread = useSetAtom(createNewThreadAtom) const createNewThread = useSetAtom(createNewThreadAtom)
const setActiveThreadId = useSetAtom(setActiveThreadIdAtom) const { setActiveThread } = useSetActiveThread()
const updateThread = useSetAtom(updateThreadAtom) const updateThread = useSetAtom(updateThreadAtom)
const [fileUpload, setFileUpload] = useAtom(fileUploadAtom) const setFileUpload = useSetAtom(fileUploadAtom)
const { deleteThread } = useDeleteThread() const setSelectedModel = useSetAtom(selectedModelAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const { recommendedModel, downloadedModels } = useRecommendedModel()
const requestCreateNewThread = async ( const requestCreateNewThread = async (
assistant: Assistant, assistant: Assistant,
model?: Model | undefined model?: Model | undefined
) => { ) => {
// loop through threads state and filter if there's any thread that is not finish init const defaultModel = model ?? recommendedModel ?? downloadedModels[0]
let unfinishedInitThreadId: string | undefined = undefined
for (const key in threadStates) {
const isFinishInit = threadStates[key].isFinishInit ?? true
if (!isFinishInit) {
unfinishedInitThreadId = key
break
}
}
if (unfinishedInitThreadId) {
await deleteThread(unfinishedInitThreadId)
}
const modelId = model ? model.id : '*'
const createdAt = Date.now() const createdAt = Date.now()
const assistantInfo: ThreadAssistantInfo = { const assistantInfo: ThreadAssistantInfo = {
assistant_id: assistant.id, assistant_id: assistant.id,
assistant_name: assistant.name, assistant_name: assistant.name,
tools: assistant.tools, tools: assistant.tools,
model: { model: {
id: modelId, id: defaultModel?.id ?? '*',
settings: {}, settings: defaultModel?.settings ?? {},
parameters: {}, parameters: defaultModel?.parameters ?? {},
engine: undefined, engine: defaultModel?.engine,
}, },
instructions: assistant.instructions, instructions: assistant.instructions,
} }
const threadId = generateThreadId(assistant.id) const threadId = generateThreadId(assistant.id)
const thread: Thread = { const thread: Thread = {
id: threadId, id: threadId,
@ -94,22 +84,27 @@ export const useCreateNewThread = () => {
} }
// add the new thread on top of the thread list to the state // add the new thread on top of the thread list to the state
//TODO: Why do we have thread list then thread states? Should combine them
createNewThread(thread) createNewThread(thread)
setActiveThreadId(thread.id)
setSelectedModel(defaultModel)
setThreadModelParams(thread.id, {
...defaultModel?.settings,
...defaultModel?.parameters,
})
// Delete the file upload state // Delete the file upload state
setFileUpload([]) setFileUpload([])
// Update thread metadata
await updateThreadMetadata(thread)
setActiveThread(thread)
} }
function updateThreadMetadata(thread: Thread) { async function updateThreadMetadata(thread: Thread) {
updateThread(thread) updateThread(thread)
const threadState = threadStates[thread.id]
const isFinishInit = threadState?.isFinishInit ?? true
if (!isFinishInit) {
updateThreadFinishInit(thread.id)
}
extensionManager await extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational) .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.saveThread(thread) ?.saveThread(thread)
} }

View File

@ -19,6 +19,7 @@ export default function useDeleteModel() {
toaster({ toaster({
title: 'Model Deletion Successful', title: 'Model Deletion Successful',
description: `The model ${model.id} has been successfully deleted.`, description: `The model ${model.id} has been successfully deleted.`,
type: 'success',
}) })
} }

View File

@ -21,7 +21,6 @@ import {
threadsAtom, threadsAtom,
setActiveThreadIdAtom, setActiveThreadIdAtom,
deleteThreadStateAtom, deleteThreadStateAtom,
threadStatesAtom,
updateThreadStateLastMessageAtom, updateThreadStateLastMessageAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
@ -34,7 +33,6 @@ export default function useDeleteThread() {
const deleteMessages = useSetAtom(deleteChatMessagesAtom) const deleteMessages = useSetAtom(deleteChatMessagesAtom)
const cleanMessages = useSetAtom(cleanChatMessagesAtom) const cleanMessages = useSetAtom(cleanChatMessagesAtom)
const deleteThreadState = useSetAtom(deleteThreadStateAtom) const deleteThreadState = useSetAtom(deleteThreadStateAtom)
const threadStates = useAtomValue(threadStatesAtom)
const updateThreadLastMessage = useSetAtom(updateThreadStateLastMessageAtom) const updateThreadLastMessage = useSetAtom(updateThreadStateLastMessageAtom)
const cleanThread = async (threadId: string) => { const cleanThread = async (threadId: string) => {
@ -49,6 +47,14 @@ export default function useDeleteThread() {
threadId, threadId,
messages.filter((msg) => msg.role === ChatCompletionRole.System) messages.filter((msg) => msg.role === ChatCompletionRole.System)
) )
thread.metadata = {
...thread.metadata,
lastMessage: undefined,
}
await extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.saveThread(thread)
updateThreadLastMessage(threadId, undefined) updateThreadLastMessage(threadId, undefined)
} }
} }
@ -66,21 +72,16 @@ export default function useDeleteThread() {
const availableThreads = threads.filter((c) => c.id !== threadId) const availableThreads = threads.filter((c) => c.id !== threadId)
setThreads(availableThreads) setThreads(availableThreads)
const deletingThreadState = threadStates[threadId]
const isFinishInit = deletingThreadState?.isFinishInit ?? true
// delete the thread state // delete the thread state
deleteThreadState(threadId) deleteThreadState(threadId)
if (isFinishInit) { deleteMessages(threadId)
deleteMessages(threadId) setCurrentPrompt('')
setCurrentPrompt('') toaster({
toaster({ title: 'Thread successfully deleted.',
title: 'Thread successfully deleted.', description: `Thread ${threadId} has been successfully deleted.`,
description: `Thread ${threadId} has been successfully deleted.`, type: 'success',
}) })
}
if (availableThreads.length > 0) { if (availableThreads.length > 0) {
setActiveThreadId(availableThreads[0].id) setActiveThreadId(availableThreads[0].id)
} else { } else {

View File

@ -26,6 +26,7 @@ const setDownloadStateSuccessAtom = atom(null, (get, set, modelId: string) => {
toaster({ toaster({
title: 'Download Completed', title: 'Download Completed',
description: `Download ${modelId} completed`, description: `Download ${modelId} completed`,
type: 'success',
}) })
}) })
@ -61,6 +62,7 @@ const setDownloadStateCancelledAtom = atom(
toaster({ toaster({
title: 'Cancel Download', title: 'Cancel Download',
description: `Model ${modelId} cancel download`, description: `Model ${modelId} cancel download`,
type: 'warning',
}) })
return return

View File

@ -1,7 +1,9 @@
import { fs, joinPath } from '@janhq/core' import { useCallback } from 'react'
import { fs, joinPath, events, AppConfigurationEventName } from '@janhq/core'
export const useEngineSettings = () => { export const useEngineSettings = () => {
const readOpenAISettings = async () => { const readOpenAISettings = useCallback(async () => {
if ( if (
!(await fs.existsSync(await joinPath(['file://engines', 'openai.json']))) !(await fs.existsSync(await joinPath(['file://engines', 'openai.json'])))
) )
@ -14,17 +16,24 @@ export const useEngineSettings = () => {
return typeof settings === 'object' ? settings : JSON.parse(settings) return typeof settings === 'object' ? settings : JSON.parse(settings)
} }
return {} return {}
} }, [])
const saveOpenAISettings = async ({ const saveOpenAISettings = async ({
apiKey, apiKey,
}: { }: {
apiKey: string | undefined apiKey: string | undefined
}) => { }) => {
const settings = await readOpenAISettings() const settings = await readOpenAISettings()
const settingFilePath = await joinPath(['file://engines', 'openai.json'])
settings.api_key = apiKey settings.api_key = apiKey
await fs.writeFileSync(
await joinPath(['file://engines', 'openai.json']), await fs.writeFileSync(settingFilePath, JSON.stringify(settings))
JSON.stringify(settings)
// Sec: Don't attach the settings data to the event
events.emit(
AppConfigurationEventName.OnConfigurationUpdate,
settingFilePath
) )
} }
return { readOpenAISettings, saveOpenAISettings } return { readOpenAISettings, saveOpenAISettings }

View File

@ -0,0 +1,59 @@
import { useEffect, useState } from 'react'
import { fs, AppConfiguration, joinPath, getUserHomePath } from '@janhq/core'
export default function useFactoryReset() {
const [defaultJanDataFolder, setdefaultJanDataFolder] = useState('')
useEffect(() => {
async function getDefaultJanDataFolder() {
const homePath = await getUserHomePath()
const defaultJanDataFolder = await joinPath([homePath, 'jan'])
setdefaultJanDataFolder(defaultJanDataFolder)
}
getDefaultJanDataFolder()
}, [])
const resetAll = async (keepCurrentFolder?: boolean) => {
// read the place of jan data folder
const appConfiguration: AppConfiguration | undefined =
await window.core?.api?.getAppConfigurations()
if (!appConfiguration) {
console.debug('Failed to get app configuration')
}
console.debug('appConfiguration: ', appConfiguration)
const janDataFolderPath = appConfiguration!.data_folder
if (defaultJanDataFolder === janDataFolderPath) {
console.debug('Jan data folder is already at user home')
} else {
// if jan data folder is not at user home, we update the app configuration to point to user home
if (!keepCurrentFolder) {
const configuration: AppConfiguration = {
data_folder: defaultJanDataFolder,
}
await window.core?.api?.updateAppConfiguration(configuration)
}
}
const modelPath = await joinPath([janDataFolderPath, 'models'])
const threadPath = await joinPath([janDataFolderPath, 'threads'])
console.debug(`Removing models at ${modelPath}`)
await fs.rmdirSync(modelPath, { recursive: true })
console.debug(`Removing threads at ${threadPath}`)
await fs.rmdirSync(threadPath, { recursive: true })
// reset the localStorage
localStorage.clear()
await window.core?.api?.relaunch()
}
return {
defaultJanDataFolder,
resetAll,
}
}

View File

@ -6,6 +6,7 @@ import { useSetAtom } from 'jotai'
import { extensionManager } from '@/extension/ExtensionManager' import { extensionManager } from '@/extension/ExtensionManager'
import { import {
availableRamAtom,
cpuUsageAtom, cpuUsageAtom,
totalRamAtom, totalRamAtom,
usedRamAtom, usedRamAtom,
@ -16,6 +17,7 @@ export default function useGetSystemResources() {
const [cpu, setCPU] = useState<number>(0) const [cpu, setCPU] = useState<number>(0)
const setTotalRam = useSetAtom(totalRamAtom) const setTotalRam = useSetAtom(totalRamAtom)
const setUsedRam = useSetAtom(usedRamAtom) const setUsedRam = useSetAtom(usedRamAtom)
const setAvailableRam = useSetAtom(availableRamAtom)
const setCpuUsage = useSetAtom(cpuUsageAtom) const setCpuUsage = useSetAtom(cpuUsageAtom)
const getSystemResources = async () => { const getSystemResources = async () => {
@ -40,6 +42,10 @@ export default function useGetSystemResources() {
setTotalRam(resourceInfor.mem.totalMemory) setTotalRam(resourceInfor.mem.totalMemory)
setRam(Math.round(ram * 100)) setRam(Math.round(ram * 100))
if (resourceInfor.mem.totalMemory && resourceInfor.mem.usedMemory)
setAvailableRam(
resourceInfor.mem.totalMemory - resourceInfor.mem.usedMemory
)
setCPU(Math.round(currentLoadInfor?.cpu?.usage ?? 0)) setCPU(Math.round(currentLoadInfor?.cpu?.usage ?? 0))
setCpuUsage(Math.round(currentLoadInfor?.cpu?.usage ?? 0)) setCpuUsage(Math.round(currentLoadInfor?.cpu?.usage ?? 0))
} }

View File

@ -3,28 +3,23 @@ import { useAtomValue } from 'jotai'
import { selectedModelAtom } from '@/containers/DropdownListSidebar' import { selectedModelAtom } from '@/containers/DropdownListSidebar'
import { activeThreadAtom, threadStatesAtom } from '@/helpers/atoms/Thread.atom' import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
export const usePath = () => { export const usePath = () => {
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const threadStates = useAtomValue(threadStatesAtom)
const selectedModel = useAtomValue(selectedModelAtom) const selectedModel = useAtomValue(selectedModelAtom)
const onReviewInFinder = async (type: string) => { const onReviewInFinder = async (type: string) => {
if (!activeThread) return // TODO: this logic should be refactored.
const activeThreadState = threadStates[activeThread.id] if (type !== 'Model' && !activeThread) return
if (!activeThreadState.isFinishInit) {
alert('Thread is not started yet')
return
}
const userSpace = await getJanDataFolderPath() const userSpace = await getJanDataFolderPath()
let filePath = undefined let filePath = undefined
const assistantId = activeThread.assistants[0]?.assistant_id const assistantId = activeThread?.assistants[0]?.assistant_id
switch (type) { switch (type) {
case 'Engine': case 'Engine':
case 'Thread': case 'Thread':
filePath = await joinPath(['threads', activeThread.id]) filePath = await joinPath(['threads', activeThread?.id ?? ''])
break break
case 'Model': case 'Model':
if (!selectedModel) return if (!selectedModel) return
@ -44,20 +39,20 @@ export const usePath = () => {
} }
const onViewJson = async (type: string) => { const onViewJson = async (type: string) => {
if (!activeThread) return // TODO: this logic should be refactored.
const activeThreadState = threadStates[activeThread.id] if (type !== 'Model' && !activeThread) return
if (!activeThreadState.isFinishInit) {
alert('Thread is not started yet')
return
}
const userSpace = await getJanDataFolderPath() const userSpace = await getJanDataFolderPath()
let filePath = undefined let filePath = undefined
const assistantId = activeThread.assistants[0]?.assistant_id const assistantId = activeThread?.assistants[0]?.assistant_id
switch (type) { switch (type) {
case 'Engine': case 'Engine':
case 'Thread': case 'Thread':
filePath = await joinPath(['threads', activeThread.id, 'thread.json']) filePath = await joinPath([
'threads',
activeThread?.id ?? '',
'thread.json',
])
break break
case 'Model': case 'Model':
if (!selectedModel) return if (!selectedModel) return
@ -78,11 +73,6 @@ export const usePath = () => {
const onViewFile = async (id: string) => { const onViewFile = async (id: string) => {
if (!activeThread) return if (!activeThread) return
const activeThreadState = threadStates[activeThread.id]
if (!activeThreadState.isFinishInit) {
alert('Thread is not started yet')
return
}
const userSpace = await getJanDataFolderPath() const userSpace = await getJanDataFolderPath()
let filePath = undefined let filePath = undefined
@ -92,9 +82,21 @@ export const usePath = () => {
openFileExplorer(fullPath) openFileExplorer(fullPath)
} }
const onViewFileContainer = async () => {
if (!activeThread) return
const userSpace = await getJanDataFolderPath()
let filePath = undefined
filePath = await joinPath(['threads', `${activeThread.id}/files`])
if (!filePath) return
const fullPath = await joinPath([userSpace, filePath])
openFileExplorer(fullPath)
}
return { return {
onReviewInFinder, onReviewInFinder,
onViewJson, onViewJson,
onViewFile, onViewFile,
onViewFileContainer,
} }
} }

View File

@ -26,7 +26,6 @@ export default function useRecommendedModel() {
const activeModel = useAtomValue(activeModelAtom) const activeModel = useAtomValue(activeModelAtom)
const [downloadedModels, setDownloadedModels] = useState<Model[]>([]) const [downloadedModels, setDownloadedModels] = useState<Model[]>([])
const [recommendedModel, setRecommendedModel] = useState<Model | undefined>() const [recommendedModel, setRecommendedModel] = useState<Model | undefined>()
const threadStates = useAtomValue(threadStatesAtom)
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const getAndSortDownloadedModels = useCallback(async (): Promise<Model[]> => { const getAndSortDownloadedModels = useCallback(async (): Promise<Model[]> => {
@ -43,30 +42,12 @@ export default function useRecommendedModel() {
Model | undefined Model | undefined
> => { > => {
const models = await getAndSortDownloadedModels() const models = await getAndSortDownloadedModels()
if (!activeThread) { if (!activeThread) return
return const modelId = activeThread.assistants[0]?.model.id
} const model = models.find((model) => model.id === modelId)
const finishInit = threadStates[activeThread.id].isFinishInit ?? true if (model) {
if (finishInit) { setRecommendedModel(model)
const modelId = activeThread.assistants[0]?.model.id
const model = models.find((model) => model.id === modelId)
if (model) {
setRecommendedModel(model)
}
return
} else {
const modelId = activeThread.assistants[0]?.model.id
if (modelId !== '*') {
const model = models.find((model) => model.id === modelId)
if (model) {
setRecommendedModel(model)
}
return
}
} }
if (activeModel) { if (activeModel) {

View File

@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
import { useEffect, useRef, useState } from 'react' import { useEffect, useRef } from 'react'
import { import {
ChatCompletionMessage, ChatCompletionMessage,
@ -18,73 +18,74 @@ import {
ChatCompletionMessageContentType, ChatCompletionMessageContentType,
AssistantTool, AssistantTool,
} from '@janhq/core' } from '@janhq/core'
import { useAtom, useAtomValue, useSetAtom } from 'jotai' import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import { ulid } from 'ulid' import { ulid } from 'ulid'
import { selectedModelAtom } from '@/containers/DropdownListSidebar' import { selectedModelAtom } from '@/containers/DropdownListSidebar'
import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai' import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai'
import { toaster } from '@/containers/Toast'
import { getBase64 } from '@/utils/base64' import { getBase64 } from '@/utils/base64'
import { toRuntimeParams, toSettingParams } from '@/utils/modelParam' import { toRuntimeParams, toSettingParams } from '@/utils/modelParam'
import { useActiveModel } from './useActiveModel' import { loadModelErrorAtom, useActiveModel } from './useActiveModel'
import { extensionManager } from '@/extension/ExtensionManager' import { extensionManager } from '@/extension/ExtensionManager'
import { import {
addNewMessageAtom, addNewMessageAtom,
generateResponseAtom,
getCurrentChatMessagesAtom, getCurrentChatMessagesAtom,
} from '@/helpers/atoms/ChatMessage.atom' } from '@/helpers/atoms/ChatMessage.atom'
import { import {
activeThreadAtom, activeThreadAtom,
engineParamsUpdateAtom, engineParamsUpdateAtom,
getActiveThreadModelParamsAtom, getActiveThreadModelParamsAtom,
threadStatesAtom, isGeneratingResponseAtom,
updateThreadAtom, updateThreadAtom,
updateThreadInitSuccessAtom,
updateThreadWaitingForResponseAtom, updateThreadWaitingForResponseAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
export const queuedMessageAtom = atom(false)
export const reloadModelAtom = atom(false)
export default function useSendChatMessage() { export default function useSendChatMessage() {
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const addNewMessage = useSetAtom(addNewMessageAtom) const addNewMessage = useSetAtom(addNewMessageAtom)
const updateThread = useSetAtom(updateThreadAtom) const updateThread = useSetAtom(updateThreadAtom)
const updateThreadWaiting = useSetAtom(updateThreadWaitingForResponseAtom) const updateThreadWaiting = useSetAtom(updateThreadWaitingForResponseAtom)
const [currentPrompt, setCurrentPrompt] = useAtom(currentPromptAtom) const setCurrentPrompt = useSetAtom(currentPromptAtom)
const setGenerateResponse = useSetAtom(generateResponseAtom)
const currentMessages = useAtomValue(getCurrentChatMessagesAtom) const currentMessages = useAtomValue(getCurrentChatMessagesAtom)
const { activeModel } = useActiveModel() const { activeModel } = useActiveModel()
const selectedModel = useAtomValue(selectedModelAtom) const selectedModel = useAtomValue(selectedModelAtom)
const { startModel } = useActiveModel() const { startModel } = useActiveModel()
const [queuedMessage, setQueuedMessage] = useState(false) const setQueuedMessage = useSetAtom(queuedMessageAtom)
const loadModelFailed = useAtomValue(loadModelErrorAtom)
const modelRef = useRef<Model | undefined>() const modelRef = useRef<Model | undefined>()
const threadStates = useAtomValue(threadStatesAtom) const loadModelFailedRef = useRef<string | undefined>()
const updateThreadInitSuccess = useSetAtom(updateThreadInitSuccessAtom)
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom) const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)
const engineParamsUpdate = useAtomValue(engineParamsUpdateAtom) const engineParamsUpdate = useAtomValue(engineParamsUpdateAtom)
const setEngineParamsUpdate = useSetAtom(engineParamsUpdateAtom)
const [reloadModel, setReloadModel] = useState(false) const setEngineParamsUpdate = useSetAtom(engineParamsUpdateAtom)
const setReloadModel = useSetAtom(reloadModelAtom)
const [fileUpload, setFileUpload] = useAtom(fileUploadAtom) const [fileUpload, setFileUpload] = useAtom(fileUploadAtom)
const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
useEffect(() => { useEffect(() => {
modelRef.current = activeModel modelRef.current = activeModel
}, [activeModel]) }, [activeModel])
useEffect(() => {
loadModelFailedRef.current = loadModelFailed
}, [loadModelFailed])
const resendChatMessage = async (currentMessage: ThreadMessage) => { const resendChatMessage = async (currentMessage: ThreadMessage) => {
if (!activeThread) { if (!activeThread) {
console.error('No active thread') console.error('No active thread')
return return
} }
setIsGeneratingResponse(true)
updateThreadWaiting(activeThread.id, true) updateThreadWaiting(activeThread.id, true)
const messages: ChatCompletionMessage[] = [ const messages: ChatCompletionMessage[] = [
activeThread.assistants[0]?.instructions, activeThread.assistants[0]?.instructions,
] ]
@ -121,85 +122,28 @@ export default function useSendChatMessage() {
if (activeModel?.id !== modelId) { if (activeModel?.id !== modelId) {
setQueuedMessage(true) setQueuedMessage(true)
startModel(modelId) startModel(modelId)
await WaitForModelStarting(modelId) await waitForModelStarting(modelId)
setQueuedMessage(false) setQueuedMessage(false)
} }
events.emit(MessageEvent.OnMessageSent, messageRequest) events.emit(MessageEvent.OnMessageSent, messageRequest)
} }
// TODO: Refactor @louis const sendChatMessage = async (message: string) => {
const WaitForModelStarting = async (modelId: string) => { if (!message || message.trim().length === 0) return
return new Promise<void>((resolve) => {
setTimeout(async () => {
if (modelRef.current?.id !== modelId) {
console.debug('waiting for model to start')
await WaitForModelStarting(modelId)
resolve()
} else {
resolve()
}
}, 200)
})
}
const sendChatMessage = async () => {
setGenerateResponse(true)
if (!currentPrompt || currentPrompt.trim().length === 0) return
if (!activeThread) { if (!activeThread) {
console.error('No active thread') console.error('No active thread')
return return
} }
setIsGeneratingResponse(true)
if (engineParamsUpdate) setReloadModel(true) if (engineParamsUpdate) setReloadModel(true)
const activeThreadState = threadStates[activeThread.id]
const runtimeParams = toRuntimeParams(activeModelParams) const runtimeParams = toRuntimeParams(activeModelParams)
const settingParams = toSettingParams(activeModelParams) const settingParams = toSettingParams(activeModelParams)
// if the thread is not initialized, we need to initialize it first
if (
!activeThreadState.isFinishInit ||
activeThread.assistants[0].model.id !== selectedModel?.id
) {
if (!selectedModel) {
toaster({ title: 'Please select a model' })
return
}
const assistantId = activeThread.assistants[0].assistant_id ?? ''
const assistantName = activeThread.assistants[0].assistant_name ?? ''
const instructions = activeThread.assistants[0].instructions ?? ''
const tools = activeThread.assistants[0].tools ?? []
const updatedThread: Thread = {
...activeThread,
assistants: [
{
assistant_id: assistantId,
assistant_name: assistantName,
instructions: instructions,
tools: tools,
model: {
id: selectedModel.id,
settings: settingParams,
parameters: runtimeParams,
engine: selectedModel.engine,
},
},
],
}
updateThreadInitSuccess(activeThread.id)
updateThread(updatedThread)
await extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.saveThread(updatedThread)
}
updateThreadWaiting(activeThread.id, true) updateThreadWaiting(activeThread.id, true)
const prompt = message.trim()
const prompt = currentPrompt.trim()
setCurrentPrompt('') setCurrentPrompt('')
const base64Blob = fileUpload[0] const base64Blob = fileUpload[0]
@ -326,6 +270,14 @@ export default function useSendChatMessage() {
setFileUpload([]) setFileUpload([])
} }
const updatedThread: Thread = {
...activeThread,
updated: timestamp,
}
// change last update thread when send message
updateThread(updatedThread)
await extensionManager await extensionManager
.get<ConversationalExtension>(ExtensionTypeEnum.Conversational) .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
?.addNewMessage(threadMessage) ?.addNewMessage(threadMessage)
@ -335,7 +287,7 @@ export default function useSendChatMessage() {
if (activeModel?.id !== modelId) { if (activeModel?.id !== modelId) {
setQueuedMessage(true) setQueuedMessage(true)
startModel(modelId) startModel(modelId)
await WaitForModelStarting(modelId) await waitForModelStarting(modelId)
setQueuedMessage(false) setQueuedMessage(false)
} }
@ -345,10 +297,21 @@ export default function useSendChatMessage() {
setEngineParamsUpdate(false) setEngineParamsUpdate(false)
} }
const waitForModelStarting = async (modelId: string) => {
return new Promise<void>((resolve) => {
setTimeout(async () => {
if (modelRef.current?.id !== modelId && !loadModelFailedRef.current) {
await waitForModelStarting(modelId)
resolve()
} else {
resolve()
}
}, 200)
})
}
return { return {
reloadModel,
sendChatMessage, sendChatMessage,
resendChatMessage, resendChatMessage,
queuedMessage,
} }
} }

View File

@ -1,5 +1,3 @@
import { useEffect } from 'react'
import { import {
InferenceEvent, InferenceEvent,
ExtensionTypeEnum, ExtensionTypeEnum,
@ -15,6 +13,7 @@ import { setConvoMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
import { import {
ModelParams, ModelParams,
getActiveThreadIdAtom, getActiveThreadIdAtom,
isGeneratingResponseAtom,
setActiveThreadIdAtom, setActiveThreadIdAtom,
setThreadModelParamsAtom, setThreadModelParamsAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
@ -24,6 +23,7 @@ export default function useSetActiveThread() {
const setActiveThreadId = useSetAtom(setActiveThreadIdAtom) const setActiveThreadId = useSetAtom(setActiveThreadIdAtom)
const setThreadMessage = useSetAtom(setConvoMessagesAtom) const setThreadMessage = useSetAtom(setConvoMessagesAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom) const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
const setActiveThread = async (thread: Thread) => { const setActiveThread = async (thread: Thread) => {
if (activeThreadId === thread.id) { if (activeThreadId === thread.id) {
@ -31,6 +31,7 @@ export default function useSetActiveThread() {
return return
} }
setIsGeneratingResponse(false)
events.emit(InferenceEvent.OnInferenceStopped, thread.id) events.emit(InferenceEvent.OnInferenceStopped, thread.id)
// load the corresponding messages // load the corresponding messages

View File

@ -1,4 +1,4 @@
import { useEffect, useState } from 'react' import { useCallback, useEffect, useState } from 'react'
import { fs, joinPath } from '@janhq/core' import { fs, joinPath } from '@janhq/core'
import { atom, useAtom } from 'jotai' import { atom, useAtom } from 'jotai'
@ -32,7 +32,7 @@ export const useSettings = () => {
}) })
} }
const readSettings = async () => { const readSettings = useCallback(async () => {
if (!window?.core?.api) { if (!window?.core?.api) {
return return
} }
@ -42,7 +42,8 @@ export const useSettings = () => {
return typeof settings === 'object' ? settings : JSON.parse(settings) return typeof settings === 'object' ? settings : JSON.parse(settings)
} }
return {} return {}
} }, [])
const saveSettings = async ({ const saveSettings = async ({
runMode, runMode,
notify, notify,

View File

@ -5,24 +5,24 @@ import {
ConversationalExtension, ConversationalExtension,
} from '@janhq/core' } from '@janhq/core'
import { useAtom } from 'jotai' import { useAtomValue, useSetAtom } from 'jotai'
import useSetActiveThread from './useSetActiveThread' import useSetActiveThread from './useSetActiveThread'
import { extensionManager } from '@/extension/ExtensionManager' import { extensionManager } from '@/extension/ExtensionManager'
import { import {
ModelParams, ModelParams,
activeThreadAtom,
threadModelParamsAtom, threadModelParamsAtom,
threadStatesAtom, threadStatesAtom,
threadsAtom, threadsAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
const useThreads = () => { const useThreads = () => {
const [threadStates, setThreadStates] = useAtom(threadStatesAtom) const setThreadStates = useSetAtom(threadStatesAtom)
const [threads, setThreads] = useAtom(threadsAtom) const setThreads = useSetAtom(threadsAtom)
const [threadModelRuntimeParams, setThreadModelRuntimeParams] = useAtom( const setThreadModelRuntimeParams = useSetAtom(threadModelParamsAtom)
threadModelParamsAtom const activeThread = useAtomValue(activeThreadAtom)
)
const { setActiveThread } = useSetActiveThread() const { setActiveThread } = useSetActiveThread()
const getThreads = async () => { const getThreads = async () => {
@ -39,7 +39,6 @@ const useThreads = () => {
hasMore: false, hasMore: false,
waitingForResponse: false, waitingForResponse: false,
lastMessage, lastMessage,
isFinishInit: true,
} }
const modelParams = thread.assistants?.[0]?.model?.parameters const modelParams = thread.assistants?.[0]?.model?.parameters
@ -51,41 +50,12 @@ const useThreads = () => {
} }
}) })
// allow at max 1 unfinished init thread and it should be at the top of the list
let unfinishedThreadId: string | undefined = undefined
const unfinishedThreadState: Record<string, ThreadState> = {}
for (const key of Object.keys(threadStates)) {
const threadState = threadStates[key]
if (threadState.isFinishInit === false) {
unfinishedThreadState[key] = threadState
unfinishedThreadId = key
break
}
}
const unfinishedThread: Thread | undefined = threads.find(
(thread) => thread.id === unfinishedThreadId
)
let allThreads: Thread[] = [...localThreads]
if (unfinishedThread) {
allThreads = [unfinishedThread, ...localThreads]
}
if (unfinishedThreadId) {
localThreadStates[unfinishedThreadId] =
unfinishedThreadState[unfinishedThreadId]
threadModelParams[unfinishedThreadId] =
threadModelRuntimeParams[unfinishedThreadId]
}
// updating app states // updating app states
setThreadStates(localThreadStates) setThreadStates(localThreadStates)
setThreads(allThreads) setThreads(localThreads)
setThreadModelRuntimeParams(threadModelParams) setThreadModelRuntimeParams(threadModelParams)
if (allThreads.length > 0) { if (localThreads.length && !activeThread) {
setActiveThread(allThreads[0]) setActiveThread(localThreads[0])
} }
} catch (error) { } catch (error) {
console.error(error) console.error(error)

View File

@ -2,12 +2,15 @@
import { import {
ConversationalExtension, ConversationalExtension,
ExtensionTypeEnum, ExtensionTypeEnum,
InferenceEngine,
Thread, Thread,
ThreadAssistantInfo, ThreadAssistantInfo,
} from '@janhq/core' } from '@janhq/core'
import { useAtomValue, useSetAtom } from 'jotai' import { useAtomValue, useSetAtom } from 'jotai'
import { selectedModelAtom } from '@/containers/DropdownListSidebar'
import { toRuntimeParams, toSettingParams } from '@/utils/modelParam' import { toRuntimeParams, toSettingParams } from '@/utils/modelParam'
import { extensionManager } from '@/extension' import { extensionManager } from '@/extension'
@ -19,16 +22,22 @@ import {
threadsAtom, threadsAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
export type UpdateModelParameter = {
params?: ModelParams
modelId?: string
engine?: InferenceEngine
}
export default function useUpdateModelParameters() { export default function useUpdateModelParameters() {
const threads = useAtomValue(threadsAtom) const threads = useAtomValue(threadsAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom) const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const activeThreadState = useAtomValue(activeThreadStateAtom) const activeThreadState = useAtomValue(activeThreadStateAtom)
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom) const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)
const selectedModel = useAtomValue(selectedModelAtom)
const updateModelParameter = async ( const updateModelParameter = async (
threadId: string, threadId: string,
name: string, settings: UpdateModelParameter
value: number | boolean | string
) => { ) => {
const thread = threads.find((thread) => thread.id === threadId) const thread = threads.find((thread) => thread.id === threadId)
if (!thread) { if (!thread) {
@ -40,21 +49,18 @@ export default function useUpdateModelParameters() {
console.error('No active thread') console.error('No active thread')
return return
} }
const params = settings.modelId
? settings.params
: { ...activeModelParams, ...settings.params }
const updatedModelParams: ModelParams = { const updatedModelParams: ModelParams = {
...activeModelParams, ...params,
// Explicitly set the value to an array if the name is 'stop'
// This is because the inference engine would only accept an array for the 'stop' parameter
[name]: name === 'stop' ? (value === '' ? [] : [value]) : value,
} }
// update the state // update the state
setThreadModelParams(thread.id, updatedModelParams) setThreadModelParams(thread.id, updatedModelParams)
if (!activeThreadState.isFinishInit) {
// if thread is not initialized, we don't need to update thread.json
return
}
const assistants = thread.assistants.map( const assistants = thread.assistants.map(
(assistant: ThreadAssistantInfo) => { (assistant: ThreadAssistantInfo) => {
const runtimeParams = toRuntimeParams(updatedModelParams) const runtimeParams = toRuntimeParams(updatedModelParams)
@ -62,6 +68,10 @@ export default function useUpdateModelParameters() {
assistant.model.parameters = runtimeParams assistant.model.parameters = runtimeParams
assistant.model.settings = settingParams assistant.model.settings = settingParams
if (selectedModel) {
assistant.model.id = settings.modelId ?? selectedModel?.id
assistant.model.engine = settings.engine ?? selectedModel?.engine
}
return assistant return assistant
} }
) )

View File

@ -1,105 +0,0 @@
import { useEffect } from 'react'
import { fs, AppConfiguration } from '@janhq/core'
import { atom, useAtom } from 'jotai'
import { useMainViewState } from './useMainViewState'
const isSameDirectoryAtom = atom(false)
const isDirectoryConfirmAtom = atom(false)
const isErrorSetNewDestAtom = atom(false)
const currentPathAtom = atom('')
const newDestinationPathAtom = atom('')
export const SUCCESS_SET_NEW_DESTINATION = 'successSetNewDestination'
export function useVaultDirectory() {
const [isSameDirectory, setIsSameDirectory] = useAtom(isSameDirectoryAtom)
const { setMainViewState } = useMainViewState()
const [isDirectoryConfirm, setIsDirectoryConfirm] = useAtom(
isDirectoryConfirmAtom
)
const [isErrorSetNewDest, setIsErrorSetNewDest] = useAtom(
isErrorSetNewDestAtom
)
const [currentPath, setCurrentPath] = useAtom(currentPathAtom)
const [newDestinationPath, setNewDestinationPath] = useAtom(
newDestinationPathAtom
)
useEffect(() => {
window.core?.api
?.getAppConfigurations()
?.then((appConfig: AppConfiguration) => {
setCurrentPath(appConfig.data_folder)
})
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
const setNewDestination = async () => {
const destFolder = await window.core?.api?.selectDirectory()
setNewDestinationPath(destFolder)
if (destFolder) {
console.debug(`Destination folder selected: ${destFolder}`)
try {
const appConfiguration: AppConfiguration =
await window.core?.api?.getAppConfigurations()
const currentJanDataFolder = appConfiguration.data_folder
if (currentJanDataFolder === destFolder) {
console.debug(
`Destination folder is the same as current folder. Ignore..`
)
setIsSameDirectory(true)
setIsDirectoryConfirm(false)
return
} else {
setIsSameDirectory(false)
setIsDirectoryConfirm(true)
}
setIsErrorSetNewDest(false)
} catch (e) {
console.error(`Error: ${e}`)
setIsErrorSetNewDest(true)
}
}
}
const applyNewDestination = async () => {
try {
const appConfiguration: AppConfiguration =
await window.core?.api?.getAppConfigurations()
const currentJanDataFolder = appConfiguration.data_folder
appConfiguration.data_folder = newDestinationPath
await fs.syncFile(currentJanDataFolder, newDestinationPath)
await window.core?.api?.updateAppConfiguration(appConfiguration)
console.debug(
`File sync finished from ${currentPath} to ${newDestinationPath}`
)
setIsErrorSetNewDest(false)
localStorage.setItem(SUCCESS_SET_NEW_DESTINATION, 'true')
await window.core?.api?.relaunch()
} catch (e) {
console.error(`Error: ${e}`)
setIsErrorSetNewDest(true)
}
}
return {
setNewDestination,
newDestinationPath,
applyNewDestination,
isSameDirectory,
setIsDirectoryConfirm,
isDirectoryConfirm,
setIsSameDirectory,
currentPath,
isErrorSetNewDest,
setIsErrorSetNewDest,
}
}

View File

@ -25,10 +25,8 @@ const nextConfig = {
...config.plugins, ...config.plugins,
new webpack.DefinePlugin({ new webpack.DefinePlugin({
VERSION: JSON.stringify(packageJson.version), VERSION: JSON.stringify(packageJson.version),
ANALYTICS_ID: ANALYTICS_ID: JSON.stringify(process.env.ANALYTICS_ID),
JSON.stringify(process.env.ANALYTICS_ID) ?? JSON.stringify('xxx'), ANALYTICS_HOST: JSON.stringify(process.env.ANALYTICS_HOST),
ANALYTICS_HOST:
JSON.stringify(process.env.ANALYTICS_HOST) ?? JSON.stringify('xxx'),
API_BASE_URL: JSON.stringify('http://localhost:1337'), API_BASE_URL: JSON.stringify('http://localhost:1337'),
isMac: process.platform === 'darwin', isMac: process.platform === 'darwin',
isWindows: process.platform === 'win32', isWindows: process.platform === 'win32',

View File

@ -57,7 +57,7 @@ const AssistantSetting = ({
tools: [ tools: [
{ {
type: 'retrieval', type: 'retrieval',
enabled: false, enabled: true,
settings: { settings: {
...(activeThread.assistants[0].tools && ...(activeThread.assistants[0].tools &&
activeThread.assistants[0].tools[0]?.settings), activeThread.assistants[0].tools[0]?.settings),

View File

@ -8,11 +8,9 @@ import { useAtomValue } from 'jotai'
import LogoMark from '@/containers/Brand/Logo/Mark' import LogoMark from '@/containers/Brand/Logo/Mark'
import GenerateResponse from '@/containers/Loader/GenerateResponse'
import { MainViewState } from '@/constants/screens' import { MainViewState } from '@/constants/screens'
import { activeModelAtom } from '@/hooks/useActiveModel' import { loadModelErrorAtom } from '@/hooks/useActiveModel'
import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels' import { useGetDownloadedModels } from '@/hooks/useGetDownloadedModels'
import { useMainViewState } from '@/hooks/useMainViewState' import { useMainViewState } from '@/hooks/useMainViewState'
@ -21,17 +19,13 @@ import ChatItem from '../ChatItem'
import ErrorMessage from '../ErrorMessage' import ErrorMessage from '../ErrorMessage'
import { import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
generateResponseAtom,
getCurrentChatMessagesAtom,
} from '@/helpers/atoms/ChatMessage.atom'
const ChatBody: React.FC = () => { const ChatBody: React.FC = () => {
const messages = useAtomValue(getCurrentChatMessagesAtom) const messages = useAtomValue(getCurrentChatMessagesAtom)
const activeModel = useAtomValue(activeModelAtom)
const { downloadedModels } = useGetDownloadedModels() const { downloadedModels } = useGetDownloadedModels()
const { setMainViewState } = useMainViewState() const { setMainViewState } = useMainViewState()
const generateResponse = useAtomValue(generateResponseAtom) const loadModelError = useAtomValue(loadModelErrorAtom)
if (downloadedModels.length === 0) if (downloadedModels.length === 0)
return ( return (
@ -92,22 +86,14 @@ const ChatBody: React.FC = () => {
message.content.length > 0) && ( message.content.length > 0) && (
<ChatItem {...message} key={message.id} /> <ChatItem {...message} key={message.id} />
)} )}
{(message.status === MessageStatus.Error || {!loadModelError &&
message.status === MessageStatus.Stopped) && (message.status === MessageStatus.Error ||
message.status === MessageStatus.Stopped) &&
index === messages.length - 1 && ( index === messages.length - 1 && (
<ErrorMessage message={message} /> <ErrorMessage message={message} />
)} )}
</div> </div>
))} ))}
{activeModel &&
(generateResponse ||
(messages.length &&
messages[messages.length - 1].status ===
MessageStatus.Pending &&
!messages[messages.length - 1].content.length)) && (
<GenerateResponse />
)}
</ScrollToBottom> </ScrollToBottom>
)} )}
</Fragment> </Fragment>

View File

@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
import { useEffect, useRef, useState } from 'react' import { useContext, useEffect, useRef, useState } from 'react'
import { InferenceEvent, MessageStatus, events } from '@janhq/core' import { InferenceEvent, MessageStatus, events } from '@janhq/core'
@ -24,6 +24,8 @@ import { twMerge } from 'tailwind-merge'
import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai' import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai'
import { FeatureToggleContext } from '@/context/FeatureToggle'
import { useActiveModel } from '@/hooks/useActiveModel' import { useActiveModel } from '@/hooks/useActiveModel'
import { useClickOutside } from '@/hooks/useClickOutside' import { useClickOutside } from '@/hooks/useClickOutside'
@ -53,7 +55,8 @@ const ChatInput: React.FC = () => {
const textareaRef = useRef<HTMLTextAreaElement>(null) const textareaRef = useRef<HTMLTextAreaElement>(null)
const fileInputRef = useRef<HTMLInputElement>(null) const fileInputRef = useRef<HTMLInputElement>(null)
const imageInputRef = useRef<HTMLInputElement>(null) const imageInputRef = useRef<HTMLInputElement>(null)
const [ShowAttacmentMenus, setShowAttacmentMenus] = useState(false) const [showAttacmentMenus, setShowAttacmentMenus] = useState(false)
const { experimentalFeature } = useContext(FeatureToggleContext)
const onPromptChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => { const onPromptChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
setCurrentPrompt(e.target.value) setCurrentPrompt(e.target.value)
@ -64,30 +67,35 @@ const ChatInput: React.FC = () => {
useEffect(() => { useEffect(() => {
if (isWaitingToSend && activeThreadId) { if (isWaitingToSend && activeThreadId) {
setIsWaitingToSend(false) setIsWaitingToSend(false)
sendChatMessage() sendChatMessage(currentPrompt)
} }
}, [
activeThreadId,
isWaitingToSend,
currentPrompt,
setIsWaitingToSend,
sendChatMessage,
])
useEffect(() => {
if (textareaRef.current) { if (textareaRef.current) {
textareaRef.current.focus() textareaRef.current.focus()
} }
// eslint-disable-next-line react-hooks/exhaustive-deps }, [activeThreadId])
}, [waitingToSendMessage, activeThreadId])
useEffect(() => { useEffect(() => {
if (textareaRef.current) { if (textareaRef.current) {
textareaRef.current.style.height = '40px' textareaRef.current.style.height = '40px'
textareaRef.current.style.height = textareaRef.current.scrollHeight + 'px' textareaRef.current.style.height = textareaRef.current.scrollHeight + 'px'
textareaRef.current.focus()
} }
}, [currentPrompt]) }, [currentPrompt])
const onKeyDown = async (e: React.KeyboardEvent<HTMLTextAreaElement>) => { const onKeyDown = async (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (e.key === 'Enter') { if (e.key === 'Enter' && !e.shiftKey) {
if (!e.shiftKey) { e.preventDefault()
e.preventDefault() if (messages[messages.length - 1]?.status !== MessageStatus.Pending)
if (messages[messages.length - 1]?.status !== MessageStatus.Pending) sendChatMessage(currentPrompt)
sendChatMessage() else onStopInferenceClick()
else onStopInferenceClick()
}
} }
} }
@ -142,50 +150,52 @@ const ChatInput: React.FC = () => {
value={currentPrompt} value={currentPrompt}
onChange={onPromptChange} onChange={onPromptChange}
/> />
{experimentalFeature && (
<Tooltip> <Tooltip>
<TooltipTrigger asChild> <TooltipTrigger asChild>
<PaperclipIcon <PaperclipIcon
size={20} size={20}
className="absolute bottom-2 right-4 cursor-pointer text-muted-foreground" className="absolute bottom-2 right-4 cursor-pointer text-muted-foreground"
onClick={(e) => { onClick={(e) => {
if ( if (
fileUpload.length > 0 || fileUpload.length > 0 ||
(activeThread?.assistants[0].tools && (activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled) !activeThread?.assistants[0].tools[0]?.enabled)
) { ) {
e.stopPropagation() e.stopPropagation()
} else { } else {
setShowAttacmentMenus(!ShowAttacmentMenus) setShowAttacmentMenus(!showAttacmentMenus)
} }
}} }}
/> />
</TooltipTrigger> </TooltipTrigger>
<TooltipPortal> <TooltipPortal>
{fileUpload.length > 0 || {fileUpload.length > 0 ||
(activeThread?.assistants[0].tools && (activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled && ( !activeThread?.assistants[0].tools[0]?.enabled && (
<TooltipContent side="top" className="max-w-[154px] px-3"> <TooltipContent side="top" className="max-w-[154px] px-3">
{fileUpload.length !== 0 && ( {fileUpload.length !== 0 && (
<span>
Currently, we only support 1 attachment at the same time
</span>
)}
{activeThread?.assistants[0].tools &&
activeThread?.assistants[0].tools[0]?.enabled ===
false && (
<span> <span>
Turn on Retrieval in Assistant Settings to use this Currently, we only support 1 attachment at the same
feature time
</span> </span>
)} )}
<TooltipArrow /> {activeThread?.assistants[0].tools &&
</TooltipContent> activeThread?.assistants[0].tools[0]?.enabled ===
))} false && (
</TooltipPortal> <span>
</Tooltip> Turn on Retrieval in Assistant Settings to use this
feature
</span>
)}
<TooltipArrow />
</TooltipContent>
))}
</TooltipPortal>
</Tooltip>
)}
{ShowAttacmentMenus && ( {showAttacmentMenus && (
<div <div
ref={refAttachmentMenus} ref={refAttachmentMenus}
className="absolute bottom-10 right-0 w-36 cursor-pointer rounded-lg border border-border bg-background py-1 shadow" className="absolute bottom-10 right-0 w-36 cursor-pointer rounded-lg border border-border bg-background py-1 shadow"
@ -237,7 +247,7 @@ const ChatInput: React.FC = () => {
} }
themes="primary" themes="primary"
className="min-w-[100px]" className="min-w-[100px]"
onClick={sendChatMessage} onClick={() => sendChatMessage(currentPrompt)}
> >
Send Send
</Button> </Button>

View File

@ -17,7 +17,6 @@ import {
deleteMessageAtom, deleteMessageAtom,
getCurrentChatMessagesAtom, getCurrentChatMessagesAtom,
} from '@/helpers/atoms/ChatMessage.atom' } from '@/helpers/atoms/ChatMessage.atom'
import { totalRamAtom } from '@/helpers/atoms/SystemBar.atom'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom' import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
const ErrorMessage = ({ message }: { message: ThreadMessage }) => { const ErrorMessage = ({ message }: { message: ThreadMessage }) => {
@ -25,8 +24,6 @@ const ErrorMessage = ({ message }: { message: ThreadMessage }) => {
const thread = useAtomValue(activeThreadAtom) const thread = useAtomValue(activeThreadAtom)
const deleteMessage = useSetAtom(deleteMessageAtom) const deleteMessage = useSetAtom(deleteMessageAtom)
const { resendChatMessage } = useSendChatMessage() const { resendChatMessage } = useSendChatMessage()
const { activeModel } = useActiveModel()
const totalRam = useAtomValue(totalRamAtom)
const regenerateMessage = async () => { const regenerateMessage = async () => {
const lastMessageIndex = messages.length - 1 const lastMessageIndex = messages.length - 1
@ -70,33 +67,26 @@ const ErrorMessage = ({ message }: { message: ThreadMessage }) => {
{message.status === MessageStatus.Error && ( {message.status === MessageStatus.Error && (
<div key={message.id} className="mt-10 flex flex-col items-center"> <div key={message.id} className="mt-10 flex flex-col items-center">
<span className="mb-3 text-center text-sm font-medium text-gray-500"> <span className="mb-3 text-center text-sm font-medium text-gray-500">
{Number(activeModel?.metadata.size) > totalRam ? ( <>
<> <p>Apologies, something&apos;s amiss!</p>
Oops! Model size exceeds available RAM. Consider selecting a Jan&apos;s in beta. Find troubleshooting guides{' '}
smaller model or upgrading your RAM for smoother performance. <a
</> href="https://jan.ai/guides/troubleshooting"
) : ( target="_blank"
<> className="text-blue-600 hover:underline dark:text-blue-300"
<p>Apologies, something&apos;s amiss!</p> >
Jan&apos;s in beta. Find troubleshooting guides{' '} here
<a </a>{' '}
href="https://jan.ai/guides/troubleshooting" or reach out to us on{' '}
target="_blank" <a
className="text-blue-600 hover:underline dark:text-blue-300" href="https://discord.gg/AsJ8krTT3N"
> target="_blank"
here className="text-blue-600 hover:underline dark:text-blue-300"
</a>{' '} >
or reach out to us on{' '} Discord
<a </a>{' '}
href="https://discord.gg/AsJ8krTT3N" for assistance.
target="_blank" </>
className="text-blue-600 hover:underline dark:text-blue-300"
>
Discord
</a>{' '}
for assistance.
</>
)}
</span> </span>
</div> </div>
)} )}

View File

@ -0,0 +1,48 @@
import { MessageStatus, ThreadMessage } from '@janhq/core'
import { useAtomValue } from 'jotai'
import { useActiveModel } from '@/hooks/useActiveModel'
import { totalRamAtom } from '@/helpers/atoms/SystemBar.atom'
const LoadModelErrorMessage = () => {
const { activeModel } = useActiveModel()
const availableRam = useAtomValue(totalRamAtom)
return (
<>
<div className="mt-10 flex flex-col items-center">
<span className="mb-3 text-center text-sm font-medium text-gray-500">
{Number(activeModel?.metadata.size) > availableRam ? (
<>
Oops! Model size exceeds available RAM. Consider selecting a
smaller model or upgrading your RAM for smoother performance.
</>
) : (
<>
<p>Apologies, something&apos;s amiss!</p>
Jan&apos;s in beta. Find troubleshooting guides{' '}
<a
href="https://jan.ai/guides/troubleshooting"
target="_blank"
className="text-blue-600 hover:underline dark:text-blue-300"
>
here
</a>{' '}
or reach out to us on{' '}
<a
href="https://discord.gg/AsJ8krTT3N"
target="_blank"
className="text-blue-600 hover:underline dark:text-blue-300"
>
Discord
</a>{' '}
for assistance.
</>
)}
</span>
</div>
</>
)
}
export default LoadModelErrorMessage

View File

@ -1,7 +1,9 @@
import useSendChatMessage from '@/hooks/useSendChatMessage' import { useAtomValue } from 'jotai'
import { queuedMessageAtom } from '@/hooks/useSendChatMessage'
const MessageQueuedBanner: React.FC = () => { const MessageQueuedBanner: React.FC = () => {
const { queuedMessage } = useSendChatMessage() const queuedMessage = useAtomValue(queuedMessageAtom)
return ( return (
<div> <div>

View File

@ -4,6 +4,7 @@ import {
ThreadMessage, ThreadMessage,
ChatCompletionRole, ChatCompletionRole,
ConversationalExtension, ConversationalExtension,
ContentType,
} from '@janhq/core' } from '@janhq/core'
import { useAtomValue, useSetAtom } from 'jotai' import { useAtomValue, useSetAtom } from 'jotai'
import { RefreshCcw, CopyIcon, Trash2Icon, CheckIcon } from 'lucide-react' import { RefreshCcw, CopyIcon, Trash2Icon, CheckIcon } from 'lucide-react'
@ -53,7 +54,9 @@ const MessageToolbar = ({ message }: { message: ThreadMessage }) => {
<div className={twMerge('flex flex-row items-center')}> <div className={twMerge('flex flex-row items-center')}>
<div className="flex overflow-hidden rounded-md border border-border bg-background/20"> <div className="flex overflow-hidden rounded-md border border-border bg-background/20">
{message.id === messages[messages.length - 1]?.id && {message.id === messages[messages.length - 1]?.id &&
messages[messages.length - 1].status !== MessageStatus.Error && ( messages[messages.length - 1].status !== MessageStatus.Error &&
messages[messages.length - 1].content[0]?.type !==
ContentType.Pdf && (
<div <div
className="cursor-pointer border-r border-border px-2 py-2 hover:bg-background/80" className="cursor-pointer border-r border-border px-2 py-2 hover:bg-background/80"
onClick={onRegenerateClick} onClick={onRegenerateClick}

View File

@ -56,7 +56,7 @@ const SettingComponent = ({
updater?: ( updater?: (
threadId: string, threadId: string,
name: string, name: string,
value: string | number | boolean value: string | number | boolean | string[]
) => void ) => void
}) => { }) => {
const { updateModelParameter } = useUpdateModelParameters() const { updateModelParameter } = useUpdateModelParameters()
@ -73,7 +73,10 @@ const SettingComponent = ({
const { stopModel } = useActiveModel() const { stopModel } = useActiveModel()
const onValueChanged = (name: string, value: string | number | boolean) => { const onValueChanged = (
name: string,
value: string | number | boolean | string[]
) => {
if (!threadId) return if (!threadId) return
if (engineParams.some((x) => x.name.includes(name))) { if (engineParams.some((x) => x.name.includes(name))) {
setEngineParamsUpdate(true) setEngineParamsUpdate(true)
@ -83,7 +86,13 @@ const SettingComponent = ({
} }
if (updater) updater(threadId, name, value) if (updater) updater(threadId, name, value)
else { else {
updateModelParameter(threadId, name, value) // Convert stop string to array
if (name === 'stop' && typeof value === 'string') {
value = [value]
}
updateModelParameter(threadId, {
params: { [name]: value },
})
} }
} }

View File

@ -1,5 +1,5 @@
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
import React from 'react' import React, { useContext } from 'react'
import { InferenceEngine } from '@janhq/core' import { InferenceEngine } from '@janhq/core'
import { Input, Textarea, Switch } from '@janhq/uikit' import { Input, Textarea, Switch } from '@janhq/uikit'
@ -15,6 +15,8 @@ import DropdownListSidebar, {
selectedModelAtom, selectedModelAtom,
} from '@/containers/DropdownListSidebar' } from '@/containers/DropdownListSidebar'
import { FeatureToggleContext } from '@/context/FeatureToggle'
import { useCreateNewThread } from '@/hooks/useCreateNewThread' import { useCreateNewThread } from '@/hooks/useCreateNewThread'
import { getConfigurationsData } from '@/utils/componentSettings' import { getConfigurationsData } from '@/utils/componentSettings'
@ -39,6 +41,7 @@ const Sidebar: React.FC = () => {
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom) const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)
const selectedModel = useAtomValue(selectedModelAtom) const selectedModel = useAtomValue(selectedModelAtom)
const { updateThreadMetadata } = useCreateNewThread() const { updateThreadMetadata } = useCreateNewThread()
const { experimentalFeature } = useContext(FeatureToggleContext)
const modelEngineParams = toSettingParams(activeModelParams) const modelEngineParams = toSettingParams(activeModelParams)
const modelRuntimeParams = toRuntimeParams(activeModelParams) const modelRuntimeParams = toRuntimeParams(activeModelParams)
@ -131,78 +134,79 @@ const Sidebar: React.FC = () => {
}} }}
/> />
</div> </div>
{experimentalFeature && (
<div> <div>
{activeThread?.assistants[0]?.tools && {activeThread?.assistants[0]?.tools &&
componentDataAssistantSetting.length > 0 && ( componentDataAssistantSetting.length > 0 && (
<div className="mt-2"> <div className="mt-2">
<CardSidebar <CardSidebar
title="Retrieval" title="Retrieval"
asChild asChild
rightAction={ rightAction={
<Switch <Switch
name="retrieval" name="retrieval"
className="mr-2" className="mr-2"
checked={activeThread?.assistants[0].tools[0].enabled} checked={
onCheckedChange={(e) => { activeThread?.assistants[0].tools[0].enabled
if (activeThread) }
updateThreadMetadata({ onCheckedChange={(e) => {
...activeThread, if (activeThread)
assistants: [ updateThreadMetadata({
{ ...activeThread,
...activeThread.assistants[0], assistants: [
tools: [ {
{ ...activeThread.assistants[0],
type: 'retrieval', tools: [
enabled: e, {
settings: type: 'retrieval',
(activeThread.assistants[0].tools && enabled: e,
activeThread.assistants[0].tools[0] settings:
?.settings) ?? (activeThread.assistants[0].tools &&
{}, activeThread.assistants[0]
}, .tools[0]?.settings) ??
], {},
}, },
], ],
}) },
}} ],
/> })
} }}
>
{activeThread?.assistants[0]?.tools[0].enabled && (
<div className="px-2 py-4">
<div className="mb-4">
<label
id="tool-title"
className="mb-2 inline-block font-bold text-zinc-500 dark:text-gray-300"
>
Embedding Engine
</label>
<div className="flex items-center justify-between">
<label className="font-medium text-zinc-500 dark:text-gray-300">
{selectedModel?.engine ===
InferenceEngine.openai
? 'OpenAI'
: 'Nitro'}
</label>
</div>
</div>
<AssistantSetting
componentData={componentDataAssistantSetting}
/> />
</div> }
)} >
</CardSidebar> {activeThread?.assistants[0]?.tools[0].enabled && (
</div> <div className="px-2 py-4">
)} <div className="mb-4">
</div> <label
id="tool-title"
className="mb-2 inline-block font-bold text-zinc-500 dark:text-gray-300"
>
Embedding Engine
</label>
<div className="flex items-center justify-between">
<label className="font-medium text-zinc-500 dark:text-gray-300">
{selectedModel?.engine ===
InferenceEngine.openai
? 'OpenAI'
: 'Nitro'}
</label>
</div>
</div>
<AssistantSetting
componentData={componentDataAssistantSetting}
/>
</div>
)}
</CardSidebar>
</div>
)}
</div>
)}
</div> </div>
</CardSidebar> </CardSidebar>
<CardSidebar title="Model"> <CardSidebar title="Model">
<div className="px-2"> <div className="px-2 pt-4">
<div className="mt-4"> <DropdownListSidebar />
<DropdownListSidebar />
</div>
{componentDataRuntimeSetting.length > 0 && ( {componentDataRuntimeSetting.length > 0 && (
<div className="mt-6"> <div className="mt-6">

View File

@ -43,7 +43,7 @@ const SimpleTextMessage: React.FC<ThreadMessage> = (props) => {
text = props.content[0]?.text?.value ?? '' text = props.content[0]?.text?.value ?? ''
} }
const clipboard = useClipboard({ timeout: 1000 }) const clipboard = useClipboard({ timeout: 1000 })
const { onViewFile } = usePath() const { onViewFile, onViewFileContainer } = usePath()
const marked: Marked = new Marked( const marked: Marked = new Marked(
markedHighlight({ markedHighlight({
@ -200,13 +200,14 @@ const SimpleTextMessage: React.FC<ThreadMessage> = (props) => {
className="aspect-auto h-[300px]" className="aspect-auto h-[300px]"
alt={props.content[0]?.text.name} alt={props.content[0]?.text.name}
src={props.content[0]?.text.annotations[0]} src={props.content[0]?.text.annotations[0]}
onClick={() => onViewFile(`${props.id}.png`)}
/> />
<div className="absolute left-0 top-0 z-20 hidden h-full w-full bg-black/20 group-hover/image:inline-block" /> <div className="absolute left-0 top-0 z-20 hidden h-full w-full bg-black/20 group-hover/image:inline-block" />
<Tooltip> <Tooltip>
<TooltipTrigger asChild> <TooltipTrigger asChild>
<div <div
className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-background group-hover/image:flex" className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-background group-hover/image:flex"
onClick={() => onViewFile(`${props.id}.png`)} onClick={onViewFileContainer}
> >
<FolderOpenIcon size={20} /> <FolderOpenIcon size={20} />
</div> </div>
@ -223,14 +224,17 @@ const SimpleTextMessage: React.FC<ThreadMessage> = (props) => {
{props.content[0]?.type === ContentType.Pdf && ( {props.content[0]?.type === ContentType.Pdf && (
<div className="group/file relative mb-2 inline-flex w-60 cursor-pointer gap-x-3 overflow-hidden rounded-lg bg-secondary p-4"> <div className="group/file relative mb-2 inline-flex w-60 cursor-pointer gap-x-3 overflow-hidden rounded-lg bg-secondary p-4">
<div className="absolute left-0 top-0 z-20 hidden h-full w-full bg-black/20 backdrop-blur-sm group-hover/file:inline-block" /> <div
className="absolute left-0 top-0 z-20 hidden h-full w-full bg-black/20 backdrop-blur-sm group-hover/file:inline-block"
onClick={() =>
onViewFile(`${props.id}.${props.content[0]?.type}`)
}
/>
<Tooltip> <Tooltip>
<TooltipTrigger asChild> <TooltipTrigger asChild>
<div <div
className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-background group-hover/file:flex" className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-background group-hover/file:flex"
onClick={() => onClick={onViewFileContainer}
onViewFile(`${props.id}.${props.content[0]?.type}`)
}
> >
<FolderOpenIcon size={20} /> <FolderOpenIcon size={20} />
</div> </div>

View File

@ -1,4 +1,4 @@
import { useEffect } from 'react' import { useEffect, useState } from 'react'
import { import {
Modal, Modal,
@ -49,17 +49,19 @@ export default function ThreadList() {
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const { deleteThread, cleanThread } = useDeleteThread() const { deleteThread, cleanThread } = useDeleteThread()
const { downloadedModels } = useGetDownloadedModels() const { downloadedModels } = useGetDownloadedModels()
const [isThreadsReady, setIsThreadsReady] = useState(false)
const { activeThreadId, setActiveThread: onThreadClick } = const { activeThreadId, setActiveThread: onThreadClick } =
useSetActiveThread() useSetActiveThread()
useEffect(() => { useEffect(() => {
getThreads() getThreads().then(() => setIsThreadsReady(true))
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []) }, [])
useEffect(() => { useEffect(() => {
if ( if (
isThreadsReady &&
downloadedModels.length !== 0 && downloadedModels.length !== 0 &&
threads.length === 0 && threads.length === 0 &&
assistants.length !== 0 && assistants.length !== 0 &&
@ -68,7 +70,7 @@ export default function ThreadList() {
requestCreateNewThread(assistants[0]) requestCreateNewThread(assistants[0])
} }
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [assistants, threads, downloadedModels, activeThread]) }, [assistants, threads, downloadedModels, activeThread, isThreadsReady])
return ( return (
<div className="px-3 py-4"> <div className="px-3 py-4">
@ -84,7 +86,6 @@ export default function ThreadList() {
threads.map((thread, i) => { threads.map((thread, i) => {
const lastMessage = const lastMessage =
threadStates[thread.id]?.lastMessage ?? 'No new message' threadStates[thread.id]?.lastMessage ?? 'No new message'
return ( return (
<div <div
key={i} key={i}
@ -96,13 +97,10 @@ export default function ThreadList() {
}} }}
> >
<div className="relative z-10 p-4 py-4"> <div className="relative z-10 p-4 py-4">
<div className="flex justify-between"> <p className="line-clamp-1 text-xs leading-5 text-muted-foreground">
<h2 className="line-clamp-1 font-bold">{thread.title}</h2> {thread.updated && displayDate(thread.updated)}
<p className="mb-1 line-clamp-1 text-xs leading-5 text-muted-foreground"> </p>
{thread.updated && <h2 className="line-clamp-1 font-bold">{thread.title}</h2>
displayDate(new Date(thread.updated).getTime())}
</p>
</div>
<p className="mt-1 line-clamp-1 text-xs text-gray-700 group-hover/message:max-w-[160px] dark:text-gray-300"> <p className="mt-1 line-clamp-1 text-xs text-gray-700 group-hover/message:max-w-[160px] dark:text-gray-300">
{lastMessage || 'No new message'} {lastMessage || 'No new message'}
</p> </p>
@ -161,9 +159,9 @@ export default function ThreadList() {
<div className="flex cursor-pointer items-center space-x-2 px-4 py-2 hover:bg-secondary"> <div className="flex cursor-pointer items-center space-x-2 px-4 py-2 hover:bg-secondary">
<Trash2Icon <Trash2Icon
size={16} size={16}
className="text-muted-foreground" className="text-red-600 dark:text-red-300"
/> />
<span className="text-bold text-black dark:text-muted-foreground"> <span className="text-bold text-red-600 dark:text-red-300">
Delete thread Delete thread
</span> </span>
</div> </div>

View File

@ -1,53 +1,87 @@
/* eslint-disable @typescript-eslint/naming-convention */ /* eslint-disable @typescript-eslint/naming-convention */
import React, { useEffect, useState } from 'react' import React, { useContext, useEffect, useState } from 'react'
import { useDropzone } from 'react-dropzone' import { useDropzone } from 'react-dropzone'
import { useAtomValue, useSetAtom } from 'jotai' import { useAtomValue, useSetAtom } from 'jotai'
import { UploadCloudIcon, XIcon } from 'lucide-react' import { UploadCloudIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge' import { twMerge } from 'tailwind-merge'
import GenerateResponse from '@/containers/Loader/GenerateResponse'
import ModelReload from '@/containers/Loader/ModelReload' import ModelReload from '@/containers/Loader/ModelReload'
import ModelStart from '@/containers/Loader/ModelStart' import ModelStart from '@/containers/Loader/ModelStart'
import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai' import { currentPromptAtom, fileUploadAtom } from '@/containers/Providers/Jotai'
import { showLeftSideBarAtom } from '@/containers/Providers/KeyListener' import { showLeftSideBarAtom } from '@/containers/Providers/KeyListener'
import useSendChatMessage from '@/hooks/useSendChatMessage' import { snackbar } from '@/containers/Toast'
import { FeatureToggleContext } from '@/context/FeatureToggle'
import { activeModelAtom, loadModelErrorAtom } from '@/hooks/useActiveModel'
import { queuedMessageAtom, reloadModelAtom } from '@/hooks/useSendChatMessage'
import ChatBody from '@/screens/Chat/ChatBody' import ChatBody from '@/screens/Chat/ChatBody'
import ThreadList from '@/screens/Chat/ThreadList' import ThreadList from '@/screens/Chat/ThreadList'
import ChatInput from './ChatInput' import ChatInput from './ChatInput'
import LoadModelErrorMessage from './LoadModelErrorMessage'
import RequestDownloadModel from './RequestDownloadModel' import RequestDownloadModel from './RequestDownloadModel'
import Sidebar from './Sidebar' import Sidebar from './Sidebar'
import { import {
activeThreadAtom, activeThreadAtom,
engineParamsUpdateAtom, engineParamsUpdateAtom,
isGeneratingResponseAtom,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
const renderError = (code: string) => {
switch (code) {
case 'multiple-upload':
return 'Currently, we only support 1 attachment at the same time'
case 'retrieval-off':
return 'Turn on Retrieval in Assistant Settings to use this feature'
case 'file-invalid-type':
return 'We do not support this file type'
default:
return 'Oops, something error, please try again.'
}
}
const ChatScreen: React.FC = () => { const ChatScreen: React.FC = () => {
const setCurrentPrompt = useSetAtom(currentPromptAtom) const setCurrentPrompt = useSetAtom(currentPromptAtom)
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const showLeftSideBar = useAtomValue(showLeftSideBarAtom) const showLeftSideBar = useAtomValue(showLeftSideBarAtom)
const engineParamsUpdate = useAtomValue(engineParamsUpdateAtom) const engineParamsUpdate = useAtomValue(engineParamsUpdateAtom)
const { queuedMessage, reloadModel } = useSendChatMessage()
const [dragOver, setDragOver] = useState(false) const [dragOver, setDragOver] = useState(false)
const queuedMessage = useAtomValue(queuedMessageAtom)
const reloadModel = useAtomValue(reloadModelAtom)
const [dragRejected, setDragRejected] = useState({ code: '' }) const [dragRejected, setDragRejected] = useState({ code: '' })
const setFileUpload = useSetAtom(fileUploadAtom) const setFileUpload = useSetAtom(fileUploadAtom)
const { experimentalFeature } = useContext(FeatureToggleContext)
const activeModel = useAtomValue(activeModelAtom)
const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)
const loadModelError = useAtomValue(loadModelErrorAtom)
const { getRootProps, isDragReject } = useDropzone({ const { getRootProps, isDragReject } = useDropzone({
noClick: true, noClick: true,
multiple: false, multiple: false,
accept: { accept: {
// 'image/*': ['.png', '.jpg', '.jpeg'],
'application/pdf': ['.pdf'], 'application/pdf': ['.pdf'],
}, },
onDragOver: (e) => { onDragOver: (e) => {
// Retrieval file drag and drop is experimental feature
if (!experimentalFeature) return
if ( if (
e.dataTransfer.items.length === 1 && e.dataTransfer.items.length === 1 &&
activeThread?.assistants[0].tools && activeThread?.assistants[0].tools &&
@ -65,6 +99,8 @@ const ChatScreen: React.FC = () => {
}, },
onDragLeave: () => setDragOver(false), onDragLeave: () => setDragOver(false),
onDrop: (files, rejectFiles) => { onDrop: (files, rejectFiles) => {
// Retrieval file drag and drop is experimental feature
if (!experimentalFeature) return
if ( if (
!files || !files ||
files.length !== 1 || files.length !== 1 ||
@ -95,8 +131,13 @@ const ChatScreen: React.FC = () => {
}, },
}) })
// TODO @faisal change this until we have sneakbar component
useEffect(() => { useEffect(() => {
if (dragRejected.code) {
snackbar({
description: renderError(dragRejected.code),
type: 'error',
})
}
setTimeout(() => { setTimeout(() => {
if (dragRejected.code) { if (dragRejected.code) {
setDragRejected({ code: '' }) setDragRejected({ code: '' })
@ -104,22 +145,6 @@ const ChatScreen: React.FC = () => {
}, 2000) }, 2000)
}, [dragRejected.code]) }, [dragRejected.code])
const renderError = (code: string) => {
switch (code) {
case 'multiple-upload':
return 'Currently, we only support 1 attachment at the same time'
case 'retrieval-off':
return 'Turn on Retrieval in Assistant Settings to use this feature'
case 'file-invalid-type':
return 'We do not support this file type'
default:
return 'Oops, something error, please try again.'
}
}
return ( return (
<div className="flex h-full w-full"> <div className="flex h-full w-full">
{/* Left side bar */} {/* Left side bar */}
@ -133,33 +158,6 @@ const ChatScreen: React.FC = () => {
className="relative flex h-full w-full flex-col overflow-auto bg-background outline-none" className="relative flex h-full w-full flex-col overflow-auto bg-background outline-none"
{...getRootProps()} {...getRootProps()}
> >
{dragRejected.code !== '' && (
<div className="absolute bottom-3 left-1/2 z-50 inline-flex w-full -translate-x-1/2 justify-center px-16">
<div className="flex items-start justify-between gap-x-4 rounded-lg bg-foreground px-4 py-2 text-white dark:border dark:border-border dark:bg-zinc-900">
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M20 10C20 15.5228 15.5228 20 10 20H0.993697C0.110179 20 -0.332289 18.9229 0.292453 18.2929L2.2495 16.3195C0.843343 14.597 1.21409e-08 12.397 1.21409e-08 10C1.21409e-08 4.47715 4.47715 0 10 0C15.5228 0 20 4.47715 20 10ZM13.2071 6.79289C13.5976 7.18342 13.5976 7.81658 13.2071 8.20711L11.4142 10L13.2071 11.7929C13.5976 12.1834 13.5976 12.8166 13.2071 13.2071C12.8166 13.5976 12.1834 13.5976 11.7929 13.2071L10 11.4142L8.20711 13.2071C7.81658 13.5976 7.18342 13.5976 6.79289 13.2071C6.40237 12.8166 6.40237 12.1834 6.79289 11.7929L8.58579 10L6.79289 8.20711C6.40237 7.81658 6.40237 7.18342 6.79289 6.79289C7.18342 6.40237 7.81658 6.40237 8.20711 6.79289L10 8.58579L11.7929 6.79289C12.1834 6.40237 12.8166 6.40237 13.2071 6.79289Z"
fill="#F87171"
/>
</svg>
<p>{renderError(dragRejected.code)}</p>
<XIcon
size={24}
className="cursor-pointer"
onClick={() => setDragRejected({ code: '' })}
/>
</div>
</div>
)}
{dragOver && ( {dragOver && (
<div className="absolute z-50 mx-auto h-full w-full bg-background/50 p-8 backdrop-blur-lg"> <div className="absolute z-50 mx-auto h-full w-full bg-background/50 p-8 backdrop-blur-lg">
<div <div
@ -213,9 +211,13 @@ const ChatScreen: React.FC = () => {
</span> </span>
</div> </div>
)} )}
{activeModel && isGeneratingResponse && <GenerateResponse />}
{loadModelError && <LoadModelErrorMessage />}
<ChatInput /> <ChatInput />
</div> </div>
</div> </div>
{/* Right side bar */} {/* Right side bar */}
{activeThread && <Sidebar />} {activeThread && <Sidebar />}
</div> </div>

View File

@ -52,9 +52,12 @@ const ExploreModelsScreen = () => {
if (loading) return <Loader description="loading ..." /> if (loading) return <Loader description="loading ..." />
return ( return (
<div className="flex h-full w-full overflow-y-auto bg-background"> <div
className="flex h-full w-full overflow-y-auto bg-background"
data-testid="hub-container-test-id"
>
<div className="h-full w-full p-4"> <div className="h-full w-full p-4">
<div className="h-full" data-test-id="testid-explore-models"> <div className="h-full">
<ScrollArea> <ScrollArea>
<div className="relative"> <div className="relative">
<img <img

View File

@ -3,19 +3,26 @@ import { useEffect, useState } from 'react'
import React from 'react' import React from 'react'
import { useAtomValue } from 'jotai'
import { useServerLog } from '@/hooks/useServerLog' import { useServerLog } from '@/hooks/useServerLog'
import { serverEnabledAtom } from '@/helpers/atoms/LocalServer.atom'
const Logs = () => { const Logs = () => {
const { getServerLog } = useServerLog() const { getServerLog } = useServerLog()
const serverEnabled = useAtomValue(serverEnabledAtom)
const [logs, setLogs] = useState([]) const [logs, setLogs] = useState([])
useEffect(() => { useEffect(() => {
getServerLog().then((log) => { getServerLog().then((log) => {
if (typeof log?.split === 'function') setLogs(log.split(/\r?\n|\r|\n/g)) if (typeof log?.split === 'function') {
setLogs(log.split(/\r?\n|\r|\n/g))
}
}) })
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [logs]) }, [logs, serverEnabled])
return ( return (
<div className="overflow-hidden"> <div className="overflow-hidden">

View File

@ -1,7 +1,6 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
'use client' 'use client'
import React, { useEffect, useState } from 'react' import React, { useCallback, useEffect, useState } from 'react'
import ScrollToBottom from 'react-scroll-to-bottom' import ScrollToBottom from 'react-scroll-to-bottom'
@ -29,6 +28,7 @@ import { ExternalLinkIcon, InfoIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge' import { twMerge } from 'tailwind-merge'
import CardSidebar from '@/containers/CardSidebar' import CardSidebar from '@/containers/CardSidebar'
import DropdownListSidebar, { import DropdownListSidebar, {
selectedModelAtom, selectedModelAtom,
} from '@/containers/DropdownListSidebar' } from '@/containers/DropdownListSidebar'
@ -58,7 +58,7 @@ const portAtom = atom('1337')
const LocalServerScreen = () => { const LocalServerScreen = () => {
const [errorRangePort, setErrorRangePort] = useState(false) const [errorRangePort, setErrorRangePort] = useState(false)
const [serverEnabled, setServerEnabled] = useAtom(serverEnabledAtom) const [serverEnabled, setServerEnabled] = useAtom(serverEnabledAtom)
const showing = useAtomValue(showRightSideBarAtom) const showRightSideBar = useAtomValue(showRightSideBarAtom)
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom) const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)
const modelEngineParams = toSettingParams(activeModelParams) const modelEngineParams = toSettingParams(activeModelParams)
@ -66,43 +66,44 @@ const LocalServerScreen = () => {
const { openServerLog, clearServerLog } = useServerLog() const { openServerLog, clearServerLog } = useServerLog()
const { startModel, stateModel } = useActiveModel() const { startModel, stateModel } = useActiveModel()
const [selectedModel] = useAtom(selectedModelAtom) const selectedModel = useAtomValue(selectedModelAtom)
const [isCorsEnabled, setIsCorsEnabled] = useAtom(corsEnabledAtom) const [isCorsEnabled, setIsCorsEnabled] = useAtom(corsEnabledAtom)
const [isVerboseEnabled, setIsVerboseEnabled] = useAtom(verboseEnabledAtom) const [isVerboseEnabled, setIsVerboseEnabled] = useAtom(verboseEnabledAtom)
const [host, setHost] = useAtom(hostAtom) const [host, setHost] = useAtom(hostAtom)
const [port, setPort] = useAtom(portAtom) const [port, setPort] = useAtom(portAtom)
const hostOptions = ['127.0.0.1', '0.0.0.0']
const FIRST_TIME_VISIT_API_SERVER = 'firstTimeVisitAPIServer' const FIRST_TIME_VISIT_API_SERVER = 'firstTimeVisitAPIServer'
const [firstTimeVisitAPIServer, setFirstTimeVisitAPIServer] = const [firstTimeVisitAPIServer, setFirstTimeVisitAPIServer] =
useState<boolean>(false) useState<boolean>(false)
const handleChangePort = (value: any) => { const handleChangePort = useCallback(
if (Number(value) <= 0 || Number(value) >= 65536) { (value: string) => {
setErrorRangePort(true) if (Number(value) <= 0 || Number(value) >= 65536) {
} else { setErrorRangePort(true)
setErrorRangePort(false) } else {
} setErrorRangePort(false)
setPort(value) }
} setPort(value)
},
[setPort]
)
useEffect(() => { useEffect(() => {
if ( if (localStorage.getItem(FIRST_TIME_VISIT_API_SERVER) == null) {
localStorage.getItem(FIRST_TIME_VISIT_API_SERVER) === null ||
localStorage.getItem(FIRST_TIME_VISIT_API_SERVER) === 'true'
) {
localStorage.setItem(FIRST_TIME_VISIT_API_SERVER, 'true')
setFirstTimeVisitAPIServer(true) setFirstTimeVisitAPIServer(true)
} }
}, [firstTimeVisitAPIServer]) }, [firstTimeVisitAPIServer])
useEffect(() => { useEffect(() => {
handleChangePort(port) handleChangePort(port)
}, []) }, [handleChangePort, port])
return ( return (
<div className="flex h-full w-full"> <div className="flex h-full w-full" data-testid="local-server-testid">
{/* Left SideBar */} {/* Left SideBar */}
<div className="flex h-full w-60 flex-shrink-0 flex-col overflow-y-auto border-r border-border"> <div className="flex h-full w-60 flex-shrink-0 flex-col overflow-y-auto border-r border-border">
<div className="p-4"> <div className="p-4">
@ -116,7 +117,7 @@ const LocalServerScreen = () => {
<Button <Button
block block
themes={serverEnabled ? 'danger' : 'primary'} themes={serverEnabled ? 'danger' : 'primary'}
disabled={stateModel.loading || errorRangePort} disabled={stateModel.loading || errorRangePort || !selectedModel}
onClick={() => { onClick={() => {
if (serverEnabled) { if (serverEnabled) {
window.core?.api?.stopServer() window.core?.api?.stopServer()
@ -166,8 +167,19 @@ const LocalServerScreen = () => {
<SelectValue /> <SelectValue />
</SelectTrigger> </SelectTrigger>
<SelectContent> <SelectContent>
<SelectItem value="127.0.0.1">127.0.0.1</SelectItem> {hostOptions.map((option, i) => {
<SelectItem value="0.0.0.0">0.0.0.0</SelectItem> return (
<SelectItem
key={i}
value={option}
className={twMerge(
host === option && 'bg-secondary'
)}
>
{option}
</SelectItem>
)
})}
</SelectContent> </SelectContent>
</Select> </Select>
@ -176,6 +188,7 @@ const LocalServerScreen = () => {
'w-[70px] flex-shrink-0', 'w-[70px] flex-shrink-0',
errorRangePort && 'border-danger' errorRangePort && 'border-danger'
)} )}
type="number"
value={port} value={port}
onChange={(e) => { onChange={(e) => {
handleChangePort(e.target.value) handleChangePort(e.target.value)
@ -275,7 +288,7 @@ const LocalServerScreen = () => {
{/* Middle Bar */} {/* Middle Bar */}
<ScrollToBottom className="relative flex h-full w-full flex-col overflow-auto bg-background"> <ScrollToBottom className="relative flex h-full w-full flex-col overflow-auto bg-background">
<div className="sticky top-0 flex items-center justify-between bg-zinc-100 px-4 py-2 dark:bg-secondary/30"> <div className="sticky top-0 flex items-center justify-between bg-zinc-100 px-4 py-2 dark:bg-zinc-600">
<h2 className="font-bold">Server Logs</h2> <h2 className="font-bold">Server Logs</h2>
<div className="space-x-2"> <div className="space-x-2">
<Button <Button
@ -345,15 +358,13 @@ const LocalServerScreen = () => {
<div <div
className={twMerge( className={twMerge(
'h-full flex-shrink-0 overflow-x-hidden border-l border-border bg-background transition-all duration-100 dark:bg-background/20', 'h-full flex-shrink-0 overflow-x-hidden border-l border-border bg-background transition-all duration-100 dark:bg-background/20',
showing showRightSideBar
? 'w-80 translate-x-0 opacity-100' ? 'w-80 translate-x-0 opacity-100'
: 'w-0 translate-x-full opacity-0' : 'w-0 translate-x-full opacity-0'
)} )}
> >
<div className="px-4"> <div className="px-4 pt-4">
<div className="mt-4"> <DropdownListSidebar strictedThread={false} />
<DropdownListSidebar />
</div>
{componentDataEngineSetting.filter( {componentDataEngineSetting.filter(
(x) => x.name === 'prompt_template' (x) => x.name === 'prompt_template'

Some files were not shown because too many files have changed in this diff Show More