fix: update new api from cortex to support 0.5.0 (#3221)

* fix: update new api from cortex to support 0.5.0

Signed-off-by: James <namnh0122@gmail.com>

* fix stop button for streaming

Signed-off-by: James <namnh0122@gmail.com>

* fix stop inference for nonstreaming

Signed-off-by: James <namnh0122@gmail.com>

* chore: remove umami prevent tracking call to vercel

Signed-off-by: James <namnh0122@gmail.com>

* add warning modal when running more than 2 models concurrently

Signed-off-by: James <namnh0122@gmail.com>

* fix: skip summarize if abort

Signed-off-by: James <namnh0122@gmail.com>

* 0.5.0-3

* add inference error popup

Signed-off-by: James <namnh0122@gmail.com>

* add back import local model

Signed-off-by: James <namnh0122@gmail.com>

* fix: max token issue (#3225)

Signed-off-by: James <namnh0122@gmail.com>

* format status

Signed-off-by: James <namnh0122@gmail.com>

* fix migration missing instructions

Signed-off-by: James <namnh0122@gmail.com>

* fix: wait for cortex process overlay should be on top (#3224)

* fix: wait for cortex process overlay should be on top

* chore: update cortex.js

* Cortex 0.5.0-5

* add import model to my model screen

Signed-off-by: James <namnh0122@gmail.com>

* fix: should migrate symlink models (#3226)

* fix import on windows (#3229)

Signed-off-by: James <namnh0122@gmail.com>

* fix yarn lint

Signed-off-by: James <namnh0122@gmail.com>

* fix: clean up port before start jan (#3232)

Signed-off-by: James <namnh0122@gmail.com>

---------

Signed-off-by: James <namnh0122@gmail.com>
Co-authored-by: Van Pham <64197333+Van-QA@users.noreply.github.com>
Co-authored-by: Louis <louis@jan.ai>
This commit is contained in:
NamH 2024-08-02 09:37:04 +07:00 committed by GitHub
parent e8ee694abd
commit ec9b5bf682
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
54 changed files with 1120 additions and 838 deletions

View File

@ -69,6 +69,11 @@ export interface DownloadState2 {
*/
type: DownloadType2
/**
* Percentage of the download.
*/
progress: number
/**
* The status of the download.
*/

View File

@ -34,5 +34,5 @@ module.exports = {
{ name: 'Link', linkAttribute: 'to' },
],
},
ignorePatterns: ['build', 'renderer', 'node_modules', '@global'],
ignorePatterns: ['build', 'renderer', 'node_modules', '@global', 'playwright-report'],
}

View File

@ -1,6 +1,24 @@
@echo off
set /p CORTEX_VERSION=<./resources/version.txt
set DOWNLOAD_URL=https://github.com/janhq/cortex/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-amd64-windows.tar.gz
echo Downloading from %DOWNLOAD_URL%
setlocal
.\node_modules\.bin\download %DOWNLOAD_URL% -e -o ./resources/win
:: Read the version from the version.txt file
set /p CORTEX_VERSION=<./resources/version.txt
:: Set the download URL
set DOWNLOAD_URL=https://github.com/janhq/cortex/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-amd64-windows.tar.gz
:: Set the output directory and file name
set OUTPUT_DIR=./resources/win
set OUTPUT_FILE=%OUTPUT_DIR%/cortex.exe
echo %OUTPUT_FILE%
:: Check if the file already exists
if exist %OUTPUT_FILE% (
echo File %OUTPUT_FILE% already exists. Skipping download.
) else (
echo Downloading from %DOWNLOAD_URL%
.\node_modules\.bin\download %DOWNLOAD_URL% -e -o %OUTPUT_DIR%
)
endlocal

View File

@ -14,7 +14,7 @@ import {
writeFileSync,
readFileSync,
existsSync,
mkdirSync,
mkdirSync
} from 'fs'
import { dump } from 'js-yaml'
import os from 'os'
@ -229,25 +229,20 @@ export function handleAppIPCs() {
const cortexHomeDir = join(os.homedir(), 'cortex')
const cortexModelFolderPath = join(cortexHomeDir, 'models')
if(!existsSync(cortexModelFolderPath))
mkdirSync(cortexModelFolderPath)
console.log('cortexModelFolderPath', cortexModelFolderPath)
const reflect = require('@alumna/reflect')
for (const modelName of allModelFolders) {
const modelFolderPath = join(janModelFolderPath, modelName)
try {
const filesInModelFolder = readdirSync(modelFolderPath)
if (filesInModelFolder.length <= 1) {
// if only have model.json file or empty folder, we skip it
continue
}
const destinationPath = join(cortexModelFolderPath, modelName)
// create folder if not exist
if (!existsSync(destinationPath)) {
mkdirSync(destinationPath, { recursive: true })
}
try {
const modelJsonFullPath = join(
janModelFolderPath,
modelName,
@ -256,12 +251,25 @@ export function handleAppIPCs() {
const model = JSON.parse(readFileSync(modelJsonFullPath, 'utf-8'))
const fileNames: string[] = model.sources.map((x: any) => x.filename)
let files: string[] = []
if(filesInModelFolder.length > 1) {
// prepend fileNames with cortexModelFolderPath
const files = fileNames.map((x: string) =>
files = fileNames.map((x: string) =>
join(cortexModelFolderPath, model.id, x)
)
} else if(model.sources.length && !/^(http|https):\/\/[^/]+\/.*/.test(model.sources[0].url)) {
// Symlink case
files = [ model.sources[0].url ]
} else continue;
const engine = 'cortex.llamacpp'
// create folder if not exist
// only for local model files
if (!existsSync(destinationPath) && filesInModelFolder.length > 1) {
mkdirSync(destinationPath, { recursive: true })
}
const engine = (model.engine === 'nitro' || model.engine === 'cortex') ? 'cortex.llamacpp' : (model.engine ?? 'cortex.llamacpp')
const updatedModelFormat = {
id: model.id,
@ -288,7 +296,7 @@ export function handleAppIPCs() {
max_tokens: model.parameters?.max_tokens ?? 2048,
stream: model.parameters?.stream ?? true,
}
if(filesInModelFolder.length > 1 ) {
const { err } = await reflect({
src: modelFolderPath,
dest: destinationPath,
@ -298,14 +306,17 @@ export function handleAppIPCs() {
overwrite: true,
errorOnExist: false,
})
if (err) console.error(err)
else {
if (err) {
console.error(err);
continue;
}
}
// create the model.yml file
const modelYamlData = dump(updatedModelFormat)
const modelYamlPath = join(cortexModelFolderPath, `${modelName}.yaml`)
writeFileSync(modelYamlPath, modelYamlData)
}
} catch (err) {
console.error(err)
}
@ -316,6 +327,13 @@ export function handleAppIPCs() {
NativeRoute.getAllMessagesAndThreads,
async (_event): Promise<any> => {
const janThreadFolderPath = join(getJanDataFolderPath(), 'threads')
// check if exist
if (!existsSync(janThreadFolderPath)) {
return {
threads: [],
messages: [],
}
}
// get children of thread folder
const allThreadFolders = readdirSync(janThreadFolderPath)
const threads: any[] = []
@ -335,6 +353,8 @@ export function handleAppIPCs() {
threadFolder,
'messages.jsonl'
)
if(!existsSync(messageFullPath)) continue;
const lines = readFileSync(messageFullPath, 'utf-8')
.toString()
.split('\n')
@ -357,6 +377,10 @@ export function handleAppIPCs() {
NativeRoute.getAllLocalModels,
async (_event): Promise<boolean> => {
const janModelsFolderPath = join(getJanDataFolderPath(), 'models')
if (!existsSync(janModelsFolderPath)) {
return false
}
// get children of thread folder
const allModelsFolders = readdirSync(janModelsFolderPath)
let hasLocalModels = false

View File

@ -1,7 +1,7 @@
import { app, BrowserWindow } from 'electron'
import { join, resolve } from 'path'
import { exec } from 'child_process'
import { exec, execSync, ChildProcess } from 'child_process'
import { cortexPath } from './cortex-runner'
/**
@ -56,13 +56,18 @@ log.info('Log from the main process')
// replace all console.log to log
Object.assign(console, log.functions)
let cortexService: ChildProcess | undefined = undefined
app
.whenReady()
.then(() => killProcessesOnPort(3929))
.then(() => killProcessesOnPort(1337))
.then(() => {
log.info('Starting cortex with path:', cortexPath)
const command = `${cortexPath} -a 127.0.0.1 -p 1337`
log.info('Starting cortex with command:', command)
// init cortex
// running shell command cortex init -s
exec(`${cortexPath}`, (error, stdout, stderr) => {
cortexService = exec(`${command}`, (error, stdout, stderr) => {
if (error) {
log.error(`error: ${error.message}`)
return
@ -123,25 +128,37 @@ app.on('open-url', (_event, url) => {
})
app.once('quit', async () => {
await stopApiServer()
cleanUpAndQuit()
})
app.once('window-all-closed', async () => {
await stopApiServer()
await stopCortexService()
cleanUpAndQuit()
})
async function stopApiServer() {
async function stopCortexService() {
try {
console.log('Stopping API server')
const response = await fetch('http://localhost:1337/v1/process', {
const pid = cortexService?.pid
if (!pid) {
console.log('No cortex service to stop.')
return
}
process.kill(pid)
console.log(`Service with PID ${pid} has been terminated.`)
} catch (error) {
console.error('Error killing service:', error)
}
}
async function stopApiServer() {
// this function is not meant to be success. It will throw an error.
try {
await fetch('http://localhost:1337/v1/system', {
method: 'DELETE',
})
console.log('Response status:', response.status)
} catch (error) {
console.error('Error stopping API server:', error)
// do nothing
}
}
@ -154,6 +171,88 @@ function handleIPCs() {
handleAppIPCs()
}
function killProcessesOnPort(port: number): void {
try {
console.log(`Killing processes on port ${port}...`)
if (process.platform === 'win32') {
killProcessesOnWindowsPort(port)
} else {
killProcessesOnUnixPort(port)
}
} catch (error) {
console.error(
`Failed to kill process(es) on port ${port}: ${(error as Error).message}`
)
}
}
function killProcessesOnWindowsPort(port: number): void {
let result: string
try {
result = execSync(`netstat -ano | findstr :${port}`).toString()
} catch (error) {
console.log(`No processes found on port ${port}.`)
return
}
const lines = result.split('\n').filter(Boolean)
if (lines.length === 0) {
console.log(`No processes found on port ${port}.`)
return
}
const pids = lines
.map((line) => {
const parts = line.trim().split(/\s+/)
return parts[parts.length - 1]
})
.filter((pid): pid is string => Boolean(pid) && !isNaN(Number(pid)))
if (pids.length === 0) {
console.log(`No valid PIDs found for port ${port}.`)
return
}
const uniquePids = Array.from(new Set(pids))
console.log('uniquePids', uniquePids)
uniquePids.forEach((pid) => {
try {
execSync(`taskkill /PID ${pid} /F`)
console.log(
`Process with PID ${pid} on port ${port} has been terminated.`
)
} catch (error) {
console.error(
`Failed to kill process with PID ${pid}: ${(error as Error).message}`
)
}
})
}
function killProcessesOnUnixPort(port: number): void {
let pids: string[]
try {
pids = execSync(`lsof -ti tcp:${port}`)
.toString()
.trim()
.split('\n')
.filter(Boolean)
} catch (error) {
if ((error as { status?: number }).status === 1) {
console.log(`No processes found on port ${port}.`)
return
}
throw error // Re-throw if it's not the "no processes found" error
}
pids.forEach((pid) => {
process.kill(parseInt(pid), 'SIGTERM')
console.log(`Process with PID ${pid} on port ${port} has been terminated.`)
})
}
/**
* Suppress Node error messages
*/

View File

@ -32,6 +32,7 @@ class WindowManager {
x: bounds.x,
y: bounds.y,
webPreferences: {
allowRunningInsecureContent: true,
nodeIntegration: true,
preload: preloadPath,
webSecurity: false,

View File

@ -1 +1 @@
0.5.0-1
0.5.0-5

View File

@ -1,9 +1,11 @@
import { useCallback } from 'react'
import { Model } from '@janhq/core'
import { Button, Badge } from '@janhq/joi'
import { useAtomValue } from 'jotai'
import useModels from '@/hooks/useModels'
import useModelStop from '@/hooks/useModelStop'
import {
activeModelsAtom,
@ -13,7 +15,7 @@ import {
const Column = ['Name', 'Engine', '']
const TableActiveModel: React.FC = () => {
const { stopModel } = useModels()
const stopModelMutation = useModelStop()
const activeModels = useAtomValue(activeModelsAtom)
const downloadedModels = useAtomValue(downloadedModelsAtom)
@ -25,6 +27,13 @@ const TableActiveModel: React.FC = () => {
}
})
const onStopModelClick = useCallback(
(modelId: string) => {
stopModelMutation.mutate(modelId)
},
[stopModelMutation]
)
return (
<div className="m-4 mr-0 w-1/2">
<div className="overflow-hidden rounded-lg border border-[hsla(var(--app-border))]">
@ -58,7 +67,7 @@ const TableActiveModel: React.FC = () => {
<td className="px-4 py-2 text-center">
<Button
theme="destructive"
onClick={() => stopModel(model.model)}
onClick={() => onStopModelClick(model.model)}
>
Stop
</Button>

View File

@ -57,7 +57,7 @@ const SystemMonitor: React.FC = () => {
const register = useCallback(async () => {
if (abortControllerRef.current) return
abortControllerRef.current = new AbortController()
await fetchEventSource(`${host}/events/resources`, {
await fetchEventSource(`${host}/system/events/resources`, {
onmessage(ev) {
if (!ev.data || ev.data === '') return
try {

View File

@ -16,8 +16,10 @@ import TopPanel from '@/containers/Layout/TopPanel'
import { getImportModelStageAtom } from '@/hooks/useImportModel'
import DownloadLocalModelModal from '@/screens/HubScreen2/components/DownloadLocalModelModal'
import InferenceErrorModal from '@/screens/HubScreen2/components/InferenceErrorModal'
import SetUpApiKeyModal from '@/screens/HubScreen2/components/SetUpApiKeyModal'
import SetUpRemoteModelModal from '@/screens/HubScreen2/components/SetUpRemoteModelModal'
import WarningMultipleModelModal from '@/screens/HubScreen2/components/WarningMultipleModelModal'
import { SUCCESS_SET_NEW_DESTINATION } from '@/screens/Settings/Advanced/DataFolder'
import CancelModelImportModal from '@/screens/Settings/CancelModelImportModal'
import ChooseWhatToImportModal from '@/screens/Settings/ChooseWhatToImportModal'
@ -82,6 +84,8 @@ const BaseLayout = () => {
{importModelStage === 'EDIT_MODEL_INFO' && <EditModelInfoModal />}
{importModelStage === 'CONFIRM_CANCEL' && <CancelModelImportModal />}
<InferenceErrorModal />
<WarningMultipleModelModal />
<DownloadLocalModelModal />
<SetUpRemoteModelModal />
<SetUpApiKeyModal />

View File

@ -6,8 +6,14 @@ import { useAtomValue, useSetAtom } from 'jotai'
import { downloadStateListAtom } from '@/hooks/useDownloadState'
import useModels from '@/hooks/useModels'
import { waitingForCortexAtom } from '@/helpers/atoms/App.atom'
import { hostAtom } from '@/helpers/atoms/AppConfig.atom'
import {
setImportingModelSuccessAtom,
updateImportingModelProgressAtom,
} from '@/helpers/atoms/Model.atom'
const DownloadEventListener: React.FC = () => {
const host = useAtomValue(hostAtom)
@ -15,15 +21,52 @@ const DownloadEventListener: React.FC = () => {
const abortController = useRef(new AbortController())
const setDownloadStateList = useSetAtom(downloadStateListAtom)
const setWaitingForCortex = useSetAtom(waitingForCortexAtom)
const { getModels } = useModels()
const updateImportingModelProgress = useSetAtom(
updateImportingModelProgressAtom
)
const setImportingModelSuccess = useSetAtom(setImportingModelSuccessAtom)
const handleLocalImportModels = useCallback(
(events: DownloadState2[]) => {
if (events.length === 0) return
for (const event of events) {
if (event.progress === 100) {
setImportingModelSuccess(event.id)
} else {
updateImportingModelProgress(event.id, event.progress)
}
}
getModels()
},
[setImportingModelSuccess, updateImportingModelProgress, getModels]
)
const subscribeDownloadEvent = useCallback(async () => {
if (isRegistered.current) return
await fetchEventSource(`${host}/events/download`, {
await fetchEventSource(`${host}/system/events/download`, {
onmessage(ev) {
if (!ev.data || ev.data === '') return
try {
const downloadEvent = JSON.parse(ev.data) as DownloadState2[]
setDownloadStateList(downloadEvent)
const downloadEvents = JSON.parse(ev.data) as DownloadState2[]
const remoteDownloadEvents: DownloadState2[] = []
const localImportEvents: DownloadState2[] = []
// filter out the import local events
for (const event of downloadEvents) {
console.debug('Receiving event', event)
if (
isAbsolutePath(event.id) &&
event.type === 'model' &&
event.children.length === 0
) {
localImportEvents.push(event)
} else {
remoteDownloadEvents.push(event)
}
}
handleLocalImportModels(localImportEvents)
setDownloadStateList(remoteDownloadEvents)
} catch (err) {
console.error(err)
}
@ -40,7 +83,7 @@ const DownloadEventListener: React.FC = () => {
})
console.log('Download event subscribed')
isRegistered.current = true
}, [host, setDownloadStateList, setWaitingForCortex])
}, [host, setDownloadStateList, setWaitingForCortex, handleLocalImportModels])
const unsubscribeDownloadEvent = useCallback(() => {
if (!isRegistered.current) return
@ -60,4 +103,22 @@ const DownloadEventListener: React.FC = () => {
return null
}
/**
 * Heuristic check for whether a path string is an absolute local path.
 * Recognizes Unix-style paths ("/...") and Windows drive-letter paths
 * ("C:/..." or "C:\..."); everything else is treated as non-absolute.
 */
const isAbsolutePath = (path: string): boolean => {
  // Ignore accidental surrounding whitespace before inspecting the prefix.
  const candidate = path.trim()
  return candidate.startsWith('/') || /^[A-Za-z]:[/\\]/.test(candidate)
}
export default DownloadEventListener

View File

@ -9,7 +9,6 @@ import DownloadEventListener from './DownloadEventListener'
import KeyListener from './KeyListener'
import ModelEventListener from './ModelEventListener'
import ModelImportListener from './ModelImportListener'
const EventListenerWrapper: React.FC = () => (
<Fragment>
@ -19,7 +18,6 @@ const EventListenerWrapper: React.FC = () => (
<ModelEventListener />
<ClipboardListener />
<DeepLinkListener />
<ModelImportListener />
</Fragment>
)

View File

@ -91,7 +91,7 @@ function ModelEventListener() {
if (abortController.current) return
abortController.current = new AbortController()
await fetchEventSource(`${host}/events/model`, {
await fetchEventSource(`${host}/system/events/model`, {
onmessage(ev) {
if (!ev.data || ev.data === '') return
try {

View File

@ -1,103 +0,0 @@
import { useCallback, useEffect } from 'react'
import { ImportingModel, Model } from '@janhq/core'
import { useSetAtom } from 'jotai'
import { snackbar } from '../Toast'
import {
setImportingModelErrorAtom,
setImportingModelSuccessAtom,
updateImportingModelProgressAtom,
} from '@/helpers/atoms/Model.atom'
const ModelImportListener: React.FC = () => {
const updateImportingModelProgress = useSetAtom(
updateImportingModelProgressAtom
)
const setImportingModelSuccess = useSetAtom(setImportingModelSuccessAtom)
const setImportingModelFailed = useSetAtom(setImportingModelErrorAtom)
const onImportModelUpdate = useCallback(
async (state: ImportingModel) => {
if (!state.importId) return
updateImportingModelProgress(state.importId, state.percentage ?? 0)
},
[updateImportingModelProgress]
)
const onImportModelFailed = useCallback(
async (state: ImportingModel) => {
if (!state.importId) return
setImportingModelFailed(state.importId, state.error ?? '')
},
[setImportingModelFailed]
)
const onImportModelSuccess = useCallback(
(state: ImportingModel) => {
if (!state.modelId) return
// events.emit(ModelEvent.OnModelsUpdate, {})
setImportingModelSuccess(state.importId, state.modelId)
},
[setImportingModelSuccess]
)
const onImportModelFinished = useCallback((importedModels: Model[]) => {
const modelText = importedModels.length === 1 ? 'model' : 'models'
snackbar({
description: `Successfully imported ${importedModels.length} ${modelText}`,
type: 'success',
})
}, [])
useEffect(() => {
console.debug('ModelImportListener: registering event listeners..')
// events.on(
// LocalImportModelEvent.onLocalImportModelUpdate,
// onImportModelUpdate
// )
// events.on(
// LocalImportModelEvent.onLocalImportModelSuccess,
// onImportModelSuccess
// )
// events.on(
// LocalImportModelEvent.onLocalImportModelFinished,
// onImportModelFinished
// )
// events.on(
// LocalImportModelEvent.onLocalImportModelFailed,
// onImportModelFailed
// )
// return () => {
// console.debug('ModelImportListener: unregistering event listeners...')
// events.off(
// LocalImportModelEvent.onLocalImportModelUpdate,
// onImportModelUpdate
// )
// events.off(
// LocalImportModelEvent.onLocalImportModelSuccess,
// onImportModelSuccess
// )
// events.off(
// LocalImportModelEvent.onLocalImportModelFinished,
// onImportModelFinished
// )
// events.off(
// LocalImportModelEvent.onLocalImportModelFailed,
// onImportModelFailed
// )
// }
}, [
onImportModelUpdate,
onImportModelSuccess,
onImportModelFinished,
onImportModelFailed,
])
return null
}
export default ModelImportListener

View File

@ -13,8 +13,6 @@ import ThemeWrapper from '@/containers/Providers/Theme'
import { setupCoreServices } from '@/services/coreService'
import Umami from '@/utils/umami'
import DataLoader from './DataLoader'
import ModalMigrations from './ModalMigrations'
@ -35,7 +33,7 @@ const Providers = ({ children }: PropsWithChildren) => {
<ThemeWrapper>
<JotaiWrapper>
<QueryClientProvider client={queryClient}>
<Umami />
{/* <Umami /> */}
{setupCore && (
<Fragment>
<DataLoader />

View File

@ -1,5 +1,9 @@
import { useCallback, useEffect } from 'react'
import { Modal } from '@janhq/joi'
import { useAtomValue } from 'jotai'
import { useAtom, useAtomValue } from 'jotai'
import useCortex from '@/hooks/useCortex'
import Spinner from '../Loader/Spinner'
@ -8,12 +12,22 @@ import { hostAtom } from '@/helpers/atoms/AppConfig.atom'
const WaitingForCortexModal: React.FC = () => {
const host = useAtomValue(hostAtom)
const open = useAtomValue(waitingForCortexAtom)
const [waitingForCortex, setWaitingForCortex] = useAtom(waitingForCortexAtom)
const { isSystemAlive } = useCortex()
const checkSystemAlive = useCallback(async () => {
setWaitingForCortex(!(await isSystemAlive()))
}, [setWaitingForCortex, isSystemAlive])
// Check health for the first time on mount
useEffect(() => {
checkSystemAlive()
}, [checkSystemAlive])
return (
<Modal
hideClose
open={open}
open={waitingForCortex}
title={'Waiting for cortex'}
content={
<div className="flex gap-x-2">

View File

@ -11,7 +11,7 @@ export const mainViewStateAtom = atom<MainViewState>(MainViewState.Thread)
export const defaultJanDataFolderAtom = atom<string>('')
export const waitingForCortexAtom = atom<boolean>(false)
export const waitingForCortexAtom = atom<boolean>(true)
// Store panel atom
export const showLeftPanelAtom = atom<boolean>(true)

View File

@ -61,14 +61,14 @@ export const setImportingModelErrorAtom = atom(
export const setImportingModelSuccessAtom = atom(
null,
(get, set, importId: string, modelId: string) => {
(get, set, importId: string) => {
const model = get(importingModelsAtom).find((x) => x.importId === importId)
if (!model) return
const newModel: ImportingModel = {
...model,
modelId,
modelId: undefined,
status: 'IMPORTED',
percentage: 1,
percentage: 100,
}
const newList = get(importingModelsAtom).map((x) =>
x.importId === importId ? newModel : x

View File

@ -61,17 +61,16 @@ export const setActiveThreadIdAtom = atom(
}
)
export const waitingToSendMessage = atom<boolean | undefined>(undefined)
export const isLoadingModelAtom = atom<boolean | undefined>(undefined)
export const isGeneratingResponseAtom = atom<boolean | undefined>(undefined)
export const isGeneratingResponseAtom = atom<boolean>(false)
/**
* Stores all threads for the current user
*/
export const threadsAtom = atom<Thread[]>([])
export const deleteThreadAtom = atom(null, (get, set, threadId: string) => {
export const deleteThreadAtom = atom(null, (_get, set, threadId: string) => {
set(threadsAtom, (threads) => {
// set active thread to the latest
const allThreads = threads.filter((c) => c.id !== threadId)

View File

@ -1,6 +1,7 @@
import 'cortexso-node/shims/web'
import { useCallback } from 'react'
import { Cortex } from '@cortexso/cortex.js'
import { Engine } from '@cortexso/cortex.js/resources'
import {
Assistant,
Model,
@ -11,10 +12,9 @@ import {
AssistantCreateParams,
AssistantUpdateParams,
LlmEngine,
LlmEngines,
} from '@janhq/core'
import { Cortex } from 'cortexso-node'
import { useAtomValue } from 'jotai'
import { UpdateConfigMutationVariables } from './useEngineMutation'
@ -24,22 +24,6 @@ import { MessageUpdateMutationVariables } from './useMessageUpdateMutation'
import { hostAtom } from '@/helpers/atoms/AppConfig.atom'
const EngineInitStatuses = [
'ready',
'not_initialized',
'missing_configuration',
'not_supported',
] as const
export type EngineInitStatus = (typeof EngineInitStatuses)[number]
export type EngineStatus = {
name: LlmEngine
description: string
version: string
productName: string
status: EngineInitStatus
}
const useCortex = () => {
const host = useAtomValue(hostAtom)
@ -49,50 +33,33 @@ const useCortex = () => {
dangerouslyAllowBrowser: true,
})
// TODO: put in to cortexso-node?
const getEngineStatuses = useCallback(async (): Promise<EngineStatus[]> => {
const response = await fetch(`${host}/engines`, {
method: 'GET',
})
const data = await response.json()
const engineStatuses: EngineStatus[] = []
data.data.forEach((engineStatus: EngineStatus) => {
engineStatuses.push(engineStatus)
})
return engineStatuses
}, [host])
// TODO: put in to cortexso-node?
const getEngineStatus = useCallback(
async (engine: LlmEngine): Promise<EngineStatus | undefined> => {
try {
const response = await fetch(`${host}/engines/${engine}`, {
method: 'GET',
})
const data = (await response.json()) as EngineStatus
return data
} catch (err) {
console.error(err)
const getEngineStatuses = useCallback(async (): Promise<Engine[]> => {
const engineResponse = await cortex.engines.list()
// @ts-expect-error incompatible types
const engineStatuses: Engine[] = engineResponse.body.data.map(
(engine: Engine) => {
return {
name: engine.name,
description: engine.description,
version: engine.version,
productName: engine.productName,
status: engine.status,
}
}
},
[host]
)
// TODO: put in to cortexso-node?
return engineStatuses
}, [cortex.engines])
const initializeEngine = useCallback(
async (engine: LlmEngine) => {
try {
await fetch(`${host}/engines/${engine}/init/`, {
method: 'POST',
headers: {
accept: 'application/json',
},
})
await cortex.engines.init(engine)
} catch (err) {
console.error(err)
}
},
[host]
[cortex.engines]
)
const fetchAssistants = useCallback(async () => {
@ -132,8 +99,15 @@ const useCortex = () => {
console.debug('Model id is empty, skipping', model)
continue
}
const engine = LlmEngines.find((engine) => engine === model.engine)
if (!engine) {
console.error(`Model ${modelId} has an invalid engine ${model.engine}`)
continue
}
models.push({
...model,
engine: engine,
model: modelId,
// @ts-expect-error each model must have associated files
files: model['files'],
@ -263,26 +237,18 @@ const useCortex = () => {
const updateModel = useCallback(
async (modelId: string, options: Record<string, unknown>) => {
try {
return await fetch(`${host}/models/${modelId}`, {
method: 'PATCH',
headers: {
'accept': 'application/json',
// eslint-disable-next-line @typescript-eslint/naming-convention
'Content-Type': 'application/json',
},
body: JSON.stringify(options),
})
return await cortex.models.update(modelId, options)
} catch (err) {
console.error(err)
}
},
[host]
[cortex.models]
)
// TODO: put this into cortexso-node
const downloadModel = useCallback(
async (modelId: string, fileName?: string, persistedModelId?: string) => {
try {
// return await cortex.models.download(modelId)
return await fetch(`${host}/models/${modelId}/pull`, {
method: 'POST',
headers: {
@ -305,19 +271,12 @@ const useCortex = () => {
const abortDownload = useCallback(
async (downloadId: string) => {
try {
return await fetch(`${host}/models/${downloadId}/pull`, {
method: 'DELETE',
headers: {
'accept': 'application/json',
// eslint-disable-next-line @typescript-eslint/naming-convention
'Content-Type': 'application/json',
},
})
return await cortex.models.abortDownload(downloadId)
} catch (err) {
console.error(err)
}
},
[host]
[cortex.models]
)
const createAssistant = useCallback(
@ -335,22 +294,14 @@ const useCortex = () => {
// TODO: add this to cortex-node
const registerEngineConfig = useCallback(
async (variables: UpdateConfigMutationVariables) => {
try {
const { engine, config } = variables
await fetch(`${host}/engines/${engine}`, {
method: 'PATCH',
headers: {
'accept': 'application/json',
// eslint-disable-next-line @typescript-eslint/naming-convention
'Content-Type': 'application/json',
},
body: JSON.stringify(config),
})
try {
await cortex.engines.update(engine, config)
} catch (err) {
console.error(err)
}
},
[host]
[cortex.engines]
)
// add this to cortex-node?
@ -368,6 +319,15 @@ const useCortex = () => {
[host]
)
const isSystemAlive = useCallback(async () => {
try {
await cortex.system.status()
return true
} catch {
return false
}
}, [cortex.system])
return {
fetchAssistants,
fetchThreads,
@ -392,9 +352,9 @@ const useCortex = () => {
chatCompletionNonStreaming,
registerEngineConfig,
createModel,
getEngineStatus,
initializeEngine,
getEngineStatuses,
isSystemAlive,
}
}

View File

@ -10,6 +10,7 @@ export const addDownloadModelStateAtom = atom(
id: modelId,
title: modelId,
type: DownloadType2.Model,
progress: 0,
status: DownloadStatus.Downloading,
children: [
{

View File

@ -3,8 +3,6 @@ import { useCallback } from 'react'
import { ImportingModel } from '@janhq/core'
import { useSetAtom } from 'jotai'
import { v4 as uuidv4 } from 'uuid'
import { snackbar } from '@/containers/Toast'
import { getFileInfoFromFile } from '@/utils/file'
@ -26,17 +24,23 @@ export default function useDropModelBinaries() {
)
const supportedFiles = files.filter((file) => file.path.endsWith('.gguf'))
const importingModels: ImportingModel[] = supportedFiles.map((file) => ({
importId: uuidv4(),
const importingModels: ImportingModel[] = supportedFiles.map((file) => {
const normalizedPath = isWindows
? file.path.replace(/\\/g, '/')
: file.path
return {
importId: normalizedPath,
modelId: undefined,
name: file.name.replace('.gguf', ''),
name: normalizedPath.replace('.gguf', ''),
description: '',
path: file.path,
tags: [],
size: file.size,
status: 'PREPARING',
format: 'gguf',
}))
}
})
if (unsupportedFiles.length > 0) {
snackbar({
description: `Only files with .gguf extension can be imported.`,

View File

@ -1,6 +1,7 @@
import { Engine } from '@cortexso/cortex.js/resources'
import { useMutation, useQueryClient } from '@tanstack/react-query'
import useCortex, { EngineStatus } from './useCortex'
import useCortex from './useCortex'
import { engineQueryKey } from './useEngineQuery'
const useEngineInit = () => {
@ -10,21 +11,21 @@ const useEngineInit = () => {
return useMutation({
mutationFn: initializeEngine,
onSuccess: async (data, variables) => {
console.debug(`Engine ${variables} initialized`, data)
onSuccess: async (data, engineName) => {
console.debug(`Engine ${engineName} initialized`, data)
// optimistically set the engine status to 'ready'
const queryCacheData = await queryClient.getQueryData(engineQueryKey)
if (!queryCacheData) {
return queryClient.invalidateQueries({ queryKey: engineQueryKey })
}
const engineStatuses = queryCacheData as EngineStatus[]
const engineStatuses = queryCacheData as Engine[]
engineStatuses.forEach((engine) => {
if (engine.name === variables) {
if (engine.name === engineName) {
engine.status = 'ready'
}
})
console.log(`Updated engine status: ${engineStatuses}`)
console.debug(`Updated engine status: ${engineStatuses}`)
await queryClient.setQueryData(engineQueryKey, engineStatuses)
},

View File

@ -68,13 +68,12 @@ const useMigratingData = () => {
continue
}
const threadTitle: string = thread.title ?? 'New Thread'
const instruction: string = thread.assistants[0]?.instruction ?? ''
const instructions: string = thread.assistants[0]?.instructions ?? ''
// currently, we don't have api support for creating thread with messages
const cortexThread = await createThread(modelId, assistants[0])
console.log('createThread', cortexThread)
// update instruction
cortexThread.assistants[0].instructions = instruction
cortexThread.assistants[0].instructions = instructions
cortexThread.title = threadTitle
// update thread name

21
web/hooks/useModelStop.ts Normal file
View File

@ -0,0 +1,21 @@
import { useMutation } from '@tanstack/react-query'
import useCortex from './useCortex'
/**
 * React Query mutation hook that stops a running model via the Cortex API.
 * The mutation variable is the id of the model to stop; outcomes are only
 * logged, no cache invalidation is performed here.
 */
const useModelStop = () => {
  const { stopModel } = useCortex()

  return useMutation({
    mutationFn: stopModel,
    onSuccess: (data, modelId) =>
      console.debug(`Model ${modelId} stopped successfully`, data),
    onError: (error, modelId) =>
      console.debug(`Stop model ${modelId} error`, error),
  })
}

export default useModelStop

View File

@ -16,7 +16,6 @@ const useModels = () => {
const removeDownloadedModel = useSetAtom(removeDownloadedModelAtom)
const {
fetchModels,
stopModel: cortexStopModel,
deleteModel: cortexDeleteModel,
updateModel: cortexUpdateModel,
} = useCortex()
@ -29,11 +28,6 @@ const useModels = () => {
getDownloadedModels()
}, [setDownloadedModels, fetchModels])
const stopModel = useCallback(
async (modelId: string) => cortexStopModel(modelId),
[cortexStopModel]
)
const deleteModel = useCallback(
async (modelId: string) => {
await cortexDeleteModel(modelId)
@ -54,7 +48,7 @@ const useModels = () => {
[cortexUpdateModel]
)
return { getModels, stopModel, deleteModel, updateModel }
return { getModels, deleteModel, updateModel }
}
export default useModels

View File

@ -17,6 +17,11 @@ import { currentPromptAtom, editPromptAtom } from '@/containers/Providers/Jotai'
import { toaster } from '@/containers/Toast'
import { inferenceErrorAtom } from '@/screens/HubScreen2/components/InferenceErrorModal'
import { showWarningMultipleModelModalAtom } from '@/screens/HubScreen2/components/WarningMultipleModelModal'
import { concurrentModelWarningThreshold } from '@/screens/Settings/MyModels/ModelItem'
import useCortex from './useCortex'
import useEngineInit from './useEngineInit'
@ -89,6 +94,11 @@ const useSendMessage = () => {
const startModel = useModelStart()
const abortControllerRef = useRef<AbortController | undefined>(undefined)
const didUserAborted = useRef<boolean>(false)
const setInferenceErrorAtom = useSetAtom(inferenceErrorAtom)
const setShowWarningMultipleModelModal = useSetAtom(
showWarningMultipleModelModalAtom
)
const validatePrerequisite = useCallback(async (): Promise<boolean> => {
const errorTitle = 'Failed to send message'
@ -195,10 +205,17 @@ const useSendMessage = () => {
const stopInference = useCallback(() => {
abortControllerRef.current?.abort()
didUserAborted.current = true
}, [])
const summarizeThread = useCallback(
async (messages: string[], modelId: string, thread: Thread) => {
// if its a local model, and is not started, skip summarization
if (LocalEngines.find((e) => e === selectedModel!.engine) != null) {
if (!activeModels.map((model) => model.model).includes(modelId)) {
return
}
}
const maxWordForThreadTitle = 10
const summarizeMessages: ChatCompletionMessageParam[] = [
{
@ -223,6 +240,8 @@ const useSendMessage = () => {
updateThreadTitle(thread.id, summarizedText)
},
[
activeModels,
selectedModel,
addThreadIdShouldAnimateTitle,
chatCompletionNonStreaming,
updateThreadTitle,
@ -241,6 +260,11 @@ const useSendMessage = () => {
if (LocalEngines.find((e) => e === selectedModel!.engine) != null) {
// start model if local and not started
if (!activeModels.map((model) => model.model).includes(modelId)) {
if (activeModels.length >= concurrentModelWarningThreshold) {
// if max concurrent models reached, stop the first model
// display popup
setShowWarningMultipleModelModal(true)
}
await startModel.mutateAsync(modelId)
}
}
@ -268,7 +292,10 @@ const useSendMessage = () => {
case 'assistant':
return {
role: msg.role,
content: (msg.content[0] as TextContentBlock).text.value,
content:
msg.content[0] != null
? (msg.content[0] as TextContentBlock).text.value
: '',
}
// we will need to support other roles in the future
@ -300,6 +327,7 @@ const useSendMessage = () => {
...modelOptions,
})
didUserAborted.current = false
abortControllerRef.current = stream.controller
const assistantMessage = await createMessage.mutateAsync({
@ -366,6 +394,7 @@ const useSendMessage = () => {
},
})
} else {
didUserAborted.current = false
const abortController = new AbortController()
const response = await chatCompletionNonStreaming(
{
@ -427,9 +456,18 @@ const useSendMessage = () => {
}
} catch (err) {
console.error(err)
// @ts-expect-error error message should be there
const errorMessage = err['message']
if (errorMessage != null) {
setInferenceErrorAtom({
engine: selectedModel!.engine,
message: errorMessage,
})
}
toaster({
title: 'Failed to generate response',
title: `Error with ${selectedModel!.model}`,
description: 'Failed to generate response',
type: 'error',
})
}
@ -442,13 +480,15 @@ const useSendMessage = () => {
selectedModel,
updateMessage,
createMessage,
validatePrerequisite,
startModel,
setInferenceErrorAtom,
validatePrerequisite,
updateMessageState,
addNewMessage,
chatCompletionNonStreaming,
chatCompletionStreaming,
setIsGeneratingResponse,
setShowWarningMultipleModelModal,
])
const sendMessage = useCallback(
@ -479,6 +519,11 @@ const useSendMessage = () => {
if (LocalEngines.find((e) => e === selectedModel!.engine) != null) {
// start model if local and not started
if (!activeModels.map((model) => model.model).includes(modelId)) {
if (activeModels.length >= concurrentModelWarningThreshold) {
// if max concurrent models reached, stop the first model
// display popup
setShowWarningMultipleModelModal(true)
}
await startModel.mutateAsync(modelId)
}
}
@ -502,7 +547,10 @@ const useSendMessage = () => {
case 'assistant':
return {
role: msg.role,
content: (msg.content[0] as TextContentBlock).text.value,
content:
msg.content[0] != null
? (msg.content[0] as TextContentBlock).text.value
: '',
}
// we will need to support other roles in the future
@ -536,7 +584,7 @@ const useSendMessage = () => {
top_p: selectedModel!.top_p ?? 1,
...modelOptions,
})
didUserAborted.current = false
abortControllerRef.current = stream.controller
const assistantMessage = await createMessage.mutateAsync({
@ -606,7 +654,10 @@ const useSendMessage = () => {
},
})
} else {
didUserAborted.current = false
const abortController = new AbortController()
abortControllerRef.current = abortController
const response = await chatCompletionNonStreaming(
{
messages,
@ -663,7 +714,7 @@ const useSendMessage = () => {
content: responseMessage.content,
},
})
abortControllerRef.current = undefined
if (responseMessage) {
setIsGeneratingResponse(false)
}
@ -672,17 +723,27 @@ const useSendMessage = () => {
}
} catch (err) {
console.error(err)
// @ts-expect-error error message should be there
const errorMessage = err['message']
if (errorMessage != null) {
setInferenceErrorAtom({
engine: selectedModel!.engine,
message: errorMessage,
})
}
setIsGeneratingResponse(false)
shouldSummarize = false
toaster({
title: 'Failed to generate response',
title: `Error with ${selectedModel!.model}`,
description: 'Failed to generate response',
type: 'error',
})
}
try {
if (!shouldSummarize) return
if (!shouldSummarize || didUserAborted.current === true) return
// summarize if needed
const textMessages: string[] = messages
.map((msg) => {
@ -702,16 +763,18 @@ const useSendMessage = () => {
selectedModel,
updateMessage,
createMessage,
startModel,
setInferenceErrorAtom,
validatePrerequisite,
setCurrentPrompt,
setEditPrompt,
setIsGeneratingResponse,
updateMessageState,
addNewMessage,
startModel,
chatCompletionNonStreaming,
chatCompletionStreaming,
summarizeThread,
setShowWarningMultipleModelModal,
]
)

View File

@ -17,7 +17,7 @@
"yaml": "^2.4.5",
"@huggingface/hub": "^0.15.1",
"embla-carousel-react": "^8.1.5",
"cortexso-node": "^0.0.4",
"@cortexso/cortex.js": "^0.1.6",
"@microsoft/fetch-event-source": "^2.0.1",
"@janhq/core": "link:./core",
"@janhq/joi": "link:./joi",
@ -44,7 +44,6 @@
"sass": "^1.69.4",
"tailwind-merge": "^2.0.0",
"tailwindcss": "3.3.5",
"uuid": "^9.0.1",
"use-debounce": "^10.0.0"
},
"devDependencies": {

View File

@ -0,0 +1,45 @@
import { Fragment, useCallback } from 'react'
import { LlmEngine } from '@janhq/core'
import { Button, Modal, ModalClose } from '@janhq/joi'
import { atom, useAtom } from 'jotai'
export type InferenceError = {
  message: string
  engine?: LlmEngine
}

// Holds the most recent inference error; the modal is open whenever this is set.
export const inferenceErrorAtom = atom<InferenceError | undefined>(undefined)

/**
 * Modal surfacing an inference error message to the user.
 * Dismissing it clears the error atom, which in turn hides the modal.
 */
const InferenceErrorModal: React.FC = () => {
  const [inferenceError, setInferenceError] = useAtom(inferenceErrorAtom)

  const onClose = useCallback(
    () => setInferenceError(undefined),
    [setInferenceError]
  )

  return (
    <Modal
      hideClose
      open={inferenceError != null}
      onOpenChange={onClose}
      title={'Inference error'}
      content={
        <Fragment>
          <p className="text-[hsla(var(--text-secondary))]">
            {inferenceError?.message}
          </p>
          <div className="mt-4 flex justify-end">
            <ModalClose asChild>
              <Button onClick={onClose} autoFocus theme="destructive">
                OK
              </Button>
            </ModalClose>
          </div>
        </Fragment>
      }
    />
  )
}

export default InferenceErrorModal

View File

@ -2,7 +2,7 @@ import React, { useCallback, useState } from 'react'
import { Button, Input } from '@janhq/joi'
import { useSetAtom } from 'jotai'
import { SearchIcon } from 'lucide-react'
import { ImportIcon, SearchIcon } from 'lucide-react'
import { FoldersIcon } from 'lucide-react'
import { useDebouncedCallback } from 'use-debounce'
@ -10,6 +10,8 @@ import { toaster } from '@/containers/Toast'
import { useGetHFRepoData } from '@/hooks/useGetHFRepoData'
import { setImportModelStageAtom } from '@/hooks/useImportModel'
import { MainViewState, mainViewStateAtom } from '@/helpers/atoms/App.atom'
import {
importHuggingFaceModelStageAtom,
@ -26,6 +28,7 @@ const ModelSearchBar: React.FC<Props> = ({ onSearchChanged }) => {
const { getHfRepoData } = useGetHFRepoData()
const setMainViewState = useSetAtom(mainViewStateAtom)
const setSelectedSetting = useSetAtom(selectedSettingAtom)
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const setImportingHuggingFaceRepoData = useSetAtom(
importingHuggingFaceRepoDataAtom
@ -34,6 +37,10 @@ const ModelSearchBar: React.FC<Props> = ({ onSearchChanged }) => {
importHuggingFaceModelStageAtom
)
const onImportModelClick = useCallback(() => {
setImportModelStage('SELECTING_MODEL')
}, [setImportModelStage])
const debounced = useDebouncedCallback(async (searchText: string) => {
if (searchText.indexOf('/') === -1) {
// If we don't find / in the text, perform a local search
@ -90,6 +97,14 @@ const ModelSearchBar: React.FC<Props> = ({ onSearchChanged }) => {
<FoldersIcon size={16} />
<span>My models</span>
</Button>
<Button
className="flex gap-2 bg-[hsla(var(--app-bg))] text-[hsla(var(--text-primary))]"
theme="ghost"
onClick={onImportModelClick}
>
<ImportIcon size={16} />
<span>Import model</span>
</Button>
</div>
)
}

View File

@ -38,6 +38,9 @@ const SetUpApiKeyModal: React.FC = () => {
alert('Does not have engine')
return
}
const normalizedApiKey = apiKey.trim().replaceAll('*', '')
if (normalizedApiKey.length === 0) return
updateEngineConfig.mutate({
engine: remoteEngine,
config: {

View File

@ -0,0 +1,49 @@
import { Fragment, useCallback, useMemo } from 'react'
import { Button, Modal, ModalClose } from '@janhq/joi'
import { atom, useAtom, useAtomValue } from 'jotai'
import { activeModelsAtom } from '@/helpers/atoms/Model.atom'
export const showWarningMultipleModelModalAtom = atom<boolean>(false)
const WarningMultipleModelModal: React.FC = () => {
const [showWarningMultipleModelModal, setShowWarningMultipleModelModal] =
useAtom(showWarningMultipleModelModalAtom)
const activeModels = useAtomValue(activeModelsAtom)
const onClose = useCallback(() => {
setShowWarningMultipleModelModal(false)
}, [setShowWarningMultipleModelModal])
const title = useMemo(
() => `${activeModels.length} models running`,
[activeModels]
)
return (
<Modal
hideClose
open={showWarningMultipleModelModal}
onOpenChange={onClose}
title={title}
content={
<Fragment>
<p className="text-[hsla(var(--text-secondary))]">
This may affect performance. Please review them via System Monitor
in the lower right conner of Jan app.
</p>
<div className="mt-4 flex justify-end">
<ModalClose asChild>
<Button onClick={onClose} autoFocus theme="destructive">
OK
</Button>
</ModalClose>
</div>
</Fragment>
}
/>
)
}
export default WarningMultipleModelModal

View File

@ -10,7 +10,7 @@ import { useAtom, useAtomValue } from 'jotai'
import { toaster } from '@/containers/Toast'
import useModels from '@/hooks/useModels'
import useModelStop from '@/hooks/useModelStop'
import { useSettings } from '@/hooks/useSettings'
import {
@ -53,7 +53,7 @@ const Advanced = () => {
const { readSettings, saveSettings } = useSettings()
const activeModels = useAtomValue(activeModelsAtom)
// const [open, setOpen] = useState(false)
const { stopModel } = useModels()
const stopModel = useModelStop()
// const selectedGpu = gpuList
// .filter((x) => gpusInUse.includes(x.id))
@ -92,7 +92,7 @@ const Advanced = () => {
})
for (const model of activeModels) {
await stopModel(model.model)
await stopModel.mutateAsync(model.model)
}
setVulkanEnabled(e)

View File

@ -1,18 +1,22 @@
import { useCallback } from 'react'
import { SelectFileOption } from '@janhq/core'
import { ImportingModel, SelectFileOption } from '@janhq/core'
import { Button, Modal } from '@janhq/joi'
import { useSetAtom, useAtomValue } from 'jotai'
import useImportModel, {
import { snackbar } from '@/containers/Toast'
import {
setImportModelStageAtom,
getImportModelStageAtom,
} from '@/hooks/useImportModel'
import { importingModelsAtom } from '@/helpers/atoms/Model.atom'
const ChooseWhatToImportModal = () => {
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const setImportingModels = useSetAtom(importingModelsAtom)
const importModelStage = useAtomValue(getImportModelStageAtom)
const { sanitizeFilePaths } = useImportModel()
const onImportFileClick = useCallback(async () => {
const options: SelectFileOption = {
@ -24,10 +28,36 @@ const ChooseWhatToImportModal = () => {
{ name: 'All Files', extensions: ['*'] },
],
}
const filePaths = await window.core?.api?.selectFiles(options)
const filePaths: string[] = await window.core?.api?.selectFiles(options)
if (!filePaths || filePaths.length === 0) return
sanitizeFilePaths(filePaths)
}, [sanitizeFilePaths])
const importingModels: ImportingModel[] = filePaths
.filter((path) => path.endsWith('.gguf'))
.map((path) => {
const normalizedPath = isWindows ? path.replace(/\\/g, '/') : path
return {
importId: normalizedPath,
modelId: undefined,
name: normalizedPath.replace('.gguf', ''),
description: '',
path: path,
tags: [],
size: 0,
status: 'PREPARING',
format: 'gguf',
}
})
if (importingModels.length < 1) {
snackbar({
description: `Only files with .gguf extension can be imported.`,
type: 'error',
})
return
}
setImportingModels(importingModels)
setImportModelStage('MODEL_SELECTED')
}, [setImportingModels, setImportModelStage])
const onImportFolderClick = useCallback(async () => {
const options: SelectFileOption = {
@ -36,10 +66,37 @@ const ChooseWhatToImportModal = () => {
allowMultiple: true,
selectDirectory: true,
}
const filePaths = await window.core?.api?.selectFiles(options)
const filePaths: string[] = await window.core?.api?.selectFiles(options)
if (!filePaths || filePaths.length === 0) return
sanitizeFilePaths(filePaths)
}, [sanitizeFilePaths])
console.log('filePaths folder', filePaths)
const importingModels: ImportingModel[] = filePaths
.filter((path) => path.endsWith('.gguf'))
.map((path) => {
const normalizedPath = isWindows ? path.replace(/\\/g, '/') : path
return {
importId: normalizedPath,
modelId: undefined,
name: normalizedPath.replace('.gguf', ''),
description: '',
path: path,
tags: [],
size: 0,
status: 'PREPARING',
format: 'gguf',
}
})
if (importingModels.length < 1) {
snackbar({
description: `Only files with .gguf extension can be imported.`,
type: 'error',
})
return
}
setImportingModels(importingModels)
setImportModelStage('MODEL_SELECTED')
}, [setImportingModels, setImportModelStage])
return (
<Modal

View File

@ -13,6 +13,11 @@ import useEngineQuery from '@/hooks/useEngineQuery'
import LoadingIndicator from '@/screens/HubScreen2/components/LoadingIndicator'
// Turn an engine status slug (e.g. "not_initialized") into a display title:
// capitalize the first character and replace underscores with spaces.
const getStatusTitle = (status: string) => {
  const capitalized = status.charAt(0).toUpperCase() + status.slice(1)
  return capitalized.split('_').join(' ')
}
const EngineSetting: React.FC = () => {
const { isLoading, data } = useEngineQuery()
@ -56,7 +61,7 @@ const EngineSetting: React.FC = () => {
<TableCell className="text-center">
{engineStatus.version}
</TableCell>
<TableCell>{engineStatus.status}</TableCell>
<TableCell>{getStatusTitle(engineStatus.status)}</TableCell>
</TableRow>
)
})}

View File

@ -20,12 +20,12 @@ const importOptions: ModelImportOption[] = [
description:
'You maintain your model files outside of Jan. Keeping your files where they are, and Jan will create a smart link to them.',
},
{
type: 'MOVE_BINARY_FILE',
title: 'Move model binary file',
description:
'Jan will move your model binary file from your current folder into Jan Data Folder.',
},
// {
// type: 'MOVE_BINARY_FILE',
// title: 'Move model binary file',
// description:
// 'Jan will move your model binary file from your current folder into Jan Data Folder.',
// },
]
const ImportModelOptionModal = () => {

View File

@ -1,6 +1,6 @@
import React, { useCallback, useState } from 'react'
import React, { useState } from 'react'
import { Check, Pencil } from 'lucide-react'
import { Check } from 'lucide-react'
type Props = {
onEditModelClick: () => void
@ -9,6 +9,8 @@ type Props = {
const ImportSuccessIcon: React.FC<Props> = ({ onEditModelClick }) => {
const [isHovered, setIsHovered] = useState(false)
console.log(isHovered, onEditModelClick)
const onMouseOver = () => {
setIsHovered(true)
}
@ -19,34 +21,34 @@ const ImportSuccessIcon: React.FC<Props> = ({ onEditModelClick }) => {
return (
<div onMouseOver={onMouseOver} onMouseOut={onMouseOut}>
{isHovered ? (
{/* {isHovered ? (
<EditIcon onEditModelClick={onEditModelClick} />
) : (
) : ( */}
<SuccessIcon />
)}
{/* )} */}
</div>
)
}
const SuccessIcon = React.memo(() => (
<div className="bg-primary flex h-8 w-8 items-center justify-center rounded-full text-white">
<div className="bg-primary flex h-8 w-8 items-center justify-center rounded-full">
<Check size={20} />
</div>
))
const EditIcon: React.FC<Props> = React.memo(({ onEditModelClick }) => {
const onClick = useCallback(() => {
onEditModelClick()
}, [onEditModelClick])
// const EditIcon: React.FC<Props> = React.memo(({ onEditModelClick }) => {
// const onClick = useCallback(() => {
// onEditModelClick()
// }, [onEditModelClick])
return (
<div
className="bg-secondary flex h-8 w-8 cursor-pointer items-center justify-center rounded-lg"
onClick={onClick}
>
<Pencil size={20} />
</div>
)
})
// return (
// <div
// className="bg-secondary flex h-8 w-8 cursor-pointer items-center justify-center rounded-lg"
// onClick={onClick}
// >
// <Pencil size={20} />
// </div>
// )
// })
export default ImportSuccessIcon

View File

@ -1,15 +1,11 @@
import { useCallback, useMemo } from 'react'
import { ImportingModel } from '@janhq/core'
import { useSetAtom } from 'jotai'
import { AlertCircle } from 'lucide-react'
import { setImportModelStageAtom } from '@/hooks/useImportModel'
import { toGibibytes } from '@/utils/converter'
import { editingModelIdAtom } from '../EditModelInfoModal'
import ImportInProgressIcon from '../ImportInProgressIcon'
import ImportSuccessIcon from '../ImportSuccessIcon'
@ -18,16 +14,13 @@ type Props = {
}
const ImportingModelItem = ({ model }: Props) => {
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const setEditingModelId = useSetAtom(editingModelIdAtom)
const onEditModelInfoClick = useCallback(() => {
setEditingModelId(model.importId)
setImportModelStage('EDIT_MODEL_INFO')
}, [setImportModelStage, setEditingModelId, model.importId])
// setEditingModelId(model.importId)
// setImportModelStage('EDIT_MODEL_INFO')
}, [])
const onDeleteModelClick = useCallback(() => {}, [])
console.log('namh model', model)
const displayStatus = useMemo(() => {
if (model.status === 'FAILED') {
return 'Failed'

View File

@ -1,50 +1,47 @@
import { useCallback, useEffect, useState } from 'react'
import { useEffect } from 'react'
import { Button, Modal } from '@janhq/joi'
import { Modal } from '@janhq/joi'
import { useAtomValue, useSetAtom } from 'jotai'
import { AlertCircle } from 'lucide-react'
import useCortex from '@/hooks/useCortex'
import {
getImportModelStageAtom,
setImportModelStageAtom,
} from '@/hooks/useImportModel'
import { openFileTitle } from '@/utils/titleUtils'
import ImportingModelItem from './ImportingModelItem'
import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom'
import { importingModelsAtom } from '@/helpers/atoms/Model.atom'
const ImportingModelModal = () => {
const { downloadModel } = useCortex()
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const importingModels = useAtomValue(importingModelsAtom)
const importModelStage = useAtomValue(getImportModelStageAtom)
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const janDataFolder = useAtomValue(janDataFolderPathAtom)
const [modelFolder, setModelFolder] = useState('')
useEffect(() => {
const getModelPath = async () => {
// const modelPath = await joinPath([janDataFolder, 'models'])
setModelFolder('')
}
getModelPath()
}, [janDataFolder])
const finishedImportModel = importingModels.filter(
(model) => model.status === 'IMPORTED'
).length
const onOpenModelFolderClick = useCallback(
() => {
// openFileExplorer(modelFolder)
},
[
/*modelFolder*/
]
)
useEffect(() => {
const importModels = async () => {
for (const model of importingModels) {
await downloadModel(model.path)
// const parsedResult = await result?.json()
// if (
// parsedResult['message'] &&
// parsedResult['message'] === 'Download model started successfully.'
// ) {
// // update importingModels
// }
// console.log(`NamH result ${JSON.stringify(parsedResult)}`)
}
}
importModels()
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [downloadModel])
return (
<Modal
@ -54,7 +51,7 @@ const ImportingModelModal = () => {
content={
<div>
<div className="flex flex-row items-center space-x-2 pb-3">
<label className="text-[hsla(var(--text-secondary)] text-xs">
{/* <label className="text-[hsla(var(--text-secondary)] text-xs">
{modelFolder}
</label>
<Button
@ -64,10 +61,10 @@ const ImportingModelModal = () => {
onClick={onOpenModelFolderClick}
>
{openFileTitle()}
</Button>
</Button> */}
</div>
<div className="space-y-3">
<div className="mb-2 space-y-3">
{importingModels.map((model) => (
<ImportingModelItem key={model.importId} model={model} />
))}

View File

@ -3,7 +3,7 @@ import { memo, useCallback, useMemo, useState } from 'react'
import { LocalEngines, Model } from '@janhq/core'
import { Badge, Button, useClickOutside } from '@janhq/joi'
import { useAtomValue } from 'jotai'
import { useAtomValue, useSetAtom } from 'jotai'
import {
MoreVerticalIcon,
PlayIcon,
@ -13,22 +13,32 @@ import {
import { twMerge } from 'tailwind-merge'
import useModelStart from '@/hooks/useModelStart'
import useModelStop from '@/hooks/useModelStop'
import useModels from '@/hooks/useModels'
import { showWarningMultipleModelModalAtom } from '@/screens/HubScreen2/components/WarningMultipleModelModal'
import { activeModelsAtom } from '@/helpers/atoms/Model.atom'
type Props = {
model: Model
}
// If more than this number of models are running, show a warning modal.
export const concurrentModelWarningThreshold = 2
const ModelItem: React.FC<Props> = ({ model }) => {
const activeModels = useAtomValue(activeModelsAtom)
const startModel = useModelStart()
const stopModel = useModelStop()
const [more, setMore] = useState(false)
const { stopModel, deleteModel } = useModels()
const { deleteModel } = useModels()
const [menu, setMenu] = useState<HTMLDivElement | null>(null)
const [toggle, setToggle] = useState<HTMLDivElement | null>(null)
const setShowWarningMultipleModelModal = useSetAtom(
showWarningMultipleModelModalAtom
)
useClickOutside(() => setMore(false), null, [menu, toggle])
const isActive = useMemo(
@ -39,17 +49,30 @@ const ModelItem: React.FC<Props> = ({ model }) => {
const onModelActionClick = useCallback(
(modelId: string) => {
if (isActive) {
stopModel(modelId)
} else {
startModel.mutate(modelId)
// if model already active, stop it
stopModel.mutate(modelId)
return
}
if (activeModels.length >= concurrentModelWarningThreshold) {
// if max concurrent models reached, stop the first model
// display popup
setShowWarningMultipleModelModal(true)
}
startModel.mutate(modelId)
},
[isActive, startModel, stopModel]
[
isActive,
startModel,
stopModel,
activeModels.length,
setShowWarningMultipleModelModal,
]
)
const onDeleteModelClicked = useCallback(
async (modelId: string) => {
await stopModel(modelId)
await stopModel.mutateAsync(modelId)
await deleteModel(modelId)
},
[stopModel, deleteModel]

View File

@ -6,7 +6,7 @@ import { LlmEngines } from '@janhq/core'
import { Button, ScrollArea } from '@janhq/joi'
import { useAtomValue, useSetAtom } from 'jotai'
import { UploadCloudIcon } from 'lucide-react'
import { ImportIcon, UploadCloudIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge'
@ -16,6 +16,8 @@ import ModelSearch from '@/containers/ModelSearch'
import useDropModelBinaries from '@/hooks/useDropModelBinaries'
import { setImportModelStageAtom } from '@/hooks/useImportModel'
import ModelItem from './ModelItem'
import { MainViewState, mainViewStateAtom } from '@/helpers/atoms/App.atom'
@ -26,6 +28,11 @@ const MyModels = () => {
const downloadedModels = useAtomValue(downloadedModelsAtom)
const { onDropModels } = useDropModelBinaries()
const [searchText, setSearchText] = useState('')
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const onImportModelClick = useCallback(() => {
setImportModelStage('SELECTING_MODEL')
}, [setImportModelStage])
const filteredDownloadedModels = useMemo(
() =>
@ -75,14 +82,14 @@ const MyModels = () => {
<div className="w-full sm:w-[300px]">
<ModelSearch onSearchLocal={onSearchChange} />
</div>
{/* <Button
<Button
variant="outline"
theme="ghost"
onClick={onImportModelClick}
>
<UploadIcon size={16} className="mr-2" />
<ImportIcon size={16} className="mr-2" />
<p>Import Model</p>
</Button> */}
</Button>
</div>
{!filteredDownloadedModels.length ? (

View File

@ -1,39 +1,109 @@
import { useCallback } from 'react'
import { useDropzone } from 'react-dropzone'
import { ImportingModel, SelectFileOption } from '@janhq/core'
import { Modal } from '@janhq/joi'
import { useAtomValue, useSetAtom } from 'jotai'
import { UploadCloudIcon } from 'lucide-react'
import { snackbar } from '@/containers/Toast'
import useDropModelBinaries from '@/hooks/useDropModelBinaries'
import {
getImportModelStageAtom,
setImportModelStageAtom,
} from '@/hooks/useImportModel'
const SelectingModelModal = () => {
import { importingModelsAtom } from '@/helpers/atoms/Model.atom'
const SelectingModelModal: React.FC = () => {
const setImportModelStage = useSetAtom(setImportModelStageAtom)
const setImportingModels = useSetAtom(importingModelsAtom)
const importModelStage = useAtomValue(getImportModelStageAtom)
const { onDropModels } = useDropModelBinaries()
// const { sanitizeFilePaths } = useImportModel()
const onImportFileWindowsClick = useCallback(async () => {
const options: SelectFileOption = {
title: 'Select model files',
buttonLabel: 'Select',
allowMultiple: true,
filters: [
{ name: 'GGUF Files', extensions: ['gguf'] },
{ name: 'All Files', extensions: ['*'] },
],
}
const filePaths: string[] = await window.core?.api?.selectFiles(options)
if (!filePaths || filePaths.length === 0) return
const importingModels: ImportingModel[] = filePaths
.filter((path) => path.endsWith('.gguf'))
.map((path) => {
const normalizedPath = isWindows ? path.replace(/\\/g, '/') : path
return {
importId: normalizedPath,
modelId: undefined,
name: normalizedPath.replace('.gguf', ''),
description: '',
path: path,
tags: [],
size: 0,
status: 'PREPARING',
format: 'gguf',
}
})
if (importingModels.length < 1) {
snackbar({
description: `Only files with .gguf extension can be imported.`,
type: 'error',
})
return
}
setImportingModels(importingModels)
setImportModelStage('MODEL_SELECTED')
}, [setImportingModels, setImportModelStage])
const onSelectFileClick = useCallback(async () => {
// const platform = (await systemInformation()).osInfo?.platform
// if (platform === 'win32') {
// setImportModelStage('CHOOSE_WHAT_TO_IMPORT')
// return
// }
// const options: SelectFileOption = {
// title: 'Select model folders',
// buttonLabel: 'Select',
// allowMultiple: true,
// selectDirectory: true,
// }
// const filePaths = await window.core?.api?.selectFiles(options)
// if (!filePaths || filePaths.length === 0) return
// sanitizeFilePaths(filePaths)
}, [])
if (isWindows) {
return onImportFileWindowsClick()
}
const options: SelectFileOption = {
title: 'Select model folders',
buttonLabel: 'Select',
allowMultiple: true,
selectDirectory: true,
}
const filePaths: string[] = await window.core?.api?.selectFiles(options)
if (!filePaths || filePaths.length === 0) return
const importingModels: ImportingModel[] = filePaths
.filter((path) => path.endsWith('.gguf'))
.map((path) => {
const normalizedPath = isWindows ? path.replace(/\\/g, '/') : path
return {
importId: normalizedPath,
modelId: undefined,
name: normalizedPath.replace('.gguf', ''),
description: '',
path: path,
tags: [],
size: 0,
status: 'PREPARING',
format: 'gguf',
}
})
if (importingModels.length < 1) {
snackbar({
description: `Only files with .gguf extension can be imported.`,
type: 'error',
})
return
}
setImportingModels(importingModels)
setImportModelStage('MODEL_SELECTED')
}, [setImportModelStage, setImportingModels, onImportFileWindowsClick])
const { isDragActive, getRootProps } = useDropzone({
noClick: true,
@ -52,9 +122,7 @@ const SelectingModelModal = () => {
return (
<Modal
open={importModelStage === 'SELECTING_MODEL'}
onOpenChange={() => {
setImportModelStage('NONE')
}}
onOpenChange={() => setImportModelStage('NONE')}
title="Import Model"
content={
<div>

View File

@ -3,7 +3,7 @@ import { useCallback } from 'react'
import { SettingComponentProps } from '@janhq/core'
import { useAtomValue } from 'jotai'
import useModels from '@/hooks/useModels'
import useModelStop from '@/hooks/useModelStop'
import SettingComponentBuilder from '../../../../containers/ModelSetting/SettingComponent'
@ -17,7 +17,7 @@ type Props = {
const AssistantSetting: React.FC<Props> = ({ componentData }) => {
const activeThread = useAtomValue(activeThreadAtom)
const activeModels = useAtomValue(activeModelsAtom)
const { stopModel } = useModels()
const stopModel = useModelStop()
const onValueChanged = useCallback(
(key: string, value: string | number | boolean) => {
@ -29,7 +29,7 @@ const AssistantSetting: React.FC<Props> = ({ componentData }) => {
const model = activeModels.find(
(model) => activeThread.assistants[0]?.model === model.model
)
if (model) stopModel(model.model)
if (model) stopModel.mutate(model.model)
}
// if (

View File

@ -8,7 +8,11 @@ import EmptyThread from './EmptyThread'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
const ChatBody: React.FC = () => {
type Props = {
onResendMessage: () => void
}
const ChatBody: React.FC<Props> = ({ onResendMessage }) => {
const messages = useAtomValue(getCurrentChatMessagesAtom)
if (!messages.length) return <EmptyThread />
@ -22,6 +26,7 @@ const ChatBody: React.FC = () => {
key={message.id}
msg={message}
isLatestMessage={isLatestMessage}
onResendMessage={onResendMessage}
/>
)
})}

View File

@ -0,0 +1,47 @@
import { useMemo } from 'react'
import React from 'react'
import { Button } from '@janhq/joi'
import { useAtomValue } from 'jotai'
import { currentPromptAtom } from '@/containers/Providers/Jotai'
type Props = {
  onSendMessageClick: (message: string) => void
}

/**
 * Send button for the chat input. Hidden while the prompt is empty or
 * whitespace-only; clicking submits the current prompt text.
 */
const SendMessageButton: React.FC<Props> = ({ onSendMessageClick }) => {
  const currentPrompt = useAtomValue(currentPromptAtom)

  // Only render when there is something to send.
  const showSendButton = useMemo(
    () => currentPrompt.trim().length > 0,
    [currentPrompt]
  )

  if (!showSendButton) return null

  return (
    <Button
      className="h-8 w-8 rounded-lg p-0"
      data-testid="btn-send-chat"
      onClick={() => onSendMessageClick(currentPrompt)}
    >
      <svg
        width="16"
        height="16"
        viewBox="0 0 16 16"
        fill="none"
        xmlns="http://www.w3.org/2000/svg"
        className="fill-white stroke-white"
      >
        <path
          d="M3.93098 4.26171L3.93108 4.26168L12.9041 1.27032C12.9041 1.27031 12.9041 1.27031 12.9041 1.27031C13.7983 0.972243 14.3972 0.77445 14.8316 0.697178C15.0428 0.659595 15.1663 0.660546 15.2355 0.671861C15.2855 0.680033 15.296 0.690905 15.3015 0.696542C15.3018 0.696895 15.3022 0.697228 15.3025 0.697538C15.3028 0.697847 15.3031 0.698168 15.3035 0.698509C15.3091 0.703965 15.32 0.71449 15.3282 0.764538C15.3395 0.8338 15.3405 0.957246 15.3029 1.16844C15.2258 1.60268 15.0282 2.20131 14.7307 3.09505L11.7383 12.0689L11.7383 12.069C11.3184 13.3293 11.0242 14.2078 10.7465 14.7789C10.6083 15.063 10.4994 15.2158 10.4215 15.292C10.3948 15.3182 10.3774 15.3295 10.3698 15.3338C10.3622 15.3295 10.3449 15.3181 10.3184 15.2921C10.2404 15.2158 10.1314 15.0629 9.99319 14.7788C9.71539 14.2077 9.42091 13.3291 9.00105 12.069L9.00094 12.0687L8.34059 10.0903L12.6391 5.79172L12.6392 5.7918L12.6472 5.78348C12.9604 5.45927 13.1337 5.02503 13.1297 4.57431C13.1258 4.12358 12.945 3.69242 12.6263 3.3737C12.3076 3.05497 11.8764 2.87418 11.4257 2.87027C10.975 2.86635 10.5407 3.03962 10.2165 3.35276L10.2165 3.35268L10.2083 3.36086L5.9106 7.65853L3.93098 6.99895C2.67072 6.57904 1.79218 6.28485 1.22115 6.00715C0.937001 5.86898 0.784237 5.76011 0.707981 5.68215C0.681839 5.65542 0.670463 5.63807 0.666163 5.63051C0.670529 5.62288 0.681934 5.60558 0.707909 5.57904C0.784233 5.50103 0.937088 5.3921 1.22125 5.25386C1.79226 4.97606 2.67087 4.68157 3.93098 4.26171Z"
          strokeWidth="1.33"
        />
      </svg>
    </Button>
  )
}

export default React.memo(SendMessageButton)

View File

@ -0,0 +1,21 @@
import React from 'react'
import { Button } from '@janhq/joi'
import { StopCircle } from 'lucide-react'
type Props = {
  onStopInferenceClick: () => void
}

/**
 * Destructive icon button shown while a response is streaming; clicking it
 * asks the parent to abort the in-flight inference.
 */
const StopInferenceButton: React.FC<Props> = ({ onStopInferenceClick }) => {
  return (
    <Button
      theme="destructive"
      className="h-8 w-8 rounded-lg p-0"
      onClick={onStopInferenceClick}
    >
      <StopCircle size={20} />
    </Button>
  )
}

export default React.memo(StopInferenceButton)

View File

@ -0,0 +1,40 @@
import { useMemo } from 'react'
import { useAtomValue } from 'jotai'
import SendMessageButton from './SendMessageButton'
import StopInferenceButton from './StopInferenceButton'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
import { isGeneratingResponseAtom } from '@/helpers/atoms/Thread.atom'
type Props = {
onStopInferenceClick: () => void
onSendMessageClick: (message: string) => void
}
const ChatActionButton: React.FC<Props> = ({
onStopInferenceClick,
onSendMessageClick,
}) => {
const messages = useAtomValue(getCurrentChatMessagesAtom)
const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)
const showStopButton = useMemo(() => {
if (isGeneratingResponse) return true
const lastMessage = messages[messages.length - 1]
if (!lastMessage) return false
if (lastMessage.status === 'in_progress') return true
return false
}, [isGeneratingResponse, messages])
if (showStopButton) {
return <StopInferenceButton onStopInferenceClick={onStopInferenceClick} />
}
return <SendMessageButton onSendMessageClick={onSendMessageClick} />
}
export default ChatActionButton

View File

@ -0,0 +1,95 @@
import { useCallback, useEffect, useMemo, useRef } from 'react'
import { TextArea } from '@janhq/joi'
import { useAtom, useAtomValue } from 'jotai'
import { twMerge } from 'tailwind-merge'
import { currentPromptAtom } from '@/containers/Providers/Jotai'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
import { spellCheckAtom } from '@/helpers/atoms/Setting.atom'
import {
getActiveThreadIdAtom,
isGeneratingResponseAtom,
} from '@/helpers/atoms/Thread.atom'
type Props = {
  isSettingActive: boolean
  onSendMessageClick: (message: string) => void
}

/**
 * Auto-growing prompt textarea for the active thread.
 *
 * - Disabled until a thread is active; refocuses whenever the active thread
 *   changes.
 * - Grows with its content (the `max-h-[400px]` class caps growth; a
 *   scrollbar is enabled once the rendered height reaches 390px).
 * - Enter (without Shift, and not during IME composition) submits the
 *   prompt unless a response is generating or the newest message is still
 *   streaming.
 */
const ChatTextInput: React.FC<Props> = ({
  isSettingActive,
  onSendMessageClick,
}) => {
  const messages = useAtomValue(getCurrentChatMessagesAtom)
  const [currentPrompt, setCurrentPrompt] = useAtom(currentPromptAtom)
  const textareaRef = useRef<HTMLTextAreaElement>(null)
  const activeThreadId = useAtomValue(getActiveThreadIdAtom)
  const spellCheck = useAtomValue(spellCheckAtom)
  const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)

  // Typing is only possible once a thread is selected.
  const disabled = useMemo(() => !activeThreadId, [activeThreadId])

  const onChange = useCallback(
    (e: React.ChangeEvent<HTMLTextAreaElement>) => {
      setCurrentPrompt(e.target.value)
    },
    [setCurrentPrompt]
  )

  // Refocus the input whenever the user switches threads.
  useEffect(() => {
    if (textareaRef.current) {
      textareaRef.current.focus()
    }
  }, [activeThreadId])

  // Auto-size: reset to the base height first so scrollHeight reflects the
  // current content, then grow to fit. The two height assignments must stay
  // in this order.
  useEffect(() => {
    if (textareaRef.current?.clientHeight) {
      textareaRef.current.style.height = isSettingActive ? '100px' : '40px'
      textareaRef.current.style.height = textareaRef.current.scrollHeight + 'px'
      textareaRef.current.style.overflow =
        textareaRef.current.clientHeight >= 390 ? 'auto' : 'hidden'
    }
  }, [textareaRef.current?.clientHeight, currentPrompt, isSettingActive])

  const onKeyDown = useCallback(
    (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
      if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
        e.preventDefault()
        if (isGeneratingResponse) return
        // Fix: keep Enter consistent with the send button, which hides
        // itself for empty prompts — ignore empty/whitespace-only input
        // instead of sending a blank message.
        if (currentPrompt.trim().length === 0) return
        const lastMessage = messages[messages.length - 1]
        if (!lastMessage || lastMessage.status !== 'in_progress') {
          onSendMessageClick(currentPrompt)
          return
        }
      }
    },
    [messages, isGeneratingResponse, currentPrompt, onSendMessageClick]
  )

  return (
    <TextArea
      className={twMerge(
        'relative max-h-[400px] resize-none pr-20',
        isSettingActive && 'pb-14 pr-16'
      )}
      spellCheck={spellCheck}
      data-testid="txt-input-chat"
      style={{ height: isSettingActive ? '100px' : '40px' }}
      ref={textareaRef}
      onKeyDown={onKeyDown}
      placeholder="Ask me anything"
      disabled={disabled}
      value={currentPrompt}
      onChange={onChange}
    />
  )
}

export default ChatTextInput

View File

@ -1,28 +1,19 @@
import { useEffect, useRef, useState } from 'react'
import { useState } from 'react'
import { TextArea, Button, useMediaQuery } from '@janhq/joi'
import { Button, useMediaQuery } from '@janhq/joi'
import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import {
StopCircle,
SettingsIcon,
ChevronUpIcon,
Settings2Icon,
} from 'lucide-react'
import { SettingsIcon, ChevronUpIcon, Settings2Icon } from 'lucide-react'
import { twMerge } from 'tailwind-merge'
import ModelDropdown from '@/containers/ModelDropdown'
import { currentPromptAtom } from '@/containers/Providers/Jotai'
import ChatActionButton from './ChatActionButton'
import ChatTextInput from './ChatTextInput'
import { showRightPanelAtom } from '@/helpers/atoms/App.atom'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
import { spellCheckAtom } from '@/helpers/atoms/Setting.atom'
import {
activeThreadAtom,
getActiveThreadIdAtom,
isGeneratingResponseAtom,
waitingToSendMessage,
} from '@/helpers/atoms/Thread.atom'
import { getActiveThreadIdAtom } from '@/helpers/atoms/Thread.atom'
import { activeTabThreadRightPanelAtom } from '@/helpers/atoms/ThreadRightPanel.atom'
type Props = {
@ -31,170 +22,20 @@ type Props = {
}
const ChatInput: React.FC<Props> = ({ sendMessage, stopInference }) => {
const activeThread = useAtomValue(activeThreadAtom)
const messages = useAtomValue(getCurrentChatMessagesAtom)
const [activeSetting, setActiveSetting] = useState(false)
const spellCheck = useAtomValue(spellCheckAtom)
const [currentPrompt, setCurrentPrompt] = useAtom(currentPromptAtom)
const activeThreadId = useAtomValue(getActiveThreadIdAtom)
const [isWaitingToSend, setIsWaitingToSend] = useAtom(waitingToSendMessage)
// const [fileUpload, setFileUpload] = useAtom(fileUploadAtom)
const textareaRef = useRef<HTMLTextAreaElement>(null)
const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)
const setActiveTabThreadRightPanel = useSetAtom(activeTabThreadRightPanelAtom)
const onPromptChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
setCurrentPrompt(e.target.value)
}
const activeThreadId = useAtomValue(getActiveThreadIdAtom)
const [activeSetting, setActiveSetting] = useState(false)
const [showRightPanel, setShowRightPanel] = useAtom(showRightPanelAtom)
const matches = useMediaQuery('(max-width: 880px)')
useEffect(() => {
if (isWaitingToSend && activeThreadId) {
setIsWaitingToSend(false)
sendMessage(currentPrompt)
}
}, [
activeThreadId,
isWaitingToSend,
currentPrompt,
setIsWaitingToSend,
sendMessage,
])
useEffect(() => {
if (textareaRef.current) {
textareaRef.current.focus()
}
}, [activeThreadId])
useEffect(() => {
if (textareaRef.current?.clientHeight) {
textareaRef.current.style.height = activeSetting ? '100px' : '40px'
textareaRef.current.style.height = textareaRef.current.scrollHeight + 'px'
textareaRef.current.style.overflow =
textareaRef.current.clientHeight >= 390 ? 'auto' : 'hidden'
}
}, [textareaRef.current?.clientHeight, currentPrompt, activeSetting])
const onKeyDown = async (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
e.preventDefault()
if (isGeneratingResponse) return
if (messages[messages.length - 1]?.status !== 'in_progress')
sendMessage(currentPrompt)
else stopInference()
}
}
/**
* Handles the change event of the extension file input element by setting the file name state.
* Its to be used to display the extension file name of the selected file.
* @param event - The change event object.
*/
// const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
// const file = event.target.files?.[0]
// if (!file) return
// setFileUpload([{ file: file, type: 'pdf' }])
// }
// const handleImageChange = (event: React.ChangeEvent<HTMLInputElement>) => {
// const file = event.target.files?.[0]
// if (!file) return
// setFileUpload([{ file: file, type: 'image' }])
// }
// const renderPreview = (fileUpload: any) => {
// if (fileUpload.length > 0) {
// if (fileUpload[0].type === 'image') {
// return <ImageUploadPreview file={fileUpload[0].file} />
// } else {
// return <FileUploadPreview />
// }
// }
// }
return (
<div className="relative p-4 pb-2">
<div className="relative flex w-full flex-col">
{/* {renderPreview(fileUpload)} */}
<TextArea
className={twMerge(
'relative max-h-[400px] resize-none pr-20',
// fileUpload.length && 'rounded-t-none',
activeSetting && 'pb-14 pr-16'
)}
spellCheck={spellCheck}
data-testid="txt-input-chat"
style={{ height: activeSetting ? '100px' : '40px' }}
ref={textareaRef}
onKeyDown={onKeyDown}
placeholder="Ask me anything"
disabled={!activeThread}
value={currentPrompt}
onChange={onPromptChange}
<ChatTextInput
isSettingActive={activeSetting}
onSendMessageClick={sendMessage}
/>
{/* {experimentalFeature && (
<Tooltip
trigger={
<Button
theme="icon"
className="absolute left-3 top-2.5"
onClick={(e) => {
if (
fileUpload.length > 0 ||
(activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled &&
!isVisionModel)
) {
e.stopPropagation()
} else {
setShowAttacmentMenus(!showAttacmentMenus)
}
}}
>
<PaperclipIcon
size={18}
className="text-[hsla(var(--text-secondary))]"
/>
</Button>
}
disabled={
activeThread?.assistants[0].tools &&
activeThread?.assistants[0].tools[0]?.enabled
}
content={
<>
{fileUpload.length > 0 ||
(activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled &&
!isVisionModel && (
<>
{fileUpload.length !== 0 && (
<span>
Currently, we only support 1 attachment at the same
time.
</span>
)}
{activeThread?.assistants[0].tools &&
activeThread?.assistants[0].tools[0]?.enabled ===
false && (
<span>
Turn on Retrieval in Assistant Settings to use
this feature.
</span>
)}
</>
))}
</>
}
/>
)} */}
<div className={twMerge('absolute right-3 top-1.5')}>
<div className="flex items-center gap-x-4">
@ -202,9 +43,7 @@ const ChatInput: React.FC<Props> = ({ sendMessage, stopInference }) => {
<div className="flex h-8 items-center">
<Button
theme="icon"
onClick={() => {
setActiveSetting(!activeSetting)
}}
onClick={() => setActiveSetting(!activeSetting)}
>
<SettingsIcon
size={18}
@ -213,43 +52,10 @@ const ChatInput: React.FC<Props> = ({ sendMessage, stopInference }) => {
</Button>
</div>
)}
{messages[messages.length - 1]?.status !== 'in_progress' &&
isGeneratingResponse ? (
<Button
theme="destructive"
onClick={stopInference}
className="h-8 w-8 rounded-lg p-0"
>
<StopCircle size={20} />
</Button>
) : (
<>
{currentPrompt.length !== 0 && (
<Button
disabled={
!activeThread || currentPrompt.trim().length === 0
}
className="h-8 w-8 rounded-lg p-0"
data-testid="btn-send-chat"
onClick={() => sendMessage(currentPrompt)}
>
<svg
width="16"
height="16"
viewBox="0 0 16 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
className="fill-white stroke-white"
>
<path
d="M3.93098 4.26171L3.93108 4.26168L12.9041 1.27032C12.9041 1.27031 12.9041 1.27031 12.9041 1.27031C13.7983 0.972243 14.3972 0.77445 14.8316 0.697178C15.0428 0.659595 15.1663 0.660546 15.2355 0.671861C15.2855 0.680033 15.296 0.690905 15.3015 0.696542C15.3018 0.696895 15.3022 0.697228 15.3025 0.697538C15.3028 0.697847 15.3031 0.698168 15.3035 0.698509C15.3091 0.703965 15.32 0.71449 15.3282 0.764538C15.3395 0.8338 15.3405 0.957246 15.3029 1.16844C15.2258 1.60268 15.0282 2.20131 14.7307 3.09505L11.7383 12.0689L11.7383 12.069C11.3184 13.3293 11.0242 14.2078 10.7465 14.7789C10.6083 15.063 10.4994 15.2158 10.4215 15.292C10.3948 15.3182 10.3774 15.3295 10.3698 15.3338C10.3622 15.3295 10.3449 15.3181 10.3184 15.2921C10.2404 15.2158 10.1314 15.0629 9.99319 14.7788C9.71539 14.2077 9.42091 13.3291 9.00105 12.069L9.00094 12.0687L8.34059 10.0903L12.6391 5.79172L12.6392 5.7918L12.6472 5.78348C12.9604 5.45927 13.1337 5.02503 13.1297 4.57431C13.1258 4.12358 12.945 3.69242 12.6263 3.3737C12.3076 3.05497 11.8764 2.87418 11.4257 2.87027C10.975 2.86635 10.5407 3.03962 10.2165 3.35276L10.2165 3.35268L10.2083 3.36086L5.9106 7.65853L3.93098 6.99895C2.67072 6.57904 1.79218 6.28485 1.22115 6.00715C0.937001 5.86898 0.784237 5.76011 0.707981 5.68215C0.681839 5.65542 0.670463 5.63807 0.666163 5.63051C0.670529 5.62288 0.681934 5.60558 0.707909 5.57904C0.784233 5.50103 0.937088 5.3921 1.22125 5.25386C1.79226 4.97606 2.67087 4.68157 3.93098 4.26171Z"
strokeWidth="1.33"
<ChatActionButton
onStopInferenceClick={stopInference}
onSendMessageClick={sendMessage}
/>
</svg>
</Button>
)}
</>
)}
</div>
</div>
@ -257,7 +63,7 @@ const ChatInput: React.FC<Props> = ({ sendMessage, stopInference }) => {
<div
className={twMerge(
'absolute bottom-[6px] left-[1px] flex w-[calc(100%-2px)] items-center justify-between rounded-lg bg-[hsla(var(--textarea-bg))] p-3',
!activeThread && 'bg-transparent'
!activeThreadId && 'bg-transparent'
)}
>
<div className="flex items-center gap-x-3">
@ -278,26 +84,6 @@ const ChatInput: React.FC<Props> = ({ sendMessage, stopInference }) => {
className="flex-shrink-0 cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Button>
{/* {experimentalFeature && (
<Badge
className="flex cursor-pointer items-center gap-x-1"
theme="secondary"
onClick={() => {
setActiveTabThreadRightPanel('tools')
if (matches) {
setShowRightPanel(!showRightPanel)
} else if (!showRightPanel) {
setShowRightPanel(true)
}
}}
>
<ShapesIcon
size={16}
className="flex-shrink-0 text-[hsla(var(--text-secondary))]"
/>
<span>Tools</span>
</Badge>
)} */}
</div>
<Button theme="icon" onClick={() => setActiveSetting(false)}>
<ChevronUpIcon

View File

@ -14,7 +14,6 @@ import {
import { useClipboard } from '@/hooks/useClipboard'
import useMessageDeleteMutation from '@/hooks/useMessageDeleteMutation'
import useSendMessage from '@/hooks/useSendMessage'
import {
deleteMessageAtom,
@ -24,12 +23,16 @@ import {
type Props = {
isLastMessage: boolean
message: Message
onResendMessage: () => void
}
const MessageToolbar: React.FC<Props> = ({ isLastMessage, message }) => {
const MessageToolbar: React.FC<Props> = ({
isLastMessage,
message,
onResendMessage,
}) => {
const deleteMessage = useSetAtom(deleteMessageAtom)
const setEditMessage = useSetAtom(editMessageAtom)
const { resendMessage } = useSendMessage()
const clipboard = useClipboard({ timeout: 1000 })
const deleteCortexMessage = useMessageDeleteMutation()
@ -61,8 +64,8 @@ const MessageToolbar: React.FC<Props> = ({ isLastMessage, message }) => {
messageId: message.id,
})
deleteMessage(message.id)
await resendMessage()
}, [deleteCortexMessage, deleteMessage, resendMessage, message])
onResendMessage()
}, [deleteCortexMessage, deleteMessage, onResendMessage, message])
const allowRegenerate = useMemo(
() => isLastMessage && message.role === 'assistant',

View File

@ -33,9 +33,14 @@ import { editMessageAtom } from '@/helpers/atoms/ChatMessage.atom'
type Props = {
isLatestMessage: boolean
msg: Message
onResendMessage: () => void
}
const SimpleTextMessage: React.FC<Props> = ({ isLatestMessage, msg }) => {
const SimpleTextMessage: React.FC<Props> = ({
isLatestMessage,
msg,
onResendMessage,
}) => {
const [text, setText] = useState('')
const { data: assistants } = useAssistantQuery()
const editMessage = useAtomValue(editMessageAtom)
@ -188,7 +193,11 @@ const SimpleTextMessage: React.FC<Props> = ({ isLatestMessage, msg }) => {
: 'hidden group-hover:absolute group-hover:right-4 group-hover:top-4 group-hover:flex'
)}
>
<MessageToolbar message={msg} isLastMessage={isLatestMessage} />
<MessageToolbar
message={msg}
isLastMessage={isLatestMessage}
onResendMessage={onResendMessage}
/>
</div>
{isLatestMessage &&
(msg.status === 'in_progress' || tokenSpeed > 0) && (
@ -204,7 +213,7 @@ const SimpleTextMessage: React.FC<Props> = ({ isLatestMessage, msg }) => {
!isUser && !text.includes(' ') && 'break-all'
)}
>
<>
<Fragment>
{msg.content[0]?.type === 'image_file' && (
<div className="group/image relative mb-2 inline-flex cursor-pointer overflow-hidden rounded-xl">
<div className="left-0 top-0 z-20 h-full w-full group-hover/image:inline-block">
@ -254,7 +263,7 @@ const SimpleTextMessage: React.FC<Props> = ({ isLatestMessage, msg }) => {
dangerouslySetInnerHTML={{ __html: parsedText }}
/>
)}
</>
</Fragment>
</div>
</div>
)

View File

@ -1,20 +1,8 @@
/* eslint-disable @typescript-eslint/naming-convention */
import { useEffect, useState } from 'react'
import { Accept, useDropzone } from 'react-dropzone'
import { useAtomValue, useSetAtom } from 'jotai'
import { UploadCloudIcon } from 'lucide-react'
import { twMerge } from 'tailwind-merge'
import { useAtomValue } from 'jotai'
import CenterPanelContainer from '@/containers/CenterPanelContainer'
import GenerateResponse from '@/containers/Loader/GenerateResponse'
import ModelStart from '@/containers/Loader/ModelStart'
import { fileUploadAtom } from '@/containers/Providers/Jotai'
import { snackbar } from '@/containers/Toast'
import useSendMessage from '@/hooks/useSendMessage'
@ -22,165 +10,26 @@ import ChatBody from '@/screens/Thread/ThreadCenterPanel/ChatBody'
import ChatInput from './ChatInput'
import { experimentalFeatureEnabledAtom } from '@/helpers/atoms/AppConfig.atom'
import {
isGeneratingResponseAtom,
activeThreadAtom,
isLoadingModelAtom,
} from '@/helpers/atoms/Thread.atom'
const renderError = (code: string) => {
switch (code) {
case 'multiple-upload':
return 'Currently, we only support 1 attachment at the same time'
case 'retrieval-off':
return 'Turn on Retrieval in Assistant Settings to use this feature'
case 'file-invalid-type':
return 'We do not support this file type'
default:
return 'Oops, something error, please try again.'
}
}
const ThreadCenterPanel: React.FC = () => {
const { sendMessage, stopInference } = useSendMessage()
const [dragRejected, setDragRejected] = useState({ code: '' })
const setFileUpload = useSetAtom(fileUploadAtom)
const experimentalFeature = useAtomValue(experimentalFeatureEnabledAtom)
const { sendMessage, stopInference, resendMessage } = useSendMessage()
const activeThread = useAtomValue(activeThreadAtom)
const isLoadingModel = useAtomValue(isLoadingModelAtom)
const isVisionModel = false // activeThread?.assistants[0].model?.settings.vision_model
const acceptedFormat: Accept = isVisionModel
? {
'application/pdf': ['.pdf'],
'image/jpeg': ['.jpeg'],
'image/png': ['.png'],
'image/jpg': ['.jpg'],
}
: {
'application/pdf': ['.pdf'],
}
const { getRootProps, isDragReject } = useDropzone({
noClick: true,
multiple: false,
accept: acceptedFormat,
onDragOver: (e) => {
// Retrieval file drag and drop is experimental feature
if (!experimentalFeature) return
if (
e.dataTransfer.items.length === 1 &&
((activeThread?.assistants[0].tools &&
activeThread?.assistants[0].tools[0]?.enabled) ||
isVisionModel)
) {
setDragOver(true)
} else if (
activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled
) {
setDragRejected({ code: 'retrieval-off' })
} else {
setDragRejected({ code: 'multiple-upload' })
}
},
onDragLeave: () => setDragOver(false),
onDrop: (files, rejectFiles) => {
// Retrieval file drag and drop is experimental feature
if (!experimentalFeature) return
if (
!files ||
files.length !== 1 ||
rejectFiles.length !== 0 ||
(activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled &&
!isVisionModel)
)
return
const imageType = files[0]?.type.includes('image')
setFileUpload([{ file: files[0], type: imageType ? 'image' : 'pdf' }])
setDragOver(false)
},
onDropRejected: (e) => {
if (
activeThread?.assistants[0].tools &&
!activeThread?.assistants[0].tools[0]?.enabled
) {
setDragRejected({ code: 'retrieval-off' })
} else {
setDragRejected({ code: e[0].errors[0].code })
}
setDragOver(false)
},
})
useEffect(() => {
if (dragRejected.code) {
snackbar({
description: renderError(dragRejected.code),
type: 'error',
})
}
setTimeout(() => {
if (dragRejected.code) {
setDragRejected({ code: '' })
}
}, 2000)
}, [dragRejected.code])
const [dragOver, setDragOver] = useState(false)
const isGeneratingResponse = useAtomValue(isGeneratingResponseAtom)
return (
<CenterPanelContainer>
<div
className="relative flex h-full w-full flex-col outline-none"
{...getRootProps()}
>
{dragOver && (
<div className="absolute z-50 mx-auto h-full w-full p-8 backdrop-blur-lg">
<div
className={twMerge(
'flex h-full w-full items-center justify-center rounded-lg border border-dashed border-[hsla(var(--primary-bg))]',
isDragReject && 'border-[hsla(var(--destructive-bg))]'
)}
>
<div className="mx-auto w-1/2 text-center">
<div className="mx-auto inline-flex h-12 w-12 items-center justify-center rounded-full">
<UploadCloudIcon
size={24}
className="text-[hsla(var(--primary-bg))]"
/>
</div>
<div className="mt-4 text-[hsla(var(--primary-bg))]">
<h6 className="font-bold">
{isDragReject
? `Currently, we only support 1 attachment at the same time with ${
isVisionModel ? 'PDF, JPEG, JPG, PNG' : 'PDF'
} format`
: 'Drop file here'}
</h6>
{!isDragReject && (
<p className="mt-2">
{isVisionModel ? 'PDF, JPEG, JPG, PNG' : 'PDF'}
</p>
)}
</div>
</div>
</div>
</div>
)}
<div className="relative flex h-full w-full flex-col outline-none">
<div className="flex h-full w-full flex-col justify-between">
{activeThread && (
<div className="flex h-full w-full overflow-x-hidden">
<ChatBody />
<ChatBody onResendMessage={resendMessage} />
</div>
)}

View File

@ -9,6 +9,7 @@ import { useDebouncedCallback } from 'use-debounce'
import EngineSetting from '@/containers/EngineSetting'
import ModelSetting from '@/containers/ModelSetting'
import useModelStop from '@/hooks/useModelStop'
import useModels from '@/hooks/useModels'
import { getConfigurationsData } from '@/utils/componentSettings'
@ -24,7 +25,8 @@ import {
} from '@/helpers/atoms/Model.atom'
const ModelSettingContainer: React.FC = () => {
const { stopModel, updateModel } = useModels()
const stopModel = useModelStop()
const { updateModel } = useModels()
const setSelectedModel = useSetAtom(updateSelectedModelAtom)
const selectedModel = useAtomValue(getSelectedModelAtom)
@ -33,21 +35,17 @@ const ModelSettingContainer: React.FC = () => {
if (!selectedModel) return
// runtime setting
const modelRuntimeParams = toRuntimeParams(selectedModel)
const componentDataRuntimeSetting = getConfigurationsData(
modelRuntimeParams,
selectedModel
)
const componentDataRuntimeSetting =
getConfigurationsData(modelRuntimeParams)
// engine setting
const modelEngineParams = toSettingParams(selectedModel)
const componentDataEngineSetting = getConfigurationsData(
modelEngineParams,
selectedModel
modelEngineParams
).filter((x) => x.key !== 'prompt_template' && x.key !== 'embedding')
const promptTemplateSettings = getConfigurationsData(
modelEngineParams,
selectedModel
modelEngineParams
).filter((x) => x.key === 'prompt_template')
// the max value of max token has to follow context length
@ -57,6 +55,7 @@ const ModelSettingContainer: React.FC = () => {
const contextLength = componentDataEngineSetting.find(
(x) => x.key === 'ctx_len'
)
if (maxTokens && contextLength) {
// replace maxToken to componentDataRuntimeSetting
const updatedComponentDataRuntimeSetting: SettingComponentProps[] =
@ -123,7 +122,7 @@ const ModelSettingContainer: React.FC = () => {
presetConfiguration[key]?.requireModelReload ?? true
if (shouldStopModel) {
stopModel(selectedModel.model)
stopModel.mutate(selectedModel.model)
}
},
[selectedModel, debounceUpdateModel, stopModel, setSelectedModel]

View File

@ -1,10 +1,9 @@
import { Model, SettingComponentProps } from '@janhq/core'
import { SettingComponentProps } from '@janhq/core'
import { presetConfiguration } from './predefinedComponent'
export const getConfigurationsData = (
settings: object,
selectedModel?: Model
settings: object
): SettingComponentProps[] => {
const componentData: SettingComponentProps[] = []
@ -23,15 +22,11 @@ export const getConfigurationsData = (
switch (key) {
case 'max_tokens':
componentSetting.controllerProps.max =
selectedModel?.max_tokens ||
componentSetting.controllerProps.max ||
4096
componentSetting.controllerProps.max ?? 4096
break
case 'ctx_len':
componentSetting.controllerProps.max =
selectedModel?.ctx_len ||
componentSetting.controllerProps.max ||
2048
componentSetting.controllerProps.max ?? 2048
break
}
}