Merge pull request #4184 from janhq/dev

Release cut 0.5.10
Louis 2024-12-02 19:19:45 +07:00 committed by GitHub
commit 0401ae7805
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
111 changed files with 2390 additions and 1206 deletions

.github/ISSUE_TEMPLATE/roadmap.md (new file)

@ -0,0 +1,26 @@
## Goal
## Tasklist
### Frontend
- [ ] link to janhq/jan epics
**Bugs**
- [ ] link to bugs
### Backend
- [ ] link to janhq/cortex.cpp epics
**Bugs**
- [ ] link to bug issues
### Infra
- [ ] link to infra issues
### Administrative / Management
- [ ] link to infra issues
### Marketing
-------
## Resources


@ -3,7 +3,6 @@ import { joinPath } from './core'
import { openFileExplorer } from './core' import { openFileExplorer } from './core'
import { getJanDataFolderPath } from './core' import { getJanDataFolderPath } from './core'
import { abortDownload } from './core' import { abortDownload } from './core'
import { getFileSize } from './core'
import { executeOnMain } from './core' import { executeOnMain } from './core'
describe('test core apis', () => { describe('test core apis', () => {
@ -66,18 +65,6 @@ describe('test core apis', () => {
expect(result).toBe('aborted') expect(result).toBe('aborted')
}) })
it('should get file size', async () => {
const url = 'http://example.com/file'
globalThis.core = {
api: {
getFileSize: jest.fn().mockResolvedValue(1024),
},
}
const result = await getFileSize(url)
expect(globalThis.core.api.getFileSize).toHaveBeenCalledWith(url)
expect(result).toBe(1024)
})
it('should execute function on main process', async () => { it('should execute function on main process', async () => {
const extension = 'testExtension' const extension = 'testExtension'
const method = 'testMethod' const method = 'testMethod'


@ -28,15 +28,6 @@ const downloadFile: (downloadRequest: DownloadRequest, network?: NetworkConfig)
network network
) => globalThis.core?.api?.downloadFile(downloadRequest, network) ) => globalThis.core?.api?.downloadFile(downloadRequest, network)
/**
* Get unit in bytes for a remote file.
*
* @param url - The url of the file.
* @returns {Promise<number>} - A promise that resolves with the file size.
*/
const getFileSize: (url: string) => Promise<number> = (url: string) =>
globalThis.core.api?.getFileSize(url)
/** /**
* Aborts the download of a specific file. * Aborts the download of a specific file.
* @param {string} fileName - The name of the file whose download is to be aborted. * @param {string} fileName - The name of the file whose download is to be aborted.
@ -167,7 +158,6 @@ export {
getUserHomePath, getUserHomePath,
systemInformation, systemInformation,
showToast, showToast,
getFileSize,
dirName, dirName,
FileStat, FileStat,
} }


@ -23,6 +23,11 @@ jest.mock('fs', () => ({
createWriteStream: jest.fn(), createWriteStream: jest.fn(),
})) }))
const requestMock = jest.fn((options, callback) => {
callback(new Error('Test error'), null)
})
jest.mock('request', () => requestMock)
jest.mock('request-progress', () => { jest.mock('request-progress', () => {
return jest.fn().mockImplementation(() => { return jest.fn().mockImplementation(() => {
return { return {
@ -54,18 +59,6 @@ describe('Downloader', () => {
beforeEach(() => { beforeEach(() => {
jest.resetAllMocks() jest.resetAllMocks()
}) })
it('should handle getFileSize errors correctly', async () => {
const observer = jest.fn()
const url = 'http://example.com/file'
const downloader = new Downloader(observer)
const requestMock = jest.fn((options, callback) => {
callback(new Error('Test error'), null)
})
jest.mock('request', () => requestMock)
await expect(downloader.getFileSize(observer, url)).rejects.toThrow('Test error')
})
it('should pause download correctly', () => { it('should pause download correctly', () => {
const observer = jest.fn() const observer = jest.fn()


@ -135,25 +135,4 @@ export class Downloader implements Processor {
pauseDownload(_observer: any, fileName: any) { pauseDownload(_observer: any, fileName: any) {
DownloadManager.instance.networkRequests[fileName]?.pause() DownloadManager.instance.networkRequests[fileName]?.pause()
} }
async getFileSize(_observer: any, url: string): Promise<number> {
return new Promise((resolve, reject) => {
const request = require('request')
request(
{
url,
method: 'HEAD',
},
function (err: any, response: any) {
if (err) {
console.error('Getting file size failed:', err)
reject(err)
} else {
const size: number = response.headers['content-length'] ?? -1
resolve(size)
}
}
)
})
}
} }


@ -1,7 +1,6 @@
import { HttpServer } from '../HttpServer' import { HttpServer } from '../HttpServer'
import { import {
chatCompletions, chatCompletions,
deleteBuilder,
downloadModel, downloadModel,
getBuilder, getBuilder,
retrieveBuilder, retrieveBuilder,
@ -14,8 +13,6 @@ import {
} from './helper/builder' } from './helper/builder'
import { JanApiRouteConfiguration } from './helper/configuration' import { JanApiRouteConfiguration } from './helper/configuration'
import { startModel, stopModel } from './helper/startStopModel'
import { ModelSettingParams } from '../../../types'
export const commonRouter = async (app: HttpServer) => { export const commonRouter = async (app: HttpServer) => {
const normalizeData = (data: any) => { const normalizeData = (data: any) => {
@ -28,19 +25,25 @@ export const commonRouter = async (app: HttpServer) => {
// Read & Delete :: Threads | Models | Assistants // Read & Delete :: Threads | Models | Assistants
Object.keys(JanApiRouteConfiguration).forEach((key) => { Object.keys(JanApiRouteConfiguration).forEach((key) => {
app.get(`/${key}`, async (_req, _res) => { app.get(`/${key}`, async (_req, _res) => {
if (key === 'models') { if (key.includes('models')) {
return models(_req, _res) return models(_req, _res)
} }
return getBuilder(JanApiRouteConfiguration[key]).then(normalizeData) return getBuilder(JanApiRouteConfiguration[key]).then(normalizeData)
}) })
app.get(`/${key}/:id`, async (request: any) => app.get(`/${key}/:id`, async (_req: any, _res: any) => {
retrieveBuilder(JanApiRouteConfiguration[key], request.params.id) if (key.includes('models')) {
) return models(_req, _res)
}
return retrieveBuilder(JanApiRouteConfiguration[key], _req.params.id)
})
app.delete(`/${key}/:id`, async (request: any) => app.delete(`/${key}/:id`, async (_req: any, _res: any) => {
deleteBuilder(JanApiRouteConfiguration[key], request.params.id) if (key.includes('models')) {
) return models(_req, _res)
}
return retrieveBuilder(JanApiRouteConfiguration[key], _req.params.id)
})
}) })
// Threads // Threads
@ -70,16 +73,9 @@ export const commonRouter = async (app: HttpServer) => {
}) })
) )
app.put(`/models/:modelId/start`, async (request: any) => { app.post(`/models/start`, async (request: any, reply: any) => models(request, reply))
let settingParams: ModelSettingParams | undefined = undefined
if (Object.keys(request.body).length !== 0) {
settingParams = JSON.parse(request.body) as ModelSettingParams
}
return startModel(request.params.modelId, settingParams) app.post(`/models/stop`, async (request: any, reply: any) => models(request, reply))
})
app.put(`/models/:modelId/stop`, async (request: any) => stopModel(request.params.modelId))
// Chat Completion // Chat Completion
app.post(`/chat/completions`, async (request: any, reply: any) => chatCompletions(request, reply)) app.post(`/chat/completions`, async (request: any, reply: any) => chatCompletions(request, reply))


@ -1,17 +1,7 @@
import { import { existsSync, readdirSync, readFileSync, writeFileSync, mkdirSync, appendFileSync } from 'fs'
existsSync,
readdirSync,
readFileSync,
writeFileSync,
mkdirSync,
appendFileSync,
rmdirSync,
} from 'fs'
import { join } from 'path'
import { import {
getBuilder, getBuilder,
retrieveBuilder, retrieveBuilder,
deleteBuilder,
getMessages, getMessages,
retrieveMessage, retrieveMessage,
createThread, createThread,
@ -82,34 +72,6 @@ describe('builder helper functions', () => {
}) })
}) })
describe('deleteBuilder', () => {
it('should return a message if trying to delete Jan assistant', async () => {
const result = await deleteBuilder({ ...mockConfiguration, dirName: 'assistants' }, 'jan')
expect(result).toEqual({ message: 'Cannot delete Jan assistant' })
})
it('should return a message if data is not found', async () => {
;(existsSync as jest.Mock).mockReturnValue(true)
;(readdirSync as jest.Mock).mockReturnValue(['file1'])
;(readFileSync as jest.Mock).mockReturnValue(JSON.stringify({ id: 'model1' }))
const result = await deleteBuilder(mockConfiguration, 'nonexistentId')
expect(result).toEqual({ message: 'Not found' })
})
it('should delete the directory and return success message', async () => {
;(existsSync as jest.Mock).mockReturnValue(true)
;(readdirSync as jest.Mock).mockReturnValue(['file1'])
;(readFileSync as jest.Mock).mockReturnValue(JSON.stringify({ id: 'model1' }))
const result = await deleteBuilder(mockConfiguration, 'model1')
expect(rmdirSync).toHaveBeenCalledWith(join('/mock/path', 'mockDir', 'model1'), {
recursive: true,
})
expect(result).toEqual({ id: 'model1', object: 'mockObject', deleted: true })
})
})
describe('getMessages', () => { describe('getMessages', () => {
it('should return an empty array if message file does not exist', async () => { it('should return an empty array if message file does not exist', async () => {
;(existsSync as jest.Mock).mockReturnValue(false) ;(existsSync as jest.Mock).mockReturnValue(false)


@ -73,34 +73,6 @@ export const retrieveBuilder = async (configuration: RouteConfiguration, id: str
return filteredData return filteredData
} }
export const deleteBuilder = async (configuration: RouteConfiguration, id: string) => {
if (configuration.dirName === 'assistants' && id === 'jan') {
return {
message: 'Cannot delete Jan assistant',
}
}
const directoryPath = join(getJanDataFolderPath(), configuration.dirName)
try {
const data = await retrieveBuilder(configuration, id)
if (!data) {
return {
message: 'Not found',
}
}
const objectPath = join(directoryPath, id)
rmdirSync(objectPath, { recursive: true })
return {
id: id,
object: configuration.delete.object,
deleted: true,
}
} catch (ex) {
console.error(ex)
}
}
export const getMessages = async (threadId: string): Promise<ThreadMessage[]> => { export const getMessages = async (threadId: string): Promise<ThreadMessage[]> => {
const threadDirPath = join(getJanDataFolderPath(), 'threads', threadId) const threadDirPath = join(getJanDataFolderPath(), 'threads', threadId)
const messageFile = 'messages.jsonl' const messageFile = 'messages.jsonl'
@ -308,7 +280,7 @@ export const models = async (request: any, reply: any) => {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
} }
const response = await fetch(`${CORTEX_API_URL}/models`, { const response = await fetch(`${CORTEX_API_URL}/models${request.url.split('/models')[1] ?? ""}`, {
method: request.method, method: request.method,
headers: headers, headers: headers,
body: JSON.stringify(request.body), body: JSON.stringify(request.body),


@ -1,10 +0,0 @@
import { startModel } from './startStopModel'
describe('startModel', () => {
it('test_startModel_error', async () => {
const modelId = 'testModelId'
const settingParams = undefined
expect(startModel(modelId, settingParams)).resolves.toThrow()
})
})


@ -1,25 +0,0 @@
import { ModelSettingParams } from '../../../../types'
import { CORTEX_DEFAULT_PORT, LOCAL_HOST } from './consts'
/**
* Start a model
* @param modelId
* @param settingParams
* @returns
*/
export const startModel = async (modelId: string, settingParams?: ModelSettingParams) => {
return fetch(`http://${LOCAL_HOST}:${CORTEX_DEFAULT_PORT}/v1/models/start`, {
method: 'POST',
body: JSON.stringify({ model: modelId, ...settingParams }),
})
}
/*
* Stop model.
*/
export const stopModel = async (modelId: string) => {
return fetch(`http://${LOCAL_HOST}:${CORTEX_DEFAULT_PORT}/v1/models/stop`, {
method: 'POST',
body: JSON.stringify({ model: modelId }),
})
}


@ -27,6 +27,7 @@ export enum NativeRoute {
quickAskSizeUpdated = 'quickAskSizeUpdated', quickAskSizeUpdated = 'quickAskSizeUpdated',
ackDeepLink = 'ackDeepLink', ackDeepLink = 'ackDeepLink',
factoryReset = 'factoryReset'
} }
/** /**
@ -65,7 +66,6 @@ export enum DownloadRoute {
pauseDownload = 'pauseDownload', pauseDownload = 'pauseDownload',
resumeDownload = 'resumeDownload', resumeDownload = 'resumeDownload',
getDownloadProgress = 'getDownloadProgress', getDownloadProgress = 'getDownloadProgress',
getFileSize = 'getFileSize',
} }
export enum DownloadEvent { export enum DownloadEvent {


@ -12,7 +12,7 @@ export type SettingComponentProps = {
export type ConfigType = 'runtime' | 'setting' export type ConfigType = 'runtime' | 'setting'
export type ControllerType = 'slider' | 'checkbox' | 'input' export type ControllerType = 'slider' | 'checkbox' | 'input' | 'tag'
export type InputType = 'password' | 'text' | 'email' | 'number' | 'tel' | 'url' export type InputType = 'password' | 'text' | 'email' | 'number' | 'tel' | 'url'
@ -22,7 +22,7 @@ export type InputAction = InputActionsTuple[number]
export type InputComponentProps = { export type InputComponentProps = {
placeholder: string placeholder: string
value: string value: string | string[]
type?: InputType type?: InputType
textAlign?: 'left' | 'right' textAlign?: 'left' | 'right'
inputActions?: InputAction[] inputActions?: InputAction[]


@ -13,6 +13,7 @@
}, },
"desktop": "Desktop", "desktop": "Desktop",
"data-folder": "Jan Data Folder", "data-folder": "Jan Data Folder",
"privacy": "Privacy",
"user-guides": { "user-guides": {
"title": "BASIC USAGE", "title": "BASIC USAGE",
"type": "separator" "type": "separator"


@ -0,0 +1,63 @@
---
title: Jan Privacy
description: Jan is an app that allows you to own your AI. We prioritize your control over your data and explain what data we collect and why.
keywords:
[
Jan AI,
Jan,
ChatGPT alternative,
local AI,
private AI,
conversational AI,
OpenAI platform alternative,
no-subscription fee,
large language model,
about Jan,
desktop application,
thinking machine,
jan vision,
]
---
# Privacy
Jan is an app that allows you to own your AI. We prioritize your control over your data and explain what data we collect and why.
- Jan can't see your chats with AI
- You're free to opt out
## Why and what we track
To build a reliable, user-friendly AI that you own, we need to understand how Jan is used. We collect two types of data: performance data and usage data.
### Performance data
We track app crashes and collect technical details about what went wrong, along with basic information about the hardware you're using.
When Jan crashes, we collect:
- Specific AI model in use during the crash
- Hardware: `CPU`, `GPU`, `RAM`
- Logs: `Date/Time`, `OS & version`, `app version`, `error codes & messages`.
### Usage data
We track data like how often the app is opened to check:
- **Active Users**: How many people use Jan daily to measure engagement
- **Retention Rates**: To understand if users are finding value in Jan over time
Usage data is tied to a randomly generated telemetry ID. None of our usage data can be linked to your personal identity.
## What we **don't** track:
- Your conversations with Jan. Those stay on your device.
- Your files. We don't scan, upload, or even look at them.
- Anything tied to your identity.
## Using Cloud Models
Jan allows you to connect cloud model APIs. If you choose to use cloud-based models (e.g. GPT or Claude models), the API provider handling the model will have access to your messages as part of processing the request. Jan doesn't see or store these messages - they go directly to the provider. Remember: with local models, everything stays on your device, so no one - not even us - can see your messages.
## Where we store & process data
We use [PostHog](https://posthog.com/eu) EU for analytics, ensuring all data is processed within the European Union. This setup complies with GDPR and other strict privacy regulations. PostHog lets us self-host and securely manage the data we collect. Read more [on PostHog's GDPR doc](https://posthog.com/docs/privacy/gdpr-compliance).
For a detailed breakdown of the analytics data we collect, you can check out our analytics repo. If you have any questions or concerns, feel free to reach out to us at hi@jan.ai.

Binary file not shown (new image, 262 KiB)

Binary file not shown (new image, 618 KiB)


@ -0,0 +1,104 @@
---
title: Tabby
description: A step-by-step guide on integrating Jan with Tabby and VSCode, JetBrains, or other IDEs.
keywords:
[
Jan,
Customizable Intelligence, LLM,
local AI,
privacy focus,
free and open source,
private and offline,
conversational AI,
no-subscription fee,
large language models,
Tabby integration,
VSCode integration,
JetBrains integration,
]
---
import { Tabs, Steps } from 'nextra/components'
# Tabby
## Integrate Jan with Tabby and Your Favorite IDEs
[Tabby](https://www.tabbyml.com/) is an open-source, self-hosted AI coding assistant.
With Tabby, teams can easily set up their own LLM-powered code completion server.
Tabby provides integrations with VSCode, JetBrains, and other IDEs to help developers code more efficiently,
and it can be used with various LLM services, including Jan.
To integrate Jan with Tabby, follow these steps:
<Steps>
### Step 1: Enable the Jan API Server
To set up Tabby with Jan's Local Server, you must activate the Jan API Server with your chosen model.
1. Click the `Local API Server` (`<>`) button above the Settings. Jan will direct you to the **Local API Server** section.
2. Configure the server, including the **IP Port**, **Cross-Origin Resource Sharing (CORS)**, and **Verbose Server Logs**.
3. Press the **Start Server** button. A quick check that the server is reachable is sketched below.
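Before moving on, you can confirm the server is actually reachable. The snippet below is a minimal sketch, assuming the default `http://localhost:1337/v1` endpoint used later in this guide and an OpenAI-compatible `/models` route; adjust the base URL if you configured a different IP or port.
```typescript
// Minimal sketch: confirm the Jan Local API Server is reachable before wiring up Tabby.
// Assumes the default endpoint http://localhost:1337/v1 (see the config.toml example in Step 3);
// change JAN_BASE_URL if you configured a different IP or port.
const JAN_BASE_URL = 'http://localhost:1337/v1'

async function listJanModels(): Promise<void> {
  const response = await fetch(`${JAN_BASE_URL}/models`)
  if (!response.ok) {
    throw new Error(`Jan API server returned ${response.status} - is the server started?`)
  }
  const body = await response.json()
  // Each entry's `id` is the Model ID referenced from Tabby's config.toml in Step 3.
  console.log(body)
}

listJanModels().catch(console.error)
```
If the request fails, double-check that the server was started and that the port and CORS settings match your configuration.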
### Step 2: Find the Model ID and Ensure the Model is Activated
1. Go to `Settings` > `My Models`.
2. Models are listed with their **Model ID** beneath their names.
3. Click the **three dots (⋮)** button next to the model.
4. Select **Start Model** to activate the model.
### Step 3: Install the Tabby Server
Use the following documentation to install the Tabby server:
- [Docker](https://tabby.tabbyml.com/docs/quick-start/installation/docker/)
- [Apple Silicon](https://tabby.tabbyml.com/docs/quick-start/installation/apple/)
- [Linux](https://tabby.tabbyml.com/docs/quick-start/installation/linux/)
- [Windows](https://tabby.tabbyml.com/docs/quick-start/installation/windows/)
Then, follow the steps to connect Jan with the Tabby server:
[Connect Jan with Tabby](https://tabby.tabbyml.com/docs/references/models-http-api/jan.ai/).
For example, to connect Jan with Tabby, save the following configuration under `~/.tabby/config.toml`:
```toml title="~/.tabby/config.toml"
# Chat model
[model.chat.http]
kind = "openai/chat"
model_name = "model_id"
api_endpoint = "http://localhost:1337/v1"
api_key = ""
```
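To sanity-check the chat endpoint Tabby will call, you can send a single completion request to Jan directly. This is a rough sketch assuming the same `http://localhost:1337/v1` endpoint and an OpenAI-style request shape; `model_id` is a placeholder for the Model ID from Step 2.
```typescript
// Rough sketch: send one chat completion through Jan's OpenAI-compatible endpoint,
// the same route Tabby's [model.chat.http] section points at.
// 'model_id' is a placeholder - substitute the Model ID from Settings > My Models.
const JAN_BASE_URL = 'http://localhost:1337/v1'

async function testChatCompletion(modelId: string): Promise<void> {
  const response = await fetch(`${JAN_BASE_URL}/chat/completions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: modelId,
      messages: [{ role: 'user', content: 'Say hello in one short sentence.' }],
      stream: false,
    }),
  })
  if (!response.ok) {
    throw new Error(`Chat completion failed with status ${response.status}`)
  }
  const completion = await response.json()
  console.log(completion.choices?.[0]?.message?.content)
}

testChatCompletion('model_id').catch(console.error)
```
If this returns a reply, Tabby's chat integration should work against the same endpoint.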
Currently, Jan's completion and embedding APIs are under construction.
Once they are complete, you will also be able to connect Jan with Tabby for completion and embedding tasks.
### Step 4: Install Tabby in Your Favorite IDEs
Refer to the following documentation to install the Tabby extension on your favorite IDEs:
- [Visual Studio Code](https://tabby.tabbyml.com/docs/extensions/installation/vscode/)
- [JetBrains IntelliJ Platform](https://tabby.tabbyml.com/docs/extensions/installation/intellij/)
- [VIM / NeoVIM](https://tabby.tabbyml.com/docs/extensions/installation/vim/)
</Steps>
## How to Use Tabby with Jan Integration
### Answer Engine: Chat with Your Code and Documentation
Tabby offers an [Answer Engine](https://tabby.tabbyml.com/docs/administration/answer-engine/) on the homepage,
which can leverage the Jan LLM and related contexts like code, documentation, and web pages to answer user questions.
Simply open the Tabby homepage at [localhost:8080](http://localhost:8080) and ask your questions.
![Answer Engine](./_assets/tabby-answer-engine.png)
### IDE Chat Sidebar
After installing the Tabby extension on your preferred IDEs, you can engage in a conversation with Jan to:
1. Discuss your code, receive suggestions, and seek assistance.
2. Ask Jan to edit your code inline, then review and accept the proposed changes.
![Chat Sidebar](./_assets/tabby-chat-sidebar.png)


@ -12,6 +12,9 @@ import {
} from '@janhq/core/node' } from '@janhq/core/node'
import { SelectFileOption } from '@janhq/core' import { SelectFileOption } from '@janhq/core'
import { menu } from '../utils/menu' import { menu } from '../utils/menu'
import { migrate } from '../utils/migration'
import { createUserSpace } from '../utils/path'
import { setupExtensions } from '../utils/extension'
const isMac = process.platform === 'darwin' const isMac = process.platform === 'darwin'
@ -33,14 +36,28 @@ export function handleAppIPCs() {
nativeTheme.themeSource = 'light' nativeTheme.themeSource = 'light'
}) })
/**
* Handles the "setCloseApp" IPC message by closing the main application window.
* This effectively closes the application if no other windows are open.
*/
ipcMain.handle(NativeRoute.setCloseApp, () => { ipcMain.handle(NativeRoute.setCloseApp, () => {
windowManager.mainWindow?.close() windowManager.mainWindow?.close()
}) })
/**
* Handles the "setMinimizeApp" IPC message by minimizing the main application window.
* The window will be minimized to the system's taskbar or dock.
*/
ipcMain.handle(NativeRoute.setMinimizeApp, () => { ipcMain.handle(NativeRoute.setMinimizeApp, () => {
windowManager.mainWindow?.minimize() windowManager.mainWindow?.minimize()
}) })
/**
* Handles the "setMaximizeApp" IPC message. It toggles the maximization state of the main window.
* If the window is currently maximized, it will be un-maximized (restored to its previous size).
* If the window is not maximized, it will be maximized to fill the screen.
* @param _event - The IPC event object.
*/
ipcMain.handle(NativeRoute.setMaximizeApp, async (_event) => { ipcMain.handle(NativeRoute.setMaximizeApp, async (_event) => {
if (windowManager.mainWindow?.isMaximized()) { if (windowManager.mainWindow?.isMaximized()) {
windowManager.mainWindow.unmaximize() windowManager.mainWindow.unmaximize()
@ -104,6 +121,11 @@ export function handleAppIPCs() {
} }
}) })
/**
* Handles the "selectDirectory" IPC message to open a dialog for selecting a directory.
* If no main window is found, logs an error and exits.
* @returns {string} The path of the selected directory, or nothing if canceled.
*/
ipcMain.handle(NativeRoute.selectDirectory, async () => { ipcMain.handle(NativeRoute.selectDirectory, async () => {
const mainWindow = windowManager.mainWindow const mainWindow = windowManager.mainWindow
if (!mainWindow) { if (!mainWindow) {
@ -122,6 +144,14 @@ export function handleAppIPCs() {
} }
}) })
/**
* Handles the "selectFiles" IPC message to open a dialog for selecting files.
* Allows options for setting the dialog title, button label, and selection properties.
* Logs an error if no main window is found.
* @param _event - The IPC event object.
* @param option - Options for customizing file selection dialog.
* @returns {string[]} An array of selected file paths, or nothing if canceled.
*/
ipcMain.handle( ipcMain.handle(
NativeRoute.selectFiles, NativeRoute.selectFiles,
async (_event, option?: SelectFileOption) => { async (_event, option?: SelectFileOption) => {
@ -156,11 +186,20 @@ export function handleAppIPCs() {
} }
) )
/**
* Handles the "hideQuickAskWindow" IPC message to hide the quick ask window.
* @returns A promise that resolves when the window is hidden.
*/
ipcMain.handle( ipcMain.handle(
NativeRoute.hideQuickAskWindow, NativeRoute.hideQuickAskWindow,
async (): Promise<void> => windowManager.hideQuickAskWindow() async (): Promise<void> => windowManager.hideQuickAskWindow()
) )
/**
* Handles the "sendQuickAskInput" IPC message to send user input to the main window.
* @param _event - The IPC event object.
* @param input - User input string to be sent.
*/
ipcMain.handle( ipcMain.handle(
NativeRoute.sendQuickAskInput, NativeRoute.sendQuickAskInput,
async (_event, input: string): Promise<void> => { async (_event, input: string): Promise<void> => {
@ -171,6 +210,12 @@ export function handleAppIPCs() {
} }
) )
/**
* Handles the "showOpenMenu" IPC message to show the context menu at given coordinates.
* Only applicable on non-Mac platforms.
* @param e - The event object.
* @param args - Contains coordinates where the menu should appear.
*/
ipcMain.handle(NativeRoute.showOpenMenu, function (e, args) { ipcMain.handle(NativeRoute.showOpenMenu, function (e, args) {
if (!isMac && windowManager.mainWindow) { if (!isMac && windowManager.mainWindow) {
menu.popup({ menu.popup({
@ -181,23 +226,55 @@ export function handleAppIPCs() {
} }
}) })
/**
* Handles the "hideMainWindow" IPC message to hide the main application window.
* @returns A promise that resolves when the window is hidden.
*/
ipcMain.handle( ipcMain.handle(
NativeRoute.hideMainWindow, NativeRoute.hideMainWindow,
async (): Promise<void> => windowManager.hideMainWindow() async (): Promise<void> => windowManager.hideMainWindow()
) )
/**
* Handles the "showMainWindow" IPC message to show the main application window.
* @returns A promise that resolves when the window is shown.
*/
ipcMain.handle( ipcMain.handle(
NativeRoute.showMainWindow, NativeRoute.showMainWindow,
async (): Promise<void> => windowManager.showMainWindow() async (): Promise<void> => windowManager.showMainWindow()
) )
/**
* Handles the "quickAskSizeUpdated" IPC message to update the size of the quick ask window.
* Resizes window by the given height offset.
* @param _event - The IPC event object.
* @param heightOffset - The amount of height to increase.
* @returns A promise that resolves when the window is resized.
*/
ipcMain.handle( ipcMain.handle(
NativeRoute.quickAskSizeUpdated, NativeRoute.quickAskSizeUpdated,
async (_event, heightOffset: number): Promise<void> => async (_event, heightOffset: number): Promise<void> =>
windowManager.expandQuickAskWindow(heightOffset) windowManager.expandQuickAskWindow(heightOffset)
) )
/**
* Handles the "ackDeepLink" IPC message to acknowledge a deep link.
* Triggers handling of deep link in the application.
* @param _event - The IPC event object.
* @returns A promise that resolves when the deep link is acknowledged.
*/
ipcMain.handle(NativeRoute.ackDeepLink, async (_event): Promise<void> => { ipcMain.handle(NativeRoute.ackDeepLink, async (_event): Promise<void> => {
windowManager.ackDeepLink() windowManager.ackDeepLink()
}) })
/**
* Handles the "factoryReset" IPC message to reset the application to its initial state.
* Clears loaded modules, recreates user space, runs migrations, and sets up extensions.
* @param _event - The IPC event object.
* @returns A promise that resolves after the reset operations are complete.
*/
ipcMain.handle(NativeRoute.factoryReset, async (_event): Promise<void> => {
ModuleManager.instance.clearImportedModules()
return createUserSpace().then(migrate).then(setupExtensions)
})
} }


@ -25,7 +25,7 @@ test('Select GPT model from Hub and Chat with Invalid API Key', async ({
{ timeout: TIMEOUT } { timeout: TIMEOUT }
) )
const APIKeyError = page.getByTestId('invalid-API-key-error') const APIKeyError = page.getByTestId('passthrough-error-message')
await expect(APIKeyError).toBeVisible({ await expect(APIKeyError).toBeVisible({
timeout: TIMEOUT, timeout: TIMEOUT,
}) })


@ -3,7 +3,6 @@ import { app } from 'electron'
import { join } from 'path' import { join } from 'path'
import { import {
rmdirSync, rmdirSync,
readFileSync,
existsSync, existsSync,
mkdirSync, mkdirSync,
readdirSync, readdirSync,


@ -1,7 +1,7 @@
{ {
"name": "@janhq/inference-anthropic-extension", "name": "@janhq/inference-anthropic-extension",
"productName": "Anthropic Inference Engine", "productName": "Anthropic Inference Engine",
"version": "1.0.2", "version": "1.0.3",
"description": "This extension enables Anthropic chat completion API calls", "description": "This extension enables Anthropic chat completion API calls",
"main": "dist/index.js", "main": "dist/index.js",
"module": "dist/module.js", "module": "dist/module.js",


@ -5,9 +5,9 @@
"url": "https://www.anthropic.com/" "url": "https://www.anthropic.com/"
} }
], ],
"id": "claude-3-opus-20240229", "id": "claude-3-opus-latest",
"object": "model", "object": "model",
"name": "Claude 3 Opus", "name": "Claude 3 Opus Latest",
"version": "1.0", "version": "1.0",
"description": "Claude 3 Opus is a powerful model suitables for highly complex task.", "description": "Claude 3 Opus is a powerful model suitables for highly complex task.",
"format": "api", "format": "api",
@ -29,15 +29,15 @@
"url": "https://www.anthropic.com/" "url": "https://www.anthropic.com/"
} }
], ],
"id": "claude-3-sonnet-20240229", "id": "claude-3-5-haiku-latest",
"object": "model", "object": "model",
"name": "Claude 3 Sonnet", "name": "Claude 3.5 Haiku Latest",
"version": "1.0", "version": "1.0",
"description": "Claude 3 Sonnet is an ideal model balance of intelligence and speed for enterprise workloads.", "description": "Claude 3.5 Haiku is the fastest model provides near-instant responsiveness.",
"format": "api", "format": "api",
"settings": {}, "settings": {},
"parameters": { "parameters": {
"max_tokens": 4096, "max_tokens": 8192,
"temperature": 0.7, "temperature": 0.7,
"stream": false "stream": false
}, },
@ -53,39 +53,15 @@
"url": "https://www.anthropic.com/" "url": "https://www.anthropic.com/"
} }
], ],
"id": "claude-3-haiku-20240307", "id": "claude-3-5-sonnet-latest",
"object": "model", "object": "model",
"name": "Claude 3 Haiku", "name": "Claude 3.5 Sonnet Latest",
"version": "1.0",
"description": "Claude 3 Haiku is the fastest model provides near-instant responsiveness.",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 4096,
"temperature": 0.7,
"stream": false
},
"metadata": {
"author": "Anthropic",
"tags": ["General", "Big Context Length"]
},
"engine": "anthropic"
},
{
"sources": [
{
"url": "https://www.anthropic.com/"
}
],
"id": "claude-3-5-sonnet-20240620",
"object": "model",
"name": "Claude 3.5 Sonnet",
"version": "1.0", "version": "1.0",
"description": "Claude 3.5 Sonnet raises the industry bar for intelligence, outperforming competitor models and Claude 3 Opus on a wide range of evaluations, with the speed and cost of our mid-tier model, Claude 3 Sonnet.", "description": "Claude 3.5 Sonnet raises the industry bar for intelligence, outperforming competitor models and Claude 3 Opus on a wide range of evaluations, with the speed and cost of our mid-tier model, Claude 3 Sonnet.",
"format": "api", "format": "api",
"settings": {}, "settings": {},
"parameters": { "parameters": {
"max_tokens": 4096, "max_tokens": 8192,
"temperature": 0.7, "temperature": 0.7,
"stream": true "stream": true
}, },


@ -1 +1 @@
1.0.3-rc5 1.0.4


@ -2,12 +2,11 @@
set BIN_PATH=./bin set BIN_PATH=./bin
set SHARED_PATH=./../../electron/shared set SHARED_PATH=./../../electron/shared
set /p CORTEX_VERSION=<./bin/version.txt set /p CORTEX_VERSION=<./bin/version.txt
set ENGINE_VERSION=0.1.39 set ENGINE_VERSION=0.1.40
@REM Download cortex.llamacpp binaries @REM Download cortex.llamacpp binaries
set VERSION=v0.1.39 set DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/v%ENGINE_VERSION%/cortex.llamacpp-%ENGINE_VERSION%-windows-amd64
set DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/%VERSION%/cortex.llamacpp-0.1.39-windows-amd64 set CUDA_DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/v%ENGINE_VERSION%
set CUDA_DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/%VERSION%
set SUBFOLDERS=windows-amd64-noavx-cuda-12-0 windows-amd64-noavx-cuda-11-7 windows-amd64-avx2-cuda-12-0 windows-amd64-avx2-cuda-11-7 windows-amd64-noavx windows-amd64-avx windows-amd64-avx2 windows-amd64-avx512 windows-amd64-vulkan set SUBFOLDERS=windows-amd64-noavx-cuda-12-0 windows-amd64-noavx-cuda-11-7 windows-amd64-avx2-cuda-12-0 windows-amd64-avx2-cuda-11-7 windows-amd64-noavx windows-amd64-avx windows-amd64-avx2 windows-amd64-avx512 windows-amd64-vulkan
call .\node_modules\.bin\download -e --strip 1 -o %BIN_PATH% https://github.com/janhq/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz call .\node_modules\.bin\download -e --strip 1 -o %BIN_PATH% https://github.com/janhq/cortex.cpp/releases/download/v%CORTEX_VERSION%/cortex-%CORTEX_VERSION%-windows-amd64.tar.gz


@ -2,7 +2,7 @@
# Read CORTEX_VERSION # Read CORTEX_VERSION
CORTEX_VERSION=$(cat ./bin/version.txt) CORTEX_VERSION=$(cat ./bin/version.txt)
ENGINE_VERSION=0.1.39 ENGINE_VERSION=0.1.40
CORTEX_RELEASE_URL="https://github.com/janhq/cortex.cpp/releases/download" CORTEX_RELEASE_URL="https://github.com/janhq/cortex.cpp/releases/download"
ENGINE_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}" ENGINE_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}"
CUDA_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}" CUDA_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}"
@ -42,8 +42,8 @@ elif [ "$OS_TYPE" == "Darwin" ]; then
chmod +x "./bin/cortex-server" chmod +x "./bin/cortex-server"
# Download engines for macOS # Download engines for macOS
download "${ENGINE_DOWNLOAD_URL}-mac-arm64.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/mac-arm64/v0.1.39" download "${ENGINE_DOWNLOAD_URL}-mac-arm64.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/mac-arm64/v${ENGINE_VERSION}"
download "${ENGINE_DOWNLOAD_URL}-mac-amd64.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/mac-amd64/v0.1.39" download "${ENGINE_DOWNLOAD_URL}-mac-amd64.tar.gz" -e --strip 1 -o "${SHARED_PATH}/engines/cortex.llamacpp/mac-amd64/v${ENGINE_VERSION}"
else else
echo "Unsupported operating system: $OS_TYPE" echo "Unsupported operating system: $OS_TYPE"


@ -1,7 +1,7 @@
{ {
"name": "@janhq/inference-cortex-extension", "name": "@janhq/inference-cortex-extension",
"productName": "Cortex Inference Engine", "productName": "Cortex Inference Engine",
"version": "1.0.22", "version": "1.0.23",
"description": "This extension embeds cortex.cpp, a lightweight inference engine written in C++. See https://jan.ai.\nAdditional dependencies could be installed to run without Cuda Toolkit installation.", "description": "This extension embeds cortex.cpp, a lightweight inference engine written in C++. See https://jan.ai.\nAdditional dependencies could be installed to run without Cuda Toolkit installation.",
"main": "dist/index.js", "main": "dist/index.js",
"node": "dist/node/index.cjs.js", "node": "dist/node/index.cjs.js",


@ -1,33 +1,59 @@
[ [
{ {
"key": "test", "key": "cont_batching",
"title": "Test", "title": "Continuous batching",
"description": "Test", "description": "The number of parallel operations",
"controllerType": "input",
"controllerProps": {
"placeholder": "Test",
"value": ""
}
},
{
"key": "embedding",
"title": "Embedding",
"description": "Whether to enable embedding.",
"controllerType": "checkbox", "controllerType": "checkbox",
"controllerProps": { "controllerProps": {
"value": true "value": true
} }
}, },
{ {
"key": "ctx_len", "key": "n_parallel",
"title": "Context Length", "title": "Parallel operations",
"description": "The context length for model operations varies; the maximum depends on the specific model used.", "description": "The number of parallel operations",
"controllerType": "slider", "controllerType": "input",
"controllerProps": { "controllerProps": {
"min": 0, "value": "4",
"max": 4096, "placeholder": "4"
"step": 128, }
"value": 2048 },
{
"key": "flash_attn",
"title": "Flash Attention enabled",
"description": "To enable Flash Attention, default is true",
"controllerType": "checkbox",
"controllerProps": {
"value": true
}
},
{
"key": "caching_enabled",
"title": "Caching enabled",
"description": "To enable prompt caching or not",
"controllerType": "checkbox",
"controllerProps": {
"value": true
}
},
{
"key": "cache_type",
"title": "KV Cache Type",
"description": "KV cache type: f16, q8_0, q4_0, default is f16 (change this could break the model).",
"controllerType": "input",
"controllerProps": {
"placeholder": "f16",
"value": "f16"
}
},
{
"key": "use_mmap",
"title": "To enable mmap",
"description": "To enable mmap, default is true",
"controllerType": "checkbox",
"controllerProps": {
"value": true
} }
} }
] ]


@ -117,10 +117,10 @@ export default [
qwen2572bJson, qwen2572bJson,
]), ]),
NODE: JSON.stringify(`${packageJson.name}/${packageJson.node}`), NODE: JSON.stringify(`${packageJson.name}/${packageJson.node}`),
DEFAULT_SETTINGS: JSON.stringify(defaultSettingJson), SETTINGS: JSON.stringify(defaultSettingJson),
CORTEX_API_URL: JSON.stringify('http://127.0.0.1:39291'), CORTEX_API_URL: JSON.stringify('http://127.0.0.1:39291'),
CORTEX_SOCKET_URL: JSON.stringify('ws://127.0.0.1:39291'), CORTEX_SOCKET_URL: JSON.stringify('ws://127.0.0.1:39291'),
CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.39'), CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.40'),
}), }),
// Allow json resolution // Allow json resolution
json(), json(),


@ -2,7 +2,7 @@ declare const NODE: string
declare const CORTEX_API_URL: string declare const CORTEX_API_URL: string
declare const CORTEX_SOCKET_URL: string declare const CORTEX_SOCKET_URL: string
declare const CORTEX_ENGINE_VERSION: string declare const CORTEX_ENGINE_VERSION: string
declare const DEFAULT_SETTINGS: Array<any> declare const SETTINGS: Array<any>
declare const MODELS: Array<any> declare const MODELS: Array<any>
/** /**


@ -20,6 +20,7 @@ import {
ModelEvent, ModelEvent,
SystemInformation, SystemInformation,
dirName, dirName,
AppConfigurationEventName,
} from '@janhq/core' } from '@janhq/core'
import PQueue from 'p-queue' import PQueue from 'p-queue'
import ky from 'ky' import ky from 'ky'
@ -35,6 +36,15 @@ enum DownloadTypes {
DownloadStarted = 'onFileDownloadStarted', DownloadStarted = 'onFileDownloadStarted',
} }
export enum Settings {
n_parallel = 'n_parallel',
cont_batching = 'cont_batching',
caching_enabled = 'caching_enabled',
flash_attn = 'flash_attn',
cache_type = 'cache_type',
use_mmap = 'use_mmap',
}
/** /**
* A class that implements the InferenceExtension interface from the @janhq/core package. * A class that implements the InferenceExtension interface from the @janhq/core package.
* The class provides methods for initializing and stopping a model, and for making inference requests. * The class provides methods for initializing and stopping a model, and for making inference requests.
@ -49,6 +59,14 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
shouldReconnect = true shouldReconnect = true
/** Default Engine model load settings */
n_parallel: number = 4
cont_batching: boolean = true
caching_enabled: boolean = true
flash_attn: boolean = true
use_mmap: boolean = true
cache_type: string = 'f16'
/** /**
* The URL for making inference requests. * The URL for making inference requests.
*/ */
@ -59,6 +77,8 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
*/ */
socket?: WebSocket = undefined socket?: WebSocket = undefined
abortControllers = new Map<string, AbortController>()
/** /**
* Subscribes to events emitted by the @janhq/core package. * Subscribes to events emitted by the @janhq/core package.
*/ */
@ -69,8 +89,25 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
super.onLoad() super.onLoad()
// Register Settings
this.registerSettings(SETTINGS)
this.n_parallel =
Number(await this.getSetting<string>(Settings.n_parallel, '4')) ?? 4
this.cont_batching = await this.getSetting<boolean>(
Settings.cont_batching,
true
)
this.caching_enabled = await this.getSetting<boolean>(
Settings.caching_enabled,
true
)
this.flash_attn = await this.getSetting<boolean>(Settings.flash_attn, true)
this.use_mmap = await this.getSetting<boolean>(Settings.use_mmap, true)
this.cache_type = await this.getSetting<string>(Settings.cache_type, 'f16')
this.queue.add(() => this.clean()) this.queue.add(() => this.clean())
// Run the process watchdog // Run the process watchdog
const systemInfo = await systemInformation() const systemInfo = await systemInformation()
this.queue.add(() => executeOnMain(NODE, 'run', systemInfo)) this.queue.add(() => executeOnMain(NODE, 'run', systemInfo))
@ -81,6 +118,15 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
window.addEventListener('beforeunload', () => { window.addEventListener('beforeunload', () => {
this.clean() this.clean()
}) })
const currentMode = systemInfo.gpuSetting?.run_mode
events.on(AppConfigurationEventName.OnConfigurationUpdate, async () => {
const systemInfo = await systemInformation()
// Update run mode on settings update
if (systemInfo.gpuSetting?.run_mode !== currentMode)
this.queue.add(() => this.setDefaultEngine(systemInfo))
})
} }
async onUnload() { async onUnload() {
@ -91,6 +137,22 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
super.onUnload() super.onUnload()
} }
onSettingUpdate<T>(key: string, value: T): void {
if (key === Settings.n_parallel && typeof value === 'string') {
this.n_parallel = Number(value) ?? 1
} else if (key === Settings.cont_batching && typeof value === 'boolean') {
this.cont_batching = value as boolean
} else if (key === Settings.caching_enabled && typeof value === 'boolean') {
this.caching_enabled = value as boolean
} else if (key === Settings.flash_attn && typeof value === 'boolean') {
this.flash_attn = value as boolean
} else if (key === Settings.cache_type && typeof value === 'string') {
this.cache_type = value as string
} else if (key === Settings.use_mmap && typeof value === 'boolean') {
this.use_mmap = value as boolean
}
}
override async loadModel( override async loadModel(
model: Model & { file_path?: string } model: Model & { file_path?: string }
): Promise<void> { ): Promise<void> {
@ -124,6 +186,10 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
const { mmproj, ...settings } = model.settings const { mmproj, ...settings } = model.settings
model.settings = settings model.settings = settings
} }
const controller = new AbortController()
const { signal } = controller
this.abortControllers.set(model.id, controller)
return await this.queue.add(() => return await this.queue.add(() =>
ky ky
@ -135,13 +201,21 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
model.engine === InferenceEngine.nitro // Legacy model cache model.engine === InferenceEngine.nitro // Legacy model cache
? InferenceEngine.cortex_llamacpp ? InferenceEngine.cortex_llamacpp
: model.engine, : model.engine,
cont_batching: this.cont_batching,
n_parallel: this.n_parallel,
caching_enabled: this.caching_enabled,
flash_attn: this.flash_attn,
cache_type: this.cache_type,
use_mmap: this.use_mmap,
}, },
timeout: false, timeout: false,
signal,
}) })
.json() .json()
.catch(async (e) => { .catch(async (e) => {
throw (await e.response?.json()) ?? e throw (await e.response?.json()) ?? e
}) })
.finally(() => this.abortControllers.delete(model.id))
.then() .then()
) )
} }
@ -152,6 +226,9 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
json: { model: model.id }, json: { model: model.id },
}) })
.json() .json()
.finally(() => {
this.abortControllers.get(model.id)?.abort()
})
.then() .then()
} }
@ -180,12 +257,20 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
'engineVariant', 'engineVariant',
systemInfo.gpuSetting systemInfo.gpuSetting
) )
return ky return (
.post( ky
`${CORTEX_API_URL}/v1/engines/${InferenceEngine.cortex_llamacpp}/default?version=${CORTEX_ENGINE_VERSION}&variant=${variant}`, // Fallback support for legacy API
{ json: {} } .post(
) `${CORTEX_API_URL}/v1/engines/${InferenceEngine.cortex_llamacpp}/default?version=${CORTEX_ENGINE_VERSION}&variant=${variant}`,
.then(() => {}) {
json: {
version: CORTEX_ENGINE_VERSION,
variant,
},
}
)
.then(() => {})
)
} }
/** /**
@ -251,6 +336,7 @@ export default class JanInferenceCortexExtension extends LocalOAIEngine {
this.socket.onclose = (event) => { this.socket.onclose = (event) => {
console.log('WebSocket closed:', event) console.log('WebSocket closed:', event)
events.emit(ModelEvent.OnModelStopped, {})
if (this.shouldReconnect) { if (this.shouldReconnect) {
console.log(`Attempting to reconnect...`) console.log(`Attempting to reconnect...`)
setTimeout(() => this.subscribeToEvents(), 1000) setTimeout(() => this.subscribeToEvents(), 1000)


@ -2,6 +2,7 @@ import { describe, expect, it } from '@jest/globals'
import { engineVariant, executableCortexFile } from './execute' import { engineVariant, executableCortexFile } from './execute'
import { GpuSetting } from '@janhq/core/node' import { GpuSetting } from '@janhq/core/node'
import { cpuInfo } from 'cpu-instructions' import { cpuInfo } from 'cpu-instructions'
import { fork } from 'child_process'
let testSettings: GpuSetting = { let testSettings: GpuSetting = {
run_mode: 'cpu', run_mode: 'cpu',
@ -31,9 +32,13 @@ let mockCpuInfo = cpuInfo.cpuInfo as jest.Mock
mockCpuInfo.mockReturnValue([]) mockCpuInfo.mockReturnValue([])
jest.mock('@janhq/core/node', () => ({ jest.mock('@janhq/core/node', () => ({
appResourcePath: () => ".", appResourcePath: () => '.',
log: jest.fn() log: jest.fn(),
})) }))
jest.mock('child_process', () => ({
fork: jest.fn(),
}))
const mockFork = fork as jest.Mock
describe('test executable cortex file', () => { describe('test executable cortex file', () => {
afterAll(function () { afterAll(function () {
@ -43,6 +48,14 @@ describe('test executable cortex file', () => {
}) })
it('executes on MacOS', () => { it('executes on MacOS', () => {
const mockProcess = {
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('noavx')
}
}),
send: jest.fn(),
}
Object.defineProperty(process, 'platform', { Object.defineProperty(process, 'platform', {
value: 'darwin', value: 'darwin',
}) })
@ -51,7 +64,7 @@ describe('test executable cortex file', () => {
}) })
expect(executableCortexFile(testSettings)).toEqual( expect(executableCortexFile(testSettings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: executablePath:
originalPlatform === 'darwin' originalPlatform === 'darwin'
? expect.stringContaining(`cortex-server`) ? expect.stringContaining(`cortex-server`)
@ -60,13 +73,35 @@ describe('test executable cortex file', () => {
vkVisibleDevices: '', vkVisibleDevices: '',
}) })
) )
expect(engineVariant(testSettings)).toEqual('mac-arm64')
mockFork.mockReturnValue(mockProcess)
expect(engineVariant(testSettings)).resolves.toEqual('mac-arm64')
})
it('executes on MacOS', () => {
Object.defineProperty(process, 'platform', {
value: 'darwin',
})
Object.defineProperty(process, 'arch', {
value: 'arm64',
})
const mockProcess = {
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('noavx')
}
}),
send: jest.fn(),
}
mockFork.mockReturnValue(mockProcess)
Object.defineProperty(process, 'arch', { Object.defineProperty(process, 'arch', {
value: 'x64', value: 'x64',
}) })
expect(executableCortexFile(testSettings)).toEqual( expect(executableCortexFile(testSettings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: executablePath:
originalPlatform === 'darwin' originalPlatform === 'darwin'
? expect.stringContaining(`cortex-server`) ? expect.stringContaining(`cortex-server`)
@ -75,7 +110,7 @@ describe('test executable cortex file', () => {
vkVisibleDevices: '', vkVisibleDevices: '',
}) })
) )
expect(engineVariant(testSettings)).toEqual('mac-amd64') expect(engineVariant(testSettings)).resolves.toEqual('mac-amd64')
}) })
it('executes on Windows CPU', () => { it('executes on Windows CPU', () => {
@ -86,16 +121,25 @@ describe('test executable cortex file', () => {
...testSettings, ...testSettings,
run_mode: 'cpu', run_mode: 'cpu',
} }
mockCpuInfo.mockReturnValue(['avx']) const mockProcess = {
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('avx')
}
}),
send: jest.fn(),
}
mockFork.mockReturnValue(mockProcess)
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server.exe`), executablePath: expect.stringContaining(`cortex-server.exe`),
cudaVisibleDevices: '', cudaVisibleDevices: '',
vkVisibleDevices: '', vkVisibleDevices: '',
}) })
) )
expect(engineVariant()).toEqual('windows-amd64-avx') expect(engineVariant()).resolves.toEqual('windows-amd64-avx')
}) })
it('executes on Windows Cuda 11', () => { it('executes on Windows Cuda 11', () => {
@ -122,16 +166,27 @@ describe('test executable cortex file', () => {
}, },
], ],
} }
mockCpuInfo.mockReturnValue(['avx2'])
const mockProcess = {
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('avx2')
}
}),
send: jest.fn(),
}
mockFork.mockReturnValue(mockProcess)
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server.exe`), executablePath: expect.stringContaining(`cortex-server.exe`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual('windows-amd64-avx2-cuda-11-7') expect(engineVariant(settings)).resolves.toEqual(
'windows-amd64-avx2-cuda-11-7'
)
}) })
it('executes on Windows Cuda 12', () => { it('executes on Windows Cuda 12', () => {
@ -158,18 +213,36 @@ describe('test executable cortex file', () => {
}, },
], ],
} }
mockCpuInfo.mockReturnValue(['noavx']) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('noavx')
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server.exe`), executablePath: expect.stringContaining(`cortex-server.exe`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual('windows-amd64-noavx-cuda-12-0') expect(engineVariant(settings)).resolves.toEqual(
mockCpuInfo.mockReturnValue(['avx512']) 'windows-amd64-noavx-cuda-12-0'
expect(engineVariant(settings)).toEqual('windows-amd64-avx2-cuda-12-0') )
mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('avx512')
}
}),
send: jest.fn(),
})
expect(engineVariant(settings)).resolves.toEqual(
'windows-amd64-avx2-cuda-12-0'
)
}) })
it('executes on Linux CPU', () => { it('executes on Linux CPU', () => {
@ -180,16 +253,23 @@ describe('test executable cortex file', () => {
...testSettings, ...testSettings,
run_mode: 'cpu', run_mode: 'cpu',
} }
mockCpuInfo.mockReturnValue(['noavx']) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('noavx')
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server`), executablePath: expect.stringContaining(`cortex-server`),
cudaVisibleDevices: '', cudaVisibleDevices: '',
vkVisibleDevices: '', vkVisibleDevices: '',
}) })
) )
expect(engineVariant()).toEqual('linux-amd64-noavx') expect(engineVariant()).resolves.toEqual('linux-amd64-noavx')
}) })
it('executes on Linux Cuda 11', () => { it('executes on Linux Cuda 11', () => {
@ -216,16 +296,25 @@ describe('test executable cortex file', () => {
}, },
], ],
} }
mockCpuInfo.mockReturnValue(['avx512'])
mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('avx512')
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server`), executablePath: expect.stringContaining(`cortex-server`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual('linux-amd64-avx2-cuda-11-7') expect(engineVariant(settings)).resolves.toBe('linux-amd64-avx2-cuda-11-7')
}) })
it('executes on Linux Cuda 12', () => { it('executes on Linux Cuda 12', () => {
@ -252,15 +341,25 @@ describe('test executable cortex file', () => {
}, },
], ],
} }
mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('avx2')
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server`), executablePath: expect.stringContaining(`cortex-server`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual('linux-amd64-avx2-cuda-12-0') expect(engineVariant(settings)).resolves.toEqual(
'linux-amd64-avx2-cuda-12-0'
)
}) })
// Generate test for different cpu instructions on Linux // Generate test for different cpu instructions on Linux
@ -275,7 +374,14 @@ describe('test executable cortex file', () => {
const cpuInstructions = ['avx512', 'avx2', 'avx', 'noavx'] const cpuInstructions = ['avx512', 'avx2', 'avx', 'noavx']
cpuInstructions.forEach((instruction) => { cpuInstructions.forEach((instruction) => {
mockCpuInfo.mockReturnValue([instruction]) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback(instruction)
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
@ -286,7 +392,9 @@ describe('test executable cortex file', () => {
vkVisibleDevices: '', vkVisibleDevices: '',
}) })
) )
expect(engineVariant(settings)).toEqual(`linux-amd64-${instruction}`) expect(engineVariant(settings)).resolves.toEqual(
`linux-amd64-${instruction}`
)
}) })
}) })
// Generate test for different cpu instructions on Windows // Generate test for different cpu instructions on Windows
@ -300,7 +408,14 @@ describe('test executable cortex file', () => {
} }
const cpuInstructions = ['avx512', 'avx2', 'avx', 'noavx'] const cpuInstructions = ['avx512', 'avx2', 'avx', 'noavx']
cpuInstructions.forEach((instruction) => { cpuInstructions.forEach((instruction) => {
mockCpuInfo.mockReturnValue([instruction]) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback(instruction)
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining('shared'), enginePath: expect.stringContaining('shared'),
@ -309,7 +424,9 @@ describe('test executable cortex file', () => {
vkVisibleDevices: '', vkVisibleDevices: '',
}) })
) )
expect(engineVariant(settings)).toEqual(`windows-amd64-${instruction}`) expect(engineVariant(settings)).resolves.toEqual(
`windows-amd64-${instruction}`
)
}) })
}) })
@ -340,16 +457,23 @@ describe('test executable cortex file', () => {
} }
const cpuInstructions = ['avx512', 'avx2', 'avx', 'noavx'] const cpuInstructions = ['avx512', 'avx2', 'avx', 'noavx']
cpuInstructions.forEach((instruction) => { cpuInstructions.forEach((instruction) => {
mockCpuInfo.mockReturnValue([instruction]) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback(instruction)
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server.exe`), executablePath: expect.stringContaining(`cortex-server.exe`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual( expect(engineVariant(settings)).resolves.toEqual(
`windows-amd64-${instruction === 'avx512' || instruction === 'avx2' ? 'avx2' : 'noavx'}-cuda-12-0` `windows-amd64-${instruction === 'avx512' || instruction === 'avx2' ? 'avx2' : 'noavx'}-cuda-12-0`
) )
}) })
@ -382,16 +506,23 @@ describe('test executable cortex file', () => {
], ],
} }
cpuInstructions.forEach((instruction) => { cpuInstructions.forEach((instruction) => {
mockCpuInfo.mockReturnValue([instruction]) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback(instruction)
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server`), executablePath: expect.stringContaining(`cortex-server`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual( expect(engineVariant(settings)).resolves.toEqual(
`linux-amd64-${instruction === 'avx512' || instruction === 'avx2' ? 'avx2' : 'noavx'}-cuda-12-0` `linux-amd64-${instruction === 'avx512' || instruction === 'avx2' ? 'avx2' : 'noavx'}-cuda-12-0`
) )
}) })
@ -425,16 +556,23 @@ describe('test executable cortex file', () => {
], ],
} }
cpuInstructions.forEach((instruction) => { cpuInstructions.forEach((instruction) => {
mockCpuInfo.mockReturnValue([instruction]) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback(instruction)
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: expect.stringContaining(`cortex-server`), executablePath: expect.stringContaining(`cortex-server`),
cudaVisibleDevices: '0', cudaVisibleDevices: '0',
vkVisibleDevices: '0', vkVisibleDevices: '0',
}) })
) )
expect(engineVariant(settings)).toEqual(`linux-amd64-vulkan`) expect(engineVariant(settings)).resolves.toEqual(`linux-amd64-vulkan`)
}) })
}) })
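Note on the test changes above: CPU-instruction detection now happens in a forked child process, so the tests stub fork and assert engineVariant with .resolves instead of a synchronous toEqual. Below is a minimal, self-contained sketch of that pattern — the probe module path, the settings shape, and the engineVariant body are assumptions made for illustration, not the extension's actual code.

// engineVariant.fork.test.ts — illustrative sketch only; see note above.
const mockFork = jest.fn()
jest.mock('child_process', () => ({
  fork: (...args: unknown[]) => mockFork(...args),
}))

import { fork } from 'child_process'

// Hypothetical async resolver: ask a forked probe which instruction set the
// CPU supports, then compose the variant string from the reply.
const engineVariant = (settings: { run_mode: 'cpu' | 'gpu'; cuda?: string }) =>
  new Promise<string>((resolve) => {
    const probe = fork('./cpu-probe') as any
    probe.on('message', (instruction: string) => {
      const gpuSuffix = settings.run_mode === 'gpu' ? `-cuda-${settings.cuda}` : ''
      resolve(`linux-amd64-${instruction}${gpuSuffix}`)
    })
    probe.send('probe')
  })

it('resolves the variant from the forked probe message', async () => {
  mockFork.mockReturnValue({
    on: jest.fn((event: string, cb: (msg: string) => void) => {
      if (event === 'message') cb('avx2') // the child "reports" AVX2 support
    }),
    send: jest.fn(),
  })
  await expect(
    engineVariant({ run_mode: 'gpu', cuda: '12-0' })
  ).resolves.toBe('linux-amd64-avx2-cuda-12-0')
})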
@ -452,10 +590,17 @@ describe('test executable cortex file', () => {
...testSettings, ...testSettings,
run_mode: 'cpu', run_mode: 'cpu',
} }
mockCpuInfo.mockReturnValue([]) mockFork.mockReturnValue({
on: jest.fn((event, callback) => {
if (event === 'message') {
callback('noavx')
}
}),
send: jest.fn(),
})
expect(executableCortexFile(settings)).toEqual( expect(executableCortexFile(settings)).toEqual(
expect.objectContaining({ expect.objectContaining({
enginePath: expect.stringContaining("shared"), enginePath: expect.stringContaining('shared'),
executablePath: executablePath:
originalPlatform === 'darwin' originalPlatform === 'darwin'
? expect.stringContaining(`cortex-server`) ? expect.stringContaining(`cortex-server`)

View File

@ -61,6 +61,254 @@
}, },
"engine": "groq" "engine": "groq"
}, },
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.1-70b-versatile",
"object": "model",
"name": "Groq Llama 3.1 70b Versatile",
"version": "1.1",
"description": "Groq Llama 3.1 70b Versatile with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8000,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.1-8b-instant",
"object": "model",
"name": "Groq Llama 3.1 8b Instant",
"version": "1.1",
"description": "Groq Llama 3.1 8b with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8000,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.2-11b-text-preview",
"object": "model",
"name": "Groq Llama 3.2 11b Text Preview",
"version": "1.1",
"description": "Groq Llama 3.2 11b Text Preview with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.2-11b-vision-preview",
"object": "model",
"name": "Groq Llama 3.2 11b Vision Preview",
"version": "1.1",
"description": "Groq Llama 3.2 11b Vision Preview with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.2-1b-preview",
"object": "model",
"name": "Groq Llama 3.2 1b Preview",
"version": "1.1",
"description": "Groq Llama 3.2 1b Preview with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.2-3b-preview",
"object": "model",
"name": "Groq Llama 3.2 3b Preview",
"version": "1.1",
"description": "Groq Llama 3.2 3b Preview with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.2-90b-text-preview",
"object": "model",
"name": "Groq Llama 3.2 90b Text Preview",
"version": "1.1",
"description": "Groq Llama 3.2 90b Text Preview with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "llama-3.2-90b-vision-preview",
"object": "model",
"name": "Groq Llama 3.2 90b Vision Preview",
"version": "1.1",
"description": "Groq Llama 3.2 90b Vision Preview with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Meta",
"tags": [
"General",
"Big Context Length"
]
},
"engine": "groq"
},
{ {
"sources": [ "sources": [
{ {
@ -91,6 +339,36 @@
}, },
"engine": "groq" "engine": "groq"
}, },
{
"sources": [
{
"url": "https://groq.com"
}
],
"id": "gemma2-9b-it",
"object": "model",
"name": "Groq Gemma 9B Instruct",
"version": "1.2",
"description": "Groq Gemma 9b Instruct with supercharged speed!",
"format": "api",
"settings": {},
"parameters": {
"max_tokens": 8192,
"temperature": 0.7,
"top_p": 0.95,
"stream": true,
"stop": [],
"frequency_penalty": 0,
"presence_penalty": 0
},
"metadata": {
"author": "Google",
"tags": [
"General"
]
},
"engine": "groq"
},
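All of the Groq entries added in this file share one remote-model shape. A hedged TypeScript sketch of that shape, inferred from the JSON above — the interface name is invented here and optionality is assumed:

// Shape inferred from the entries above; the interface name is ours, not the SDK's.
interface RemoteModelEntry {
  sources: { url: string }[]
  id: string                    // e.g. 'llama-3.1-8b-instant'
  object: 'model'
  name: string
  version: string
  description: string
  format: 'api'                 // remote models are API-backed, no local weights
  settings: Record<string, never>
  parameters: {
    max_tokens: number
    temperature: number
    top_p: number
    stream: boolean
    stop: string[]
    frequency_penalty: number
    presence_penalty: number
  }
  metadata: { author: string; tags: string[] }
  engine: 'groq'
}

// One of the new entries, typed against the sketch above.
const llama31Instant: RemoteModelEntry = {
  sources: [{ url: 'https://groq.com' }],
  id: 'llama-3.1-8b-instant',
  object: 'model',
  name: 'Groq Llama 3.1 8b Instant',
  version: '1.1',
  description: 'Groq Llama 3.1 8b with supercharged speed!',
  format: 'api',
  settings: {},
  parameters: {
    max_tokens: 8000,
    temperature: 0.7,
    top_p: 0.95,
    stream: true,
    stop: [],
    frequency_penalty: 0,
    presence_penalty: 0,
  },
  metadata: { author: 'Meta', tags: ['General', 'Big Context Length'] },
  engine: 'groq',
}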
{ {
"sources": [ "sources": [
{ {

View File

@ -1,7 +1,9 @@
import { import {
AppConfigurationEventName,
GpuSetting, GpuSetting,
MonitoringExtension, MonitoringExtension,
OperatingSystemInfo, OperatingSystemInfo,
events,
executeOnMain, executeOnMain,
} from '@janhq/core' } from '@janhq/core'
@ -37,6 +39,7 @@ export default class JanMonitoringExtension extends MonitoringExtension {
// Attempt to fetch nvidia info // Attempt to fetch nvidia info
await executeOnMain(NODE, 'updateNvidiaInfo') await executeOnMain(NODE, 'updateNvidiaInfo')
events.emit(AppConfigurationEventName.OnConfigurationUpdate, {})
} }
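The added line above makes the monitoring extension emit AppConfigurationEventName.OnConfigurationUpdate once the Nvidia info has been refreshed, so interested UI code can re-read GPU data. A small listener sketch follows — it assumes @janhq/core exposes events.on/events.off as counterparts of the events.emit used here; verify against the SDK before relying on it.

// Sketch only — events.on/off are assumed counterparts of the events.emit above.
import { useEffect } from 'react'
import { AppConfigurationEventName, events } from '@janhq/core'

const useGpuInfoRefresh = (onRefresh: () => void) => {
  useEffect(() => {
    const handler = () => onRefresh() // re-read GPU/system info when the config updates
    events.on(AppConfigurationEventName.OnConfigurationUpdate, handler)
    return () =>
      events.off(AppConfigurationEventName.OnConfigurationUpdate, handler)
  }, [onRefresh])
}

export default useGpuInfoRefresh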
onSettingUpdate<T>(key: string, value: T): void { onSettingUpdate<T>(key: string, value: T): void {

View File

@ -66,4 +66,5 @@
} }
::-webkit-scrollbar-thumb { ::-webkit-scrollbar-thumb {
background: hsla(var(--scrollbar-thumb)); background: hsla(var(--scrollbar-thumb));
border-radius: 20px;
} }

View File

@ -52,9 +52,8 @@ export default function RootLayout() {
<body className="font-sans antialiased"> <body className="font-sans antialiased">
<JotaiWrapper> <JotaiWrapper>
<ThemeWrapper> <ThemeWrapper>
<ClipboardListener> <ClipboardListener />
<Search /> <Search />
</ClipboardListener>
</ThemeWrapper> </ThemeWrapper>
</JotaiWrapper> </JotaiWrapper>
</body> </body>

View File

@ -4,7 +4,10 @@ import SettingComponentBuilder from '@/containers/ModelSetting/SettingComponent'
type Props = { type Props = {
componentData: SettingComponentProps[] componentData: SettingComponentProps[]
onValueChanged: (key: string, value: string | number | boolean) => void onValueChanged: (
key: string,
value: string | number | boolean | string[]
) => void
disabled?: boolean disabled?: boolean
} }

View File

@ -29,7 +29,6 @@ const ErrorMessage = ({ message }: { message: ThreadMessage }) => {
switch (message.error_code) { switch (message.error_code) {
case ErrorCode.InvalidApiKey: case ErrorCode.InvalidApiKey:
case ErrorCode.AuthenticationError: case ErrorCode.AuthenticationError:
case ErrorCode.InvalidRequestError:
return ( return (
<span data-testid="invalid-API-key-error"> <span data-testid="invalid-API-key-error">
Invalid API key. Please check your API key from{' '} Invalid API key. Please check your API key from{' '}
@ -53,7 +52,7 @@ const ErrorMessage = ({ message }: { message: ThreadMessage }) => {
) )
default: default:
return ( return (
<p> <p data-testid="passthrough-error-message" className="capitalize">
{message.content[0]?.text?.value && ( {message.content[0]?.text?.value && (
<AutoLink text={message.content[0].text.value} /> <AutoLink text={message.content[0].text.value} />
)} )}

View File

@ -1,10 +1,8 @@
'use client' 'use client'
import { useEffect } from 'react' import { useEffect, useMemo } from 'react'
import { motion as m } from 'framer-motion' import { useAtomValue, useSetAtom } from 'jotai'
import { useAtom, useAtomValue } from 'jotai'
import { twMerge } from 'tailwind-merge' import { twMerge } from 'tailwind-merge'
@ -36,7 +34,7 @@ import { mainViewStateAtom } from '@/helpers/atoms/App.atom'
import { reduceTransparentAtom } from '@/helpers/atoms/Setting.atom' import { reduceTransparentAtom } from '@/helpers/atoms/Setting.atom'
const BaseLayout = () => { const BaseLayout = () => {
const [mainViewState, setMainViewState] = useAtom(mainViewStateAtom) const setMainViewState = useSetAtom(mainViewStateAtom)
const importModelStage = useAtomValue(getImportModelStageAtom) const importModelStage = useAtomValue(getImportModelStageAtom)
const reduceTransparent = useAtomValue(reduceTransparentAtom) const reduceTransparent = useAtomValue(reduceTransparentAtom)
@ -68,24 +66,7 @@ const BaseLayout = () => {
<TopPanel /> <TopPanel />
<div className="relative top-9 flex h-[calc(100vh-(36px+36px))] w-screen"> <div className="relative top-9 flex h-[calc(100vh-(36px+36px))] w-screen">
<RibbonPanel /> <RibbonPanel />
<div className={twMerge('relative flex w-full')}> <MainViewContainer />
<div className="w-full">
<m.div
key={mainViewState}
initial={{ opacity: 0, y: -8 }}
className="h-full"
animate={{
opacity: 1,
y: 0,
transition: {
duration: 0.5,
},
}}
>
<MainViewContainer />
</m.div>
</div>
</div>
<LoadingModal /> <LoadingModal />
{importModelStage === 'SELECTING_MODEL' && <SelectingModelModal />} {importModelStage === 'SELECTING_MODEL' && <SelectingModelModal />}
{importModelStage === 'MODEL_SELECTED' && <ImportModelOptionModal />} {importModelStage === 'MODEL_SELECTED' && <ImportModelOptionModal />}

View File

@ -1,15 +1,30 @@
import { ReactNode, useCallback, useEffect, useRef } from 'react' import { PropsWithChildren, useCallback, useEffect, useRef } from 'react'
import { ScrollArea } from '@janhq/joi' import { ScrollArea } from '@janhq/joi'
type Props = { import { useAtomValue } from 'jotai'
children: ReactNode
}
const ListContainer = ({ children }: Props) => { import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
const ListContainer = ({ children }: PropsWithChildren) => {
const listRef = useRef<HTMLDivElement>(null) const listRef = useRef<HTMLDivElement>(null)
const prevScrollTop = useRef(0) const prevScrollTop = useRef(0)
const isUserManuallyScrollingUp = useRef(false) const isUserManuallyScrollingUp = useRef(false)
const activeThread = useAtomValue(activeThreadAtom)
const prevActiveThread = useRef(activeThread)
// Handle active thread changes
useEffect(() => {
if (prevActiveThread.current?.id !== activeThread?.id) {
isUserManuallyScrollingUp.current = false
const scrollHeight = listRef.current?.scrollHeight ?? 0
listRef.current?.scrollTo({
top: scrollHeight,
behavior: 'instant',
})
prevActiveThread.current = activeThread // Update the previous active thread reference
}
}, [activeThread])
const handleScroll = useCallback((event: React.UIEvent<HTMLElement>) => { const handleScroll = useCallback((event: React.UIEvent<HTMLElement>) => {
const currentScrollTop = event.currentTarget.scrollTop const currentScrollTop = event.currentTarget.scrollTop

View File

@ -1,75 +0,0 @@
// GenerateResponse.test.tsx
import React from 'react';
import { render, screen, act } from '@testing-library/react';
import '@testing-library/jest-dom';
import GenerateResponse from './GenerateResponse';
jest.useFakeTimers();
describe('GenerateResponse Component', () => {
it('renders initially with 1% loader width', () => {
render(<GenerateResponse />);
const loader = screen.getByTestId('response-loader');
expect(loader).toHaveStyle('width: 24%');
});
it('updates loader width over time', () => {
render(<GenerateResponse />);
const loader = screen.getByTestId('response-loader');
// Advance timers to simulate time passing
act(() => {
jest.advanceTimersByTime(1000);
});
expect(loader).not.toHaveStyle('width: 1%');
expect(parseFloat(loader.style.width)).toBeGreaterThan(1);
});
it('pauses at specific percentages', () => {
render(<GenerateResponse />);
const loader = screen.getByTestId('response-loader');
// Advance to 24%
act(() => {
for (let i = 0; i < 24; i++) {
jest.advanceTimersByTime(50);
}
});
expect(loader).toHaveStyle('width: 50%');
// Advance past the pause
act(() => {
jest.advanceTimersByTime(300);
});
expect(loader).toHaveStyle('width: 78%');
});
it('stops at 85%', () => {
render(<GenerateResponse />);
const loader = screen.getByTestId('response-loader');
// Advance to 50%
act(() => {
for (let i = 0; i < 85; i++) {
jest.advanceTimersByTime(50);
}
});
expect(loader).toHaveStyle('width: 50%');
// Check if it stays at 78%
act(() => {
jest.advanceTimersByTime(1000);
});
expect(loader).toHaveStyle('width: 78%');
});
it('displays the correct text', () => {
render(<GenerateResponse />);
expect(screen.getByText('Generating response...')).toBeInTheDocument();
});
});

View File

@ -1,5 +1,7 @@
import React, { useEffect, useState } from 'react' import React, { useEffect, useState } from 'react'
import { motion as m } from 'framer-motion'
export default function GenerateResponse() { export default function GenerateResponse() {
const [loader, setLoader] = useState(0) const [loader, setLoader] = useState(0)
@ -28,10 +30,17 @@ export default function GenerateResponse() {
return ( return (
<div className=" mb-1 mt-2 py-2 text-center"> <div className=" mb-1 mt-2 py-2 text-center">
<div className="relative inline-block overflow-hidden rounded-lg bg-[hsla(var(--loader-bg))] px-4 py-2 font-semibold text-[hsla(var(--loader-fg))] shadow-lg"> <div className="relative inline-block overflow-hidden rounded-lg bg-[hsla(var(--loader-bg))] px-4 py-2 font-semibold text-[hsla(var(--loader-fg))] shadow-lg">
<div <m.div
initial={{ width: 0 }}
className="absolute left-0 top-0 h-full bg-[hsla(var(--loader-active-bg))]" className="absolute left-0 top-0 h-full bg-[hsla(var(--loader-active-bg))]"
style={{ width: `${loader}%` }} style={{ width: 250 }}
data-testid="response-loader" data-testid="response-loader"
animate={{
width: `${loader}%`,
transition: {
duration: 0.25,
},
}}
/> />
<span className="relative z-10">Generating response...</span> <span className="relative z-10">Generating response...</span>
</div> </div>

View File

@ -1,5 +1,7 @@
import React, { useEffect, useState } from 'react' import React, { useEffect, useState } from 'react'
import { motion as m } from 'framer-motion'
import { useActiveModel } from '@/hooks/useActiveModel' import { useActiveModel } from '@/hooks/useActiveModel'
export default function ModelStart() { export default function ModelStart() {
@ -37,10 +39,17 @@ export default function ModelStart() {
return ( return (
<div className=" mb-1 mt-2 py-2 text-center"> <div className=" mb-1 mt-2 py-2 text-center">
<div className="relative inline-block max-w-[300px] overflow-hidden text-ellipsis whitespace-nowrap rounded-lg bg-[hsla(var(--loader-bg))] px-4 py-2 font-semibold text-[hsla(var(--loader-fg))] shadow-lg"> <div className="relative inline-block max-w-[300px] overflow-hidden text-ellipsis whitespace-nowrap rounded-lg bg-[hsla(var(--loader-bg))] px-4 py-2 font-semibold text-[hsla(var(--loader-fg))] shadow-lg">
<div <m.div
initial={{ width: 0 }}
className="absolute left-0 top-0 h-full bg-[hsla(var(--loader-active-bg))]" className="absolute left-0 top-0 h-full bg-[hsla(var(--loader-active-bg))]"
style={{ width: `${loader}%` }} style={{ width: 250 }}
data-testid="model-loader" data-testid="model-loader"
animate={{
width: `${loader}%`,
transition: {
duration: 0.25,
},
}}
/> />
<span className="relative z-10 line-clamp-1 max-w-[300px]"> <span className="relative z-10 line-clamp-1 max-w-[300px]">
{stateModel.state === 'start' ? 'Starting' : 'Stopping'} {stateModel.state === 'start' ? 'Starting' : 'Stopping'}

View File

@ -1,5 +1,10 @@
import { memo } from 'react'
import { motion as m } from 'framer-motion'
import { useAtomValue } from 'jotai' import { useAtomValue } from 'jotai'
import { twMerge } from 'tailwind-merge'
import { MainViewState } from '@/constants/screens' import { MainViewState } from '@/constants/screens'
import HubScreen from '@/screens/Hub' import HubScreen from '@/screens/Hub'
@ -31,7 +36,26 @@ const MainViewContainer = () => {
break break
} }
return children return (
<div className={twMerge('relative flex w-full')}>
<div className="w-full">
<m.div
key={mainViewState}
initial={{ opacity: 0, y: -8 }}
className="h-full"
animate={{
opacity: 1,
y: 0,
transition: {
duration: 0.25,
},
}}
>
{children}
</m.div>
</div>
</div>
)
} }
export default MainViewContainer export default memo(MainViewContainer)

View File

@ -2,7 +2,6 @@ import '@testing-library/jest-dom'
import React from 'react' import React from 'react'
import { render, fireEvent } from '@testing-library/react' import { render, fireEvent } from '@testing-library/react'
import ModelConfigInput from './index' import ModelConfigInput from './index'
import { Tooltip } from '@janhq/joi'
// Mocking the Tooltip component to simplify testing // Mocking the Tooltip component to simplify testing
jest.mock('@janhq/joi', () => ({ jest.mock('@janhq/joi', () => ({

View File

@ -19,28 +19,30 @@ const ModelConfigInput = ({
description, description,
placeholder, placeholder,
onValueChanged, onValueChanged,
}: Props) => ( }: Props) => {
<div className="flex flex-col"> return (
<div className="mb-2 flex items-center gap-x-2"> <div className="flex flex-col">
<p className="font-medium">{title}</p> <div className="mb-2 flex items-center gap-x-2">
<Tooltip <p className="font-medium">{title}</p>
trigger={ <Tooltip
<InfoIcon trigger={
size={16} <InfoIcon
className="flex-shrink-0 text-[hsla(var(--text-secondary))]" size={16}
/> className="flex-shrink-0 text-[hsla(var(--text-secondary))]"
} />
content={description} }
content={description}
/>
</div>
<TextArea
placeholder={placeholder}
onChange={(e) => onValueChanged?.(e.target.value)}
autoResize
value={value}
disabled={disabled}
/> />
</div> </div>
<TextArea )
placeholder={placeholder} }
onChange={(e) => onValueChanged?.(e.target.value)}
autoResize
value={value}
disabled={disabled}
/>
</div>
)
export default ModelConfigInput export default ModelConfigInput

View File

@ -46,8 +46,7 @@ const ModelSearch = ({ onSearchLocal }: Props) => {
errMessage = err.message errMessage = err.message
} }
toaster({ toaster({
title: 'Failed to get Hugging Face models', title: errMessage,
description: errMessage,
type: 'error', type: 'error',
}) })
console.error(err) console.error(err)

View File

@ -8,11 +8,15 @@ import {
import Checkbox from '@/containers/Checkbox' import Checkbox from '@/containers/Checkbox'
import ModelConfigInput from '@/containers/ModelConfigInput' import ModelConfigInput from '@/containers/ModelConfigInput'
import SliderRightPanel from '@/containers/SliderRightPanel' import SliderRightPanel from '@/containers/SliderRightPanel'
import TagInput from '@/containers/TagInput'
type Props = { type Props = {
componentProps: SettingComponentProps[] componentProps: SettingComponentProps[]
disabled?: boolean disabled?: boolean
onValueUpdated: (key: string, value: string | number | boolean) => void onValueUpdated: (
key: string,
value: string | number | boolean | string[]
) => void
} }
const SettingComponent: React.FC<Props> = ({ const SettingComponent: React.FC<Props> = ({
@ -53,7 +57,24 @@ const SettingComponent: React.FC<Props> = ({
name={data.key} name={data.key}
description={data.description} description={data.description}
placeholder={placeholder} placeholder={placeholder}
value={textValue} value={textValue as string}
onValueChanged={(value) => onValueUpdated(data.key, value)}
/>
)
}
case 'tag': {
const { placeholder, value: textValue } =
data.controllerProps as InputComponentProps
return (
<TagInput
title={data.title}
disabled={disabled}
key={data.key}
name={data.key}
description={data.description}
placeholder={placeholder}
value={textValue as string[]}
onValueChanged={(value) => onValueUpdated(data.key, value)} onValueChanged={(value) => onValueUpdated(data.key, value)}
/> />
) )

View File

@ -6,7 +6,10 @@ import SettingComponentBuilder from './SettingComponent'
type Props = { type Props = {
componentProps: SettingComponentProps[] componentProps: SettingComponentProps[]
onValueChanged: (key: string, value: string | number | boolean) => void onValueChanged: (
key: string,
value: string | number | boolean | string[]
) => void
disabled?: boolean disabled?: boolean
} }

View File

@ -1,4 +1,4 @@
import { Fragment, PropsWithChildren, useEffect } from 'react' import { Fragment, useEffect } from 'react'
import { AppUpdateInfo } from '@janhq/core' import { AppUpdateInfo } from '@janhq/core'
import { useSetAtom } from 'jotai' import { useSetAtom } from 'jotai'
@ -8,7 +8,7 @@ import {
updateVersionErrorAtom, updateVersionErrorAtom,
} from '@/helpers/atoms/App.atom' } from '@/helpers/atoms/App.atom'
const AppUpdateListener = ({ children }: PropsWithChildren) => { const AppUpdateListener = () => {
const setProgress = useSetAtom(appDownloadProgressAtom) const setProgress = useSetAtom(appDownloadProgressAtom)
const setUpdateVersionError = useSetAtom(updateVersionErrorAtom) const setUpdateVersionError = useSetAtom(updateVersionErrorAtom)
@ -39,7 +39,7 @@ const AppUpdateListener = ({ children }: PropsWithChildren) => {
} }
}, [setProgress, setUpdateVersionError]) }, [setProgress, setUpdateVersionError])
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
export default AppUpdateListener export default AppUpdateListener

View File

@ -1,10 +1,10 @@
import { Fragment, PropsWithChildren } from 'react' import { Fragment } from 'react'
import { useSetAtom } from 'jotai' import { useSetAtom } from 'jotai'
import { selectedTextAtom } from './Jotai' import { selectedTextAtom } from './Jotai'
const ClipboardListener = ({ children }: PropsWithChildren) => { const ClipboardListener = () => {
const setSelectedText = useSetAtom(selectedTextAtom) const setSelectedText = useSetAtom(selectedTextAtom)
if (typeof window !== 'undefined') { if (typeof window !== 'undefined') {
@ -13,7 +13,7 @@ const ClipboardListener = ({ children }: PropsWithChildren) => {
}) })
} }
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
export default ClipboardListener export default ClipboardListener

View File

@ -0,0 +1,64 @@
'use client'
import { PropsWithChildren, useCallback, useEffect, useState } from 'react'
import Loader from '@/containers/Loader'
import { setupCoreServices } from '@/services/coreService'
import {
isCoreExtensionInstalled,
setupBaseExtensions,
} from '@/services/extensionService'
import { extensionManager } from '@/extension'
export const CoreConfigurator = ({ children }: PropsWithChildren) => {
const [setupCore, setSetupCore] = useState(false)
const [activated, setActivated] = useState(false)
const [settingUp, setSettingUp] = useState(false)
const setupExtensions = useCallback(async () => {
// Register all active extensions
await extensionManager.registerActive()
setTimeout(async () => {
if (!isCoreExtensionInstalled()) {
setSettingUp(true)
await setupBaseExtensions()
return
}
extensionManager.load()
setSettingUp(false)
setActivated(true)
}, 500)
}, [])
// Services Setup
useEffect(() => {
setupCoreServices()
setSetupCore(true)
return () => {
extensionManager.unload()
}
}, [])
useEffect(() => {
if (setupCore) {
// Electron
if (window && window.core?.api) {
setupExtensions()
} else {
// Host
setActivated(true)
}
}
}, [setupCore, setupExtensions])
return (
<>
{settingUp && <Loader description="Preparing Update..." />}
{setupCore && activated && <>{children}</>}
</>
)
}

View File

@ -1,13 +1,12 @@
'use client' 'use client'
import { Fragment, ReactNode, useEffect } from 'react' import { Fragment, useEffect } from 'react'
import { AppConfiguration, getUserHomePath } from '@janhq/core' import { AppConfiguration, getUserHomePath } from '@janhq/core'
import { useSetAtom } from 'jotai' import { useSetAtom } from 'jotai'
import useAssistants from '@/hooks/useAssistants' import useAssistants from '@/hooks/useAssistants'
import useGetSystemResources from '@/hooks/useGetSystemResources' import useGetSystemResources from '@/hooks/useGetSystemResources'
import { useLoadTheme } from '@/hooks/useLoadTheme'
import useModels from '@/hooks/useModels' import useModels from '@/hooks/useModels'
import useThreads from '@/hooks/useThreads' import useThreads from '@/hooks/useThreads'
@ -20,27 +19,20 @@ import {
} from '@/helpers/atoms/AppConfig.atom' } from '@/helpers/atoms/AppConfig.atom'
import { janSettingScreenAtom } from '@/helpers/atoms/Setting.atom' import { janSettingScreenAtom } from '@/helpers/atoms/Setting.atom'
type Props = { const DataLoader: React.FC = () => {
children: ReactNode
}
const DataLoader: React.FC<Props> = ({ children }) => {
const setJanDataFolderPath = useSetAtom(janDataFolderPathAtom) const setJanDataFolderPath = useSetAtom(janDataFolderPathAtom)
const setQuickAskEnabled = useSetAtom(quickAskEnabledAtom) const setQuickAskEnabled = useSetAtom(quickAskEnabledAtom)
const setJanDefaultDataFolder = useSetAtom(defaultJanDataFolderAtom) const setJanDefaultDataFolder = useSetAtom(defaultJanDataFolderAtom)
const setJanSettingScreen = useSetAtom(janSettingScreenAtom) const setJanSettingScreen = useSetAtom(janSettingScreenAtom)
const { loadDataModel, configurePullOptions } = useModels() const { getData: loadModels } = useModels()
useThreads() useThreads()
useAssistants() useAssistants()
useGetSystemResources() useGetSystemResources()
useLoadTheme()
useEffect(() => { useEffect(() => {
// Load data once // Load data once
loadDataModel() loadModels()
// Configure pull options once
configurePullOptions()
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []) }, [])
@ -71,7 +63,7 @@ const DataLoader: React.FC<Props> = ({ children }) => {
console.debug('Load Data...') console.debug('Load Data...')
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
export default DataLoader export default DataLoader

View File

@ -1,4 +1,4 @@
import { Fragment, ReactNode } from 'react' import { Fragment } from 'react'
import { useSetAtom } from 'jotai' import { useSetAtom } from 'jotai'
@ -13,11 +13,8 @@ import {
importHuggingFaceModelStageAtom, importHuggingFaceModelStageAtom,
importingHuggingFaceRepoDataAtom, importingHuggingFaceRepoDataAtom,
} from '@/helpers/atoms/HuggingFace.atom' } from '@/helpers/atoms/HuggingFace.atom'
type Props = {
children: ReactNode
}
const DeepLinkListener: React.FC<Props> = ({ children }) => { const DeepLinkListener: React.FC = () => {
const { getHfRepoData } = useGetHFRepoData() const { getHfRepoData } = useGetHFRepoData()
const setLoadingInfo = useSetAtom(loadingModalInfoAtom) const setLoadingInfo = useSetAtom(loadingModalInfoAtom)
const setImportingHuggingFaceRepoData = useSetAtom( const setImportingHuggingFaceRepoData = useSetAtom(
@ -69,7 +66,7 @@ const DeepLinkListener: React.FC<Props> = ({ children }) => {
handleDeepLinkAction(action) handleDeepLinkAction(action)
}) })
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
type DeepLinkAction = { type DeepLinkAction = {

View File

@ -1,4 +1,4 @@
import { PropsWithChildren, useCallback, useEffect } from 'react' import { useCallback, useEffect } from 'react'
import React from 'react' import React from 'react'
@ -23,7 +23,7 @@ import { toaster } from '../Toast'
import AppUpdateListener from './AppUpdateListener' import AppUpdateListener from './AppUpdateListener'
import ClipboardListener from './ClipboardListener' import ClipboardListener from './ClipboardListener'
import EventHandler from './EventHandler' import ModelHandler from './ModelHandler'
import ModelImportListener from './ModelImportListener' import ModelImportListener from './ModelImportListener'
import QuickAskListener from './QuickAskListener' import QuickAskListener from './QuickAskListener'
@ -39,7 +39,7 @@ import {
removeDownloadingModelAtom, removeDownloadingModelAtom,
} from '@/helpers/atoms/Model.atom' } from '@/helpers/atoms/Model.atom'
const EventListenerWrapper = ({ children }: PropsWithChildren) => { const EventListener = () => {
const setDownloadState = useSetAtom(setDownloadStateAtom) const setDownloadState = useSetAtom(setDownloadStateAtom)
const setInstallingExtension = useSetAtom(setInstallingExtensionAtom) const setInstallingExtension = useSetAtom(setInstallingExtensionAtom)
const removeInstallingExtension = useSetAtom(removeInstallingExtensionAtom) const removeInstallingExtension = useSetAtom(removeInstallingExtensionAtom)
@ -156,16 +156,14 @@ const EventListenerWrapper = ({ children }: PropsWithChildren) => {
]) ])
return ( return (
<AppUpdateListener> <>
<ClipboardListener> <AppUpdateListener />
<ModelImportListener> <ClipboardListener />
<QuickAskListener> <ModelImportListener />
<EventHandler>{children}</EventHandler> <QuickAskListener />
</QuickAskListener> <ModelHandler />
</ModelImportListener> </>
</ClipboardListener>
</AppUpdateListener>
) )
} }
export default EventListenerWrapper export default EventListener

View File

@ -1,13 +1,9 @@
'use client' 'use client'
import { ReactNode } from 'react' import { PropsWithChildren } from 'react'
import { Provider, atom } from 'jotai' import { Provider, atom } from 'jotai'
type Props = {
children: ReactNode
}
export const editPromptAtom = atom<string>('') export const editPromptAtom = atom<string>('')
export const currentPromptAtom = atom<string>('') export const currentPromptAtom = atom<string>('')
export const fileUploadAtom = atom<FileInfo[]>([]) export const fileUploadAtom = atom<FileInfo[]>([])
@ -16,7 +12,7 @@ export const searchAtom = atom<string>('')
export const selectedTextAtom = atom('') export const selectedTextAtom = atom('')
export default function JotaiWrapper({ children }: Props) { export default function JotaiWrapper({ children }: PropsWithChildren) {
return <Provider>{children}</Provider> return <Provider>{children}</Provider>
} }

View File

@ -1,6 +1,6 @@
'use client' 'use client'
import { Fragment, ReactNode, useEffect } from 'react' import { Fragment, useEffect } from 'react'
import { useAtom, useAtomValue, useSetAtom } from 'jotai' import { useAtom, useAtomValue, useSetAtom } from 'jotai'
@ -8,6 +8,8 @@ import { MainViewState } from '@/constants/screens'
import { useCreateNewThread } from '@/hooks/useCreateNewThread' import { useCreateNewThread } from '@/hooks/useCreateNewThread'
import { useStarterScreen } from '@/hooks/useStarterScreen'
import { import {
mainViewStateAtom, mainViewStateAtom,
showLeftPanelAtom, showLeftPanelAtom,
@ -20,11 +22,7 @@ import {
ThreadModalAction, ThreadModalAction,
} from '@/helpers/atoms/Thread.atom' } from '@/helpers/atoms/Thread.atom'
type Props = { export default function KeyListener() {
children: ReactNode
}
export default function KeyListener({ children }: Props) {
const setShowLeftPanel = useSetAtom(showLeftPanelAtom) const setShowLeftPanel = useSetAtom(showLeftPanelAtom)
const setShowRightPanel = useSetAtom(showRightPanelAtom) const setShowRightPanel = useSetAtom(showRightPanelAtom)
const [mainViewState, setMainViewState] = useAtom(mainViewStateAtom) const [mainViewState, setMainViewState] = useAtom(mainViewStateAtom)
@ -32,6 +30,7 @@ export default function KeyListener({ children }: Props) {
const assistants = useAtomValue(assistantsAtom) const assistants = useAtomValue(assistantsAtom)
const activeThread = useAtomValue(activeThreadAtom) const activeThread = useAtomValue(activeThreadAtom)
const setModalActionThread = useSetAtom(modalActionThreadAtom) const setModalActionThread = useSetAtom(modalActionThreadAtom)
const { isShowStarterScreen } = useStarterScreen()
useEffect(() => { useEffect(() => {
const onKeyDown = (e: KeyboardEvent) => { const onKeyDown = (e: KeyboardEvent) => {
@ -60,7 +59,7 @@ export default function KeyListener({ children }: Props) {
return return
} }
if (e.code === 'KeyN' && prefixKey) { if (e.code === 'KeyN' && prefixKey && !isShowStarterScreen) {
if (mainViewState !== MainViewState.Thread) return if (mainViewState !== MainViewState.Thread) return
requestCreateNewThread(assistants[0]) requestCreateNewThread(assistants[0])
setMainViewState(MainViewState.Thread) setMainViewState(MainViewState.Thread)
@ -82,6 +81,7 @@ export default function KeyListener({ children }: Props) {
}, [ }, [
activeThread, activeThread,
assistants, assistants,
isShowStarterScreen,
mainViewState, mainViewState,
requestCreateNewThread, requestCreateNewThread,
setMainViewState, setMainViewState,
@ -90,5 +90,5 @@ export default function KeyListener({ children }: Props) {
setShowRightPanel, setShowRightPanel,
]) ])
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }

View File

@ -1,4 +1,4 @@
import { Fragment, ReactNode, useCallback, useEffect, useRef } from 'react' import { Fragment, useCallback, useEffect, useRef } from 'react'
import { import {
ChatCompletionMessage, ChatCompletionMessage,
@ -30,6 +30,7 @@ import {
getCurrentChatMessagesAtom, getCurrentChatMessagesAtom,
addNewMessageAtom, addNewMessageAtom,
updateMessageAtom, updateMessageAtom,
tokenSpeedAtom,
} from '@/helpers/atoms/ChatMessage.atom' } from '@/helpers/atoms/ChatMessage.atom'
import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom' import { downloadedModelsAtom } from '@/helpers/atoms/Model.atom'
import { import {
@ -43,7 +44,7 @@ import {
const maxWordForThreadTitle = 10 const maxWordForThreadTitle = 10
const defaultThreadTitle = 'New Thread' const defaultThreadTitle = 'New Thread'
export default function EventHandler({ children }: { children: ReactNode }) { export default function ModelHandler() {
const messages = useAtomValue(getCurrentChatMessagesAtom) const messages = useAtomValue(getCurrentChatMessagesAtom)
const addNewMessage = useSetAtom(addNewMessageAtom) const addNewMessage = useSetAtom(addNewMessageAtom)
const updateMessage = useSetAtom(updateMessageAtom) const updateMessage = useSetAtom(updateMessageAtom)
@ -62,6 +63,7 @@ export default function EventHandler({ children }: { children: ReactNode }) {
const activeModelRef = useRef(activeModel) const activeModelRef = useRef(activeModel)
const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom) const activeModelParams = useAtomValue(getActiveThreadModelParamsAtom)
const activeModelParamsRef = useRef(activeModelParams) const activeModelParamsRef = useRef(activeModelParams)
const setTokenSpeed = useSetAtom(tokenSpeedAtom)
useEffect(() => { useEffect(() => {
threadsRef.current = threads threadsRef.current = threads
@ -179,6 +181,31 @@ export default function EventHandler({ children }: { children: ReactNode }) {
if (message.content.length) { if (message.content.length) {
setIsGeneratingResponse(false) setIsGeneratingResponse(false)
} }
setTokenSpeed((prev) => {
const currentTimestamp = new Date().getTime() // Get current time in milliseconds
if (!prev) {
// If this is the first update, just set the lastTimestamp and return
return {
lastTimestamp: currentTimestamp,
tokenSpeed: 0,
tokenCount: 1,
message: message.id,
}
}
const timeDiffInSeconds =
(currentTimestamp - prev.lastTimestamp) / 1000 // Time difference in seconds
const totalTokenCount = prev.tokenCount + 1
const averageTokenSpeed =
totalTokenCount / (timeDiffInSeconds > 0 ? timeDiffInSeconds : 1) // Calculate average token speed
return {
...prev,
tokenSpeed: averageTokenSpeed,
tokenCount: totalTokenCount,
message: message.id,
}
})
return return
} else if ( } else if (
message.status === MessageStatus.Error && message.status === MessageStatus.Error &&
@ -333,5 +360,5 @@ export default function EventHandler({ children }: { children: ReactNode }) {
} }
}, [onNewMessageResponse, onMessageResponseUpdate, onModelStopped]) }, [onNewMessageResponse, onMessageResponseUpdate, onModelStopped])
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
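The tokenSpeedAtom update above keeps a running average: each streamed chunk bumps the token count and divides it by the seconds elapsed since the first chunk (the anchor timestamp is never moved after the first update). A pure-function distillation of that arithmetic, extracted here only for illustration — the type mirrors the fields used above, but the helper itself is not the component's code:

// Pure sketch of the running-average update performed in the handler above.
type TokenSpeed = {
  lastTimestamp: number // ms timestamp of the first streamed chunk
  tokenSpeed: number    // tokens per second, averaged since the first chunk
  tokenCount: number
  message: string
}

const updateTokenSpeed = (
  prev: TokenSpeed | undefined,
  messageId: string,
  now: number = Date.now()
): TokenSpeed => {
  if (!prev) {
    // First chunk: anchor the timestamp, nothing to average yet.
    return { lastTimestamp: now, tokenSpeed: 0, tokenCount: 1, message: messageId }
  }
  const elapsedSeconds = (now - prev.lastTimestamp) / 1000
  const tokenCount = prev.tokenCount + 1
  return {
    ...prev,
    tokenCount,
    message: messageId,
    tokenSpeed: tokenCount / (elapsedSeconds > 0 ? elapsedSeconds : 1),
  }
}

// e.g. 51 chunks arriving 2.5 s after the first one → 51 / 2.5 ≈ 20.4 tokens/s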

View File

@ -1,4 +1,4 @@
import { Fragment, PropsWithChildren, useCallback, useEffect } from 'react' import { Fragment, useCallback, useEffect } from 'react'
import { import {
ImportingModel, ImportingModel,
@ -17,7 +17,7 @@ import {
updateImportingModelProgressAtom, updateImportingModelProgressAtom,
} from '@/helpers/atoms/Model.atom' } from '@/helpers/atoms/Model.atom'
const ModelImportListener = ({ children }: PropsWithChildren) => { const ModelImportListener = () => {
const updateImportingModelProgress = useSetAtom( const updateImportingModelProgress = useSetAtom(
updateImportingModelProgressAtom updateImportingModelProgressAtom
) )
@ -103,7 +103,7 @@ const ModelImportListener = ({ children }: PropsWithChildren) => {
onImportModelFailed, onImportModelFailed,
]) ])
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
export default ModelImportListener export default ModelImportListener

View File

@ -1,4 +1,4 @@
import { Fragment, ReactNode } from 'react' import { Fragment } from 'react'
import { useSetAtom } from 'jotai' import { useSetAtom } from 'jotai'
@ -10,11 +10,7 @@ import useSendChatMessage from '@/hooks/useSendChatMessage'
import { mainViewStateAtom } from '@/helpers/atoms/App.atom' import { mainViewStateAtom } from '@/helpers/atoms/App.atom'
type Props = { const QuickAskListener: React.FC = () => {
children: ReactNode
}
const QuickAskListener: React.FC<Props> = ({ children }) => {
const { sendChatMessage } = useSendChatMessage() const { sendChatMessage } = useSendChatMessage()
const setMainState = useSetAtom(mainViewStateAtom) const setMainState = useSetAtom(mainViewStateAtom)
@ -27,7 +23,7 @@ const QuickAskListener: React.FC<Props> = ({ children }) => {
debounced(input) debounced(input)
}) })
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
export default QuickAskListener export default QuickAskListener

View File

@ -45,17 +45,6 @@ describe('Responsive', () => {
}) })
}) })
it('renders children correctly', () => {
const { getByText } = render(
<Responsive>
<div>Child Content</div>
</Responsive>
)
// Check if the child content is rendered
expect(getByText('Child Content')).toBeInTheDocument()
})
it('hides left and right panels on small screens', () => { it('hides left and right panels on small screens', () => {
// Simulate mobile view // Simulate mobile view
window.matchMedia = jest.fn().mockImplementation((query) => ({ window.matchMedia = jest.fn().mockImplementation((query) => ({
@ -64,11 +53,7 @@ describe('Responsive', () => {
removeListener: jest.fn(), removeListener: jest.fn(),
})) }))
render( render(<Responsive />)
<Responsive>
<div>Child Content</div>
</Responsive>
)
// Check that the left and right panel states were updated to false // Check that the left and right panel states were updated to false
expect(mockSetShowLeftPanel).toHaveBeenCalledWith(false) expect(mockSetShowLeftPanel).toHaveBeenCalledWith(false)
@ -83,11 +68,7 @@ describe('Responsive', () => {
removeListener: jest.fn(), removeListener: jest.fn(),
})) }))
render( render(<Responsive />)
<Responsive>
<div>Child Content</div>
</Responsive>
)
// Change back to desktop view // Change back to desktop view
window.matchMedia = jest.fn().mockImplementation((query) => ({ window.matchMedia = jest.fn().mockImplementation((query) => ({
@ -97,11 +78,7 @@ describe('Responsive', () => {
})) }))
// Call the effect manually to simulate the component re-rendering // Call the effect manually to simulate the component re-rendering
const rerender = render( const rerender = render(<Responsive />)
<Responsive>
<div>Child Content</div>
</Responsive>
)
// Check that the last known states were restored (which were true initially) // Check that the last known states were restored (which were true initially)
expect(mockSetShowLeftPanel).toHaveBeenCalledWith(true) expect(mockSetShowLeftPanel).toHaveBeenCalledWith(true)

View File

@ -1,11 +1,11 @@
import { Fragment, PropsWithChildren, useEffect, useRef } from 'react' import { Fragment, useEffect, useRef } from 'react'
import { useMediaQuery } from '@janhq/joi' import { useMediaQuery } from '@janhq/joi'
import { useAtom } from 'jotai' import { useAtom } from 'jotai'
import { showLeftPanelAtom, showRightPanelAtom } from '@/helpers/atoms/App.atom' import { showLeftPanelAtom, showRightPanelAtom } from '@/helpers/atoms/App.atom'
const Responsive = ({ children }: PropsWithChildren) => { const Responsive = () => {
const matches = useMediaQuery('(max-width: 880px)') const matches = useMediaQuery('(max-width: 880px)')
const [showLeftPanel, setShowLeftPanel] = useAtom(showLeftPanelAtom) const [showLeftPanel, setShowLeftPanel] = useAtom(showLeftPanelAtom)
const [showRightPanel, setShowRightPanel] = useAtom(showRightPanelAtom) const [showRightPanel, setShowRightPanel] = useAtom(showRightPanelAtom)
@ -30,7 +30,7 @@ const Responsive = ({ children }: PropsWithChildren) => {
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [matches, setShowLeftPanel, setShowRightPanel]) }, [matches, setShowLeftPanel, setShowRightPanel])
return <Fragment>{children}</Fragment> return <Fragment></Fragment>
} }
export default Responsive export default Responsive

View File

@ -0,0 +1,20 @@
'use client'
import { useEffect } from 'react'
import { useConfigurations } from '@/hooks/useConfigurations'
import { useLoadTheme } from '@/hooks/useLoadTheme'
const SettingsHandler: React.FC = () => {
useLoadTheme()
const { configurePullOptions } = useConfigurations()
useEffect(() => {
configurePullOptions()
}, [configurePullOptions])
return <></>
}
export default SettingsHandler

View File

@ -1,93 +1,42 @@
'use client' 'use client'
import { PropsWithChildren, useCallback, useEffect, useState } from 'react' import { PropsWithChildren } from 'react'
import { Toaster } from 'react-hot-toast' import { Toaster } from 'react-hot-toast'
import Loader from '@/containers/Loader' import EventListener from '@/containers/Providers/EventListener'
import EventListenerWrapper from '@/containers/Providers/EventListener'
import JotaiWrapper from '@/containers/Providers/Jotai' import JotaiWrapper from '@/containers/Providers/Jotai'
import ThemeWrapper from '@/containers/Providers/Theme' import ThemeWrapper from '@/containers/Providers/Theme'
import { setupCoreServices } from '@/services/coreService'
import {
isCoreExtensionInstalled,
setupBaseExtensions,
} from '@/services/extensionService'
import Umami from '@/utils/umami' import Umami from '@/utils/umami'
import { CoreConfigurator } from './CoreConfigurator'
import DataLoader from './DataLoader' import DataLoader from './DataLoader'
import DeepLinkListener from './DeepLinkListener' import DeepLinkListener from './DeepLinkListener'
import KeyListener from './KeyListener' import KeyListener from './KeyListener'
import Responsive from './Responsive' import Responsive from './Responsive'
import { extensionManager } from '@/extension' import SettingsHandler from './SettingsHandler'
const Providers = ({ children }: PropsWithChildren) => { const Providers = ({ children }: PropsWithChildren) => {
const [setupCore, setSetupCore] = useState(false)
const [activated, setActivated] = useState(false)
const [settingUp, setSettingUp] = useState(false)
const setupExtensions = useCallback(async () => {
// Register all active extensions
await extensionManager.registerActive()
setTimeout(async () => {
if (!isCoreExtensionInstalled()) {
setSettingUp(true)
await setupBaseExtensions()
return
}
extensionManager.load()
setSettingUp(false)
setActivated(true)
}, 500)
}, [])
// Services Setup
useEffect(() => {
setupCoreServices()
setSetupCore(true)
return () => {
extensionManager.unload()
}
}, [])
useEffect(() => {
if (setupCore) {
// Electron
if (window && window.core?.api) {
setupExtensions()
} else {
// Host
setActivated(true)
}
}
}, [setupCore, setupExtensions])
return ( return (
<ThemeWrapper> <ThemeWrapper>
<JotaiWrapper> <JotaiWrapper>
<Umami /> <Umami />
{settingUp && <Loader description="Preparing Update..." />} <CoreConfigurator>
{setupCore && activated && (
<> <>
<Responsive> <Responsive />
<KeyListener> <KeyListener />
<EventListenerWrapper> <EventListener />
<DataLoader> <DataLoader />
<DeepLinkListener>{children}</DeepLinkListener> <SettingsHandler />
</DataLoader> <DeepLinkListener />
</EventListenerWrapper> <Toaster />
<Toaster /> {children}
</KeyListener>
</Responsive>
</> </>
)} </CoreConfigurator>
</JotaiWrapper> </JotaiWrapper>
</ThemeWrapper> </ThemeWrapper>
) )

View File

@ -0,0 +1,50 @@
import React from 'react'
import { render, fireEvent } from '@testing-library/react'
import TagInput from './index' // Adjust the import path as necessary
import '@testing-library/jest-dom'
describe('TagInput Component', () => {
let props: any
beforeEach(() => {
props = {
title: 'Tags',
name: 'tag-input',
description: 'Add your tags',
placeholder: 'Enter a tag',
value: ['tag1', 'tag2'],
onValueChanged: jest.fn(),
}
})
it('renders correctly', () => {
const { getByText, getByPlaceholderText } = render(<TagInput {...props} />)
expect(getByText('Tags')).toBeInTheDocument()
expect(getByText('tag1')).toBeInTheDocument()
expect(getByText('tag2')).toBeInTheDocument()
expect(getByPlaceholderText('Enter a tag')).toBeInTheDocument()
})
it('calls onValueChanged when a new tag is added', () => {
const { getByPlaceholderText } = render(<TagInput {...props} />)
const input = getByPlaceholderText('Enter a tag')
fireEvent.change(input, { target: { value: 'tag3' } })
fireEvent.keyDown(input, { key: 'Enter', code: 'Enter' })
expect(props.onValueChanged).toHaveBeenCalledWith(
expect.arrayContaining(['tag1', 'tag2', 'tag3'])
)
})
it('calls onValueChanged when a tag is removed', () => {
const { getAllByRole } = render(<TagInput {...props} />)
const removeButton = getAllByRole('button')[0] // Click on the first remove button
fireEvent.click(removeButton)
expect(props.onValueChanged).toHaveBeenCalledWith(
expect.arrayContaining(['tag2'])
)
})
})

View File

@ -0,0 +1,149 @@
import { useEffect, useRef, useState } from 'react'
import { Badge, Input, Tooltip } from '@janhq/joi'
import { InfoIcon, XIcon } from 'lucide-react'
type Props = {
title: string
disabled?: boolean
name: string
description: string
placeholder: string
value: string[]
onValueChanged?: (e: string | number | boolean | string[]) => void
}
function TooltipBadge({
item,
value,
onValueChanged,
}: {
item: string
value: string[]
onValueChanged?: (e: string[]) => void
}) {
const textRef = useRef<HTMLSpanElement>(null)
const [isEllipsized, setIsEllipsized] = useState(false)
useEffect(() => {
if (textRef.current) {
setIsEllipsized(textRef.current.scrollWidth > textRef.current.clientWidth)
}
}, [item])
return (
<div className="relative">
{isEllipsized ? (
<Tooltip
trigger={
<div className="relative">
<Badge theme="secondary" className="text-ellipsis">
<span
ref={textRef}
className="inline-block max-w-[100px] overflow-hidden text-ellipsis whitespace-nowrap"
>
{item}
</span>
<button
type="button"
className="ml-1.5 w-3 bg-transparent"
onClick={() => {
onValueChanged &&
onValueChanged(value.filter((i) => i !== item))
}}
>
<XIcon className="w-3" />
</button>
</Badge>
</div>
}
content={item}
/>
) : (
<Badge theme="secondary" className="relative">
<span
ref={textRef}
className="max-w-[90px] overflow-hidden text-ellipsis"
>
{item}
</span>
<button
type="button"
className="ml-1.5 w-3 bg-transparent"
onClick={() => {
onValueChanged && onValueChanged(value.filter((i) => i !== item))
}}
>
<XIcon className="w-3" />
</button>
</Badge>
)}
</div>
)
}
const TagInput = ({
title,
disabled = false,
value,
description,
placeholder,
onValueChanged,
}: Props) => {
const [pendingDataPoint, setPendingDataPoint] = useState('')
const addPendingDataPoint = () => {
if (pendingDataPoint) {
const newDataPoints = new Set([...value, pendingDataPoint])
onValueChanged && onValueChanged(Array.from(newDataPoints))
setPendingDataPoint('')
}
}
return (
<div className="flex flex-col">
<div className="mb-2 flex items-center gap-x-2">
<p className="font-medium">{title}</p>
<Tooltip
trigger={
<InfoIcon
size={16}
className="flex-shrink-0 text-[hsla(var(--text-secondary))]"
/>
}
content={description}
/>
</div>
<Input
value={pendingDataPoint}
disabled={disabled}
onChange={(e) => setPendingDataPoint(e.target.value)}
placeholder={placeholder}
className="w-full"
onKeyDown={(e) => {
if (e.key === 'Enter' || e.key === 'Tab') {
e.preventDefault()
addPendingDataPoint()
}
}}
/>
{value.length > 0 && (
<div className="relative mt-2 flex min-h-[2.5rem] flex-wrap items-center gap-2">
{value.map((item, idx) => {
return (
<TooltipBadge
key={idx}
item={item}
value={value}
onValueChanged={onValueChanged}
/>
)
})}
</div>
)}
</div>
)
}
export default TagInput
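A short usage sketch for the new TagInput, mirroring the props its test exercises; the import path is the one SettingComponent uses earlier in this diff, while the surrounding component and its values are invented:

// Usage sketch, mirroring the props exercised by the TagInput test above.
import { useState } from 'react'
import TagInput from '@/containers/TagInput'

const StopWordsField = () => {
  const [stopWords, setStopWords] = useState<string[]>(['</s>'])
  return (
    <TagInput
      title="Stop Words"
      name="stop"
      description="Press Enter or Tab to add a tag; click the x on a badge to remove it."
      placeholder="Enter a stop word"
      value={stopWords}
      onValueChanged={(v) => setStopWords(v as string[])}
    />
  )
}

export default StopWordsField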

View File

@ -12,14 +12,35 @@ export const janDataFolderPathAtom = atom('')
export const experimentalFeatureEnabledAtom = atomWithStorage( export const experimentalFeatureEnabledAtom = atomWithStorage(
EXPERIMENTAL_FEATURE, EXPERIMENTAL_FEATURE,
false false,
undefined,
{ getOnInit: true }
) )
export const proxyEnabledAtom = atomWithStorage(PROXY_FEATURE_ENABLED, false) export const proxyEnabledAtom = atomWithStorage(
export const proxyAtom = atomWithStorage(HTTPS_PROXY_FEATURE, '') PROXY_FEATURE_ENABLED,
false,
undefined,
{ getOnInit: true }
)
export const proxyAtom = atomWithStorage(HTTPS_PROXY_FEATURE, '', undefined, {
getOnInit: true,
})
export const ignoreSslAtom = atomWithStorage(IGNORE_SSL, false) export const ignoreSslAtom = atomWithStorage(IGNORE_SSL, false, undefined, {
export const vulkanEnabledAtom = atomWithStorage(VULKAN_ENABLED, false) getOnInit: true,
export const quickAskEnabledAtom = atomWithStorage(QUICK_ASK_ENABLED, false) })
export const vulkanEnabledAtom = atomWithStorage(
VULKAN_ENABLED,
false,
undefined,
{ getOnInit: true }
)
export const quickAskEnabledAtom = atomWithStorage(
QUICK_ASK_ENABLED,
false,
undefined,
{ getOnInit: true }
)
export const hostAtom = atom('http://localhost:1337/') export const hostAtom = atom('http://localhost:1337/')
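Every storage-backed atom in this file now passes Jotai's fourth argument, { getOnInit: true }, which (per Jotai v2's documented behaviour) reads the persisted value when the atom is created rather than after the first subscription, so the first render no longer flashes the default. A minimal before/after sketch with an illustrative key:

// Minimal sketch of the option added throughout this file (illustrative key).
import { atomWithStorage } from 'jotai/utils'

// Without getOnInit, the atom starts at `false` and only swaps to the stored
// value after mount, which can flash the default in the UI.
export const lazyFlagAtom = atomWithStorage('someFlag', false)

// With getOnInit, localStorage is read when the atom initializes, so the first
// render already sees the persisted value.
export const eagerFlagAtom = atomWithStorage('someFlag', false, undefined, {
  getOnInit: true,
})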

View File

@ -11,13 +11,22 @@ import {
updateThreadStateLastMessageAtom, updateThreadStateLastMessageAtom,
} from './Thread.atom' } from './Thread.atom'
import { TokenSpeed } from '@/types/token'
/** /**
* Stores all chat messages for all threads * Stores all chat messages for all threads
*/ */
export const chatMessages = atom<Record<string, ThreadMessage[]>>({}) export const chatMessages = atom<Record<string, ThreadMessage[]>>({})
/**
* Stores the status of the messages load for each thread
*/
export const readyThreadsMessagesAtom = atom<Record<string, boolean>>({}) export const readyThreadsMessagesAtom = atom<Record<string, boolean>>({})
/**
* Store the token speed for current message
*/
export const tokenSpeedAtom = atom<TokenSpeed | undefined>(undefined)
/** /**
* Return the chat messages for the current active conversation * Return the chat messages for the current active conversation
*/ */

View File

@ -16,7 +16,9 @@ enum ModelStorageAtomKeys {
*/ */
export const downloadedModelsAtom = atomWithStorage<Model[]>( export const downloadedModelsAtom = atomWithStorage<Model[]>(
ModelStorageAtomKeys.DownloadedModels, ModelStorageAtomKeys.DownloadedModels,
[] [],
undefined,
{ getOnInit: true }
) )
/** /**
@ -25,7 +27,9 @@ export const downloadedModelsAtom = atomWithStorage<Model[]>(
*/ */
export const configuredModelsAtom = atomWithStorage<Model[]>( export const configuredModelsAtom = atomWithStorage<Model[]>(
ModelStorageAtomKeys.AvailableModels, ModelStorageAtomKeys.AvailableModels,
[] [],
undefined,
{ getOnInit: true }
) )
export const removeDownloadedModelAtom = atom( export const removeDownloadedModelAtom = atom(

View File

@ -11,12 +11,39 @@ export const janSettingScreenAtom = atom<SettingScreen[]>([])
export const THEME = 'themeAppearance' export const THEME = 'themeAppearance'
export const REDUCE_TRANSPARENT = 'reduceTransparent' export const REDUCE_TRANSPARENT = 'reduceTransparent'
export const SPELL_CHECKING = 'spellChecking' export const SPELL_CHECKING = 'spellChecking'
export const themesOptionsAtom = atom<{ name: string; value: string }[]>([]) export const THEME_DATA = 'themeData'
export const janThemesPathAtom = atom<string | undefined>(undefined) export const THEME_OPTIONS = 'themeOptions'
export const selectedThemeIdAtom = atomWithStorage<string>(THEME, '') export const THEME_PATH = 'themePath'
export const themeDataAtom = atom<Theme | undefined>(undefined) export const themesOptionsAtom = atomWithStorage<
{ name: string; value: string }[]
>(THEME_OPTIONS, [], undefined, { getOnInit: true })
export const janThemesPathAtom = atomWithStorage<string | undefined>(
THEME_PATH,
undefined,
undefined,
{ getOnInit: true }
)
export const selectedThemeIdAtom = atomWithStorage<string>(
THEME,
'',
undefined,
{ getOnInit: true }
)
export const themeDataAtom = atomWithStorage<Theme | undefined>(
THEME_DATA,
undefined,
undefined,
{ getOnInit: true }
)
export const reduceTransparentAtom = atomWithStorage<boolean>( export const reduceTransparentAtom = atomWithStorage<boolean>(
REDUCE_TRANSPARENT, REDUCE_TRANSPARENT,
false false,
undefined,
{ getOnInit: true }
)
export const spellCheckAtom = atomWithStorage<boolean>(
SPELL_CHECKING,
false,
undefined,
{ getOnInit: true }
) )
export const spellCheckAtom = atomWithStorage<boolean>(SPELL_CHECKING, false)

View File

@ -207,7 +207,9 @@ export const setThreadModelParamsAtom = atom(
*/ */
export const activeSettingInputBoxAtom = atomWithStorage<boolean>( export const activeSettingInputBoxAtom = atomWithStorage<boolean>(
ACTIVE_SETTING_INPUT_BOX, ACTIVE_SETTING_INPUT_BOX,
false false,
undefined,
{ getOnInit: true }
) )
/** /**

View File

@ -0,0 +1,41 @@
import { useCallback, useEffect } from 'react'
import { ExtensionTypeEnum, ModelExtension } from '@janhq/core'
import { useAtomValue } from 'jotai'
import { extensionManager } from '@/extension'
import {
ignoreSslAtom,
proxyAtom,
proxyEnabledAtom,
} from '@/helpers/atoms/AppConfig.atom'
export const useConfigurations = () => {
const proxyEnabled = useAtomValue(proxyEnabledAtom)
const proxyUrl = useAtomValue(proxyAtom)
const proxyIgnoreSSL = useAtomValue(ignoreSslAtom)
const configurePullOptions = useCallback(() => {
extensionManager
.get<ModelExtension>(ExtensionTypeEnum.Model)
?.configurePullOptions(
proxyEnabled
? {
proxy_url: proxyUrl,
verify_peer_ssl: !proxyIgnoreSSL,
}
: {
proxy_url: '',
verify_peer_ssl: false,
}
)
}, [proxyEnabled, proxyUrl, proxyIgnoreSSL])
useEffect(() => {
configurePullOptions()
}, [])
return {
configurePullOptions,
}
}
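A hypothetical usage sketch (not part of this diff): a settings control that re-applies the pull options after proxy settings change. Only useConfigurations and configurePullOptions come from the hook above; the component itself is illustrative.

import { useConfigurations } from '@/hooks/useConfigurations'

const ApplyProxyButton = () => {
  const { configurePullOptions } = useConfigurations()
  return (
    <button
      onClick={() => {
        // Assumes the proxy atoms were already updated elsewhere; this simply
        // pushes the current values down to the model extension.
        configurePullOptions()
      }}
    >
      Apply proxy settings
    </button>
  )
}

export default ApplyProxyButton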

View File

@@ -21,8 +21,8 @@ jest.mock('@janhq/core', () => ({
    instance: jest.fn().mockReturnValue({
      get: jest.fn(),
      engines: {
-        values: jest.fn().mockReturnValue([])
-      }
+        values: jest.fn().mockReturnValue([]),
+      },
    }),
  },
}))
@@ -45,6 +45,7 @@ describe('useFactoryReset', () => {
        getAppConfigurations: mockGetAppConfigurations,
        updateAppConfiguration: mockUpdateAppConfiguration,
        relaunch: mockRelaunch,
+        factoryReset: jest.fn(),
      },
    }
    mockGetAppConfigurations.mockResolvedValue({
@@ -80,7 +81,6 @@ describe('useFactoryReset', () => {
    expect(mockSetFactoryResetState).toHaveBeenCalledWith(
      FactoryResetState.ClearLocalStorage
    )
-    expect(mockRelaunch).toHaveBeenCalled()
  })
  it('should keep current folder when specified', async () => {

View File

@@ -62,13 +62,17 @@ export default function useFactoryReset() {
        await window.core?.api?.updateAppConfiguration(configuration)
      }
+      // Perform factory reset
+      await window.core?.api?.factoryReset()
      // 4: Clear app local storage
      setFactoryResetState(FactoryResetState.ClearLocalStorage)
      // reset the localStorage
      localStorage.clear()
+      window.core = undefined
      // 5: Relaunch the app
-      await window.core?.api?.relaunch()
+      window.location.reload()
    },
    [defaultJanDataFolder, stopModel, setFactoryResetState]
  )

View File

@@ -4,6 +4,11 @@ import { fs, joinPath } from '@janhq/core'
import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { useLoadTheme } from './useLoadTheme'
+import { janDataFolderPathAtom } from '@/helpers/atoms/AppConfig.atom'
+import {
+  selectedThemeIdAtom,
+  themeDataAtom,
+} from '@/helpers/atoms/Setting.atom'
// Mock dependencies
jest.mock('next-themes')
@@ -36,10 +41,25 @@ describe('useLoadTheme', () => {
  it('should load theme and set variables', async () => {
    // Mock Jotai hooks
-    ;(useAtomValue as jest.Mock).mockReturnValue(mockJanDataFolderPath)
+    ;(useAtomValue as jest.Mock).mockImplementation((atom) => {
+      switch (atom) {
+        case janDataFolderPathAtom:
+          return mockJanDataFolderPath
+        default:
+          return undefined
+      }
+    })
    ;(useSetAtom as jest.Mock).mockReturnValue(jest.fn())
-    ;(useAtom as jest.Mock).mockReturnValue([mockSelectedThemeId, jest.fn()])
-    ;(useAtom as jest.Mock).mockReturnValue([mockThemeData, jest.fn()])
+    ;(useAtom as jest.Mock).mockImplementation((atom) => {
+      switch (atom) {
+        case selectedThemeIdAtom:
+          return [mockSelectedThemeId, jest.fn()]
+        case themeDataAtom:
+          return [mockThemeData, jest.fn()]
+        default:
+          return [undefined, jest.fn()]
+      }
+    })
    // Mock fs and joinPath
    ;(fs.readdirSync as jest.Mock).mockResolvedValue(['joi-light', 'joi-dark'])

View File

@@ -4,7 +4,7 @@ import { useTheme } from 'next-themes'
import { fs, joinPath } from '@janhq/core'
-import { useAtom, useAtomValue, useSetAtom } from 'jotai'
+import { useAtom, useAtomValue } from 'jotai'
import cssVars from '@/utils/jsonToCssVariables'
@@ -20,8 +20,8 @@ type NativeThemeProps = 'light' | 'dark'
export const useLoadTheme = () => {
  const janDataFolderPath = useAtomValue(janDataFolderPathAtom)
-  const setThemeOptions = useSetAtom(themesOptionsAtom)
-  const setThemePath = useSetAtom(janThemesPathAtom)
+  const [themeOptions, setThemeOptions] = useAtom(themesOptionsAtom)
+  const [themePath, setThemePath] = useAtom(janThemesPathAtom)
  const [themeData, setThemeData] = useAtom(themeDataAtom)
  const [selectedIdTheme, setSelectedIdTheme] = useAtom(selectedThemeIdAtom)
  const { setTheme } = useTheme()
@@ -84,11 +84,23 @@ export const useLoadTheme = () => {
    setThemePath,
  ])
-  useEffect(() => {
-    getThemes()
+  const applyTheme = useCallback(async () => {
+    if (!themeData || !themeOptions || !themePath) {
+      await getThemes()
+    } else {
+      const variables = cssVars(themeData.variables)
+      const headTag = document.getElementsByTagName('head')[0]
+      const styleTag = document.createElement('style')
+      styleTag.innerHTML = `:root {${variables}}`
+      headTag.appendChild(styleTag)
+    }
    setNativeTheme(themeData?.nativeTheme as NativeThemeProps)
+  }, [themeData, themeOptions, themePath, getThemes])
+  useEffect(() => {
+    applyTheme()
  }, [
-    getThemes,
+    applyTheme,
    selectedIdTheme,
    setNativeTheme,
    setSelectedIdTheme,

View File

@@ -43,7 +43,7 @@ describe('useModels', () => {
    const { result } = renderHook(() => useModels())
    await act(() => {
-      result.current?.loadDataModel()
+      result.current?.getData()
    })
    expect(mockModelExtension.getModels).toHaveBeenCalled()
@@ -70,7 +70,7 @@ describe('useModels', () => {
    const { result } = renderHook(() => useModels())
    await act(() => {
-      result.current?.loadDataModel()
+      result.current?.getData()
    })
    expect(mockModelExtension.getModels()).rejects.toThrow()

View File

@@ -9,18 +9,14 @@ import {
  ModelManager,
} from '@janhq/core'
-import { useSetAtom, useAtom, useAtomValue } from 'jotai'
+import { useSetAtom } from 'jotai'
import { useDebouncedCallback } from 'use-debounce'
import { isLocalEngine } from '@/utils/modelEngine'
import { extensionManager } from '@/extension'
-import {
-  ignoreSslAtom,
-  proxyAtom,
-  proxyEnabledAtom,
-} from '@/helpers/atoms/AppConfig.atom'
import {
  configuredModelsAtom,
  downloadedModelsAtom,
@@ -32,11 +28,8 @@ import {
 * and updates the atoms accordingly.
 */
const useModels = () => {
-  const [downloadedModels, setDownloadedModels] = useAtom(downloadedModelsAtom)
+  const setDownloadedModels = useSetAtom(downloadedModelsAtom)
  const setExtensionModels = useSetAtom(configuredModelsAtom)
-  const proxyEnabled = useAtomValue(proxyEnabledAtom)
-  const proxyUrl = useAtomValue(proxyAtom)
-  const proxyIgnoreSSL = useAtomValue(ignoreSslAtom)
  const getData = useCallback(() => {
    const getDownloadedModels = async () => {
@@ -87,18 +80,17 @@ const useModels = () => {
  const updateStates = useCallback(() => {
    const cachedModels = ModelManager.instance().models.values().toArray()
-    const toUpdate = [
+    setDownloadedModels((downloadedModels) => [
      ...downloadedModels,
      ...cachedModels.filter(
        (e) =>
          !isLocalEngine(e.engine) &&
          !downloadedModels.some((g: Model) => g.id === e.id)
      ),
-    ]
-    setDownloadedModels(toUpdate)
+    ])
    setExtensionModels(cachedModels)
-  }, [downloadedModels, setDownloadedModels, setExtensionModels])
+  }, [setDownloadedModels, setExtensionModels])
  const getModels = async (): Promise<Model[]> =>
    extensionManager
@@ -118,25 +110,8 @@ const useModels = () => {
    }
  }, [reloadData, updateStates])
-  const configurePullOptions = useCallback(() => {
-    extensionManager
-      .get<ModelExtension>(ExtensionTypeEnum.Model)
-      ?.configurePullOptions(
-        proxyEnabled
-          ? {
-              proxy_url: proxyUrl,
-              verify_peer_ssl: !proxyIgnoreSSL,
-            }
-          : {
-              proxy_url: '',
-              verify_peer_ssl: false,
-            }
-      )
-  }, [proxyEnabled, proxyUrl, proxyIgnoreSSL])
  return {
-    loadDataModel: getData,
+    getData,
-    configurePullOptions,
  }
}
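The updateStates change above is the usual jotai fix for a stale-closure dependency: pass an updater function to the setter so the latest atom value is read at update time instead of being captured by the callback. A minimal sketch of the pattern, with illustrative names that are not Jan code:

import { atom, useSetAtom } from 'jotai'

type Item = { id: string }
const itemsAtom = atom<Item[]>([])

const useAppendItems = () => {
  const setItems = useSetAtom(itemsAtom)
  // No dependency on the current list is needed: `prev` is always the latest
  // value, even if several updates land before the next render.
  return (incoming: Item[]) =>
    setItems((prev) => [
      ...prev,
      ...incoming.filter((item) => !prev.some((p) => p.id === item.id)),
    ])
}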

View File

@@ -31,7 +31,8 @@ export default function useRecommendedModel() {
  const getAndSortDownloadedModels = useCallback(async (): Promise<Model[]> => {
    const models = downloadedModels.sort((a, b) =>
-      a.engine !== InferenceEngine.nitro && b.engine === InferenceEngine.nitro
+      a.engine !== InferenceEngine.cortex_llamacpp &&
+      b.engine === InferenceEngine.cortex_llamacpp
        ? 1
        : -1
    )
@@ -43,6 +44,7 @@
    Model | undefined
  > => {
    const models = await getAndSortDownloadedModels()
+    if (!activeThread) return
    const modelId = activeThread.assistants[0]?.model.id
    const model = models.find((model) => model.id === modelId)

View File

@@ -34,6 +34,7 @@ import {
  addNewMessageAtom,
  deleteMessageAtom,
  getCurrentChatMessagesAtom,
+  tokenSpeedAtom,
} from '@/helpers/atoms/ChatMessage.atom'
import { selectedModelAtom } from '@/helpers/atoms/Model.atom'
import {
@@ -45,7 +46,6 @@ import {
  updateThreadWaitingForResponseAtom,
} from '@/helpers/atoms/Thread.atom'
-export const queuedMessageAtom = atom(false)
export const reloadModelAtom = atom(false)
export default function useSendChatMessage() {
@@ -70,7 +70,7 @@
  const [fileUpload, setFileUpload] = useAtom(fileUploadAtom)
  const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)
  const activeThreadRef = useRef<Thread | undefined>()
-  const setQueuedMessage = useSetAtom(queuedMessageAtom)
+  const setTokenSpeed = useSetAtom(tokenSpeedAtom)
  const selectedModelRef = useRef<Model | undefined>()
@@ -135,7 +135,10 @@
    sendChatMessage(toSendMessage.content[0]?.text.value)
  }
-  const sendChatMessage = async (message: string) => {
+  const sendChatMessage = async (
+    message: string,
+    messages?: ThreadMessage[]
+  ) => {
    if (!message || message.trim().length === 0) return
    if (!activeThreadRef.current) {
@@ -144,6 +147,7 @@
    }
    if (engineParamsUpdate) setReloadModel(true)
+    setTokenSpeed(undefined)
    const runtimeParams = extractInferenceParams(activeModelParams)
    const settingParams = extractModelLoadParams(activeModelParams)
@@ -187,7 +191,7 @@
        parameters: runtimeParams,
      },
      activeThreadRef.current,
-      currentMessages
+      messages ?? currentMessages
    ).addSystemMessage(activeThreadRef.current.assistants[0].instructions)
    requestBuilder.pushMessage(prompt, base64Blob, fileUpload[0]?.type)
@@ -228,9 +232,7 @@
    }
    if (modelRef.current?.id !== modelId) {
-      setQueuedMessage(true)
      const error = await startModel(modelId).catch((error: Error) => error)
-      setQueuedMessage(false)
      if (error) {
        updateThreadWaiting(activeThreadRef.current.id, false)
        return

View File

@@ -1,4 +1,4 @@
-import { useState, useEffect } from 'react'
+import { useState, useEffect, useMemo } from 'react'
import { useAtomValue } from 'jotai'
@@ -12,8 +12,9 @@ export function useStarterScreen() {
  const downloadedModels = useAtomValue(downloadedModelsAtom)
  const threads = useAtomValue(threadsAtom)
-  const isDownloadALocalModel = downloadedModels.some((x) =>
-    isLocalEngine(x.engine)
+  const isDownloadALocalModel = useMemo(
+    () => downloadedModels.some((x) => isLocalEngine(x.engine)),
+    [downloadedModels]
  )
  const [extensionHasSettings, setExtensionHasSettings] = useState<
@@ -57,14 +58,19 @@ export function useStarterScreen() {
      setExtensionHasSettings(extensionsMenu)
    }
    getAllSettings()
+    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])
-  const isAnyRemoteModelConfigured = extensionHasSettings.some(
-    (x) => x.apiKey.length > 1
+  const isAnyRemoteModelConfigured = useMemo(
+    () => extensionHasSettings.some((x) => x.apiKey.length > 1),
+    [extensionHasSettings]
  )
-  const isShowStarterScreen =
-    !isAnyRemoteModelConfigured && !isDownloadALocalModel && !threads.length
+  const isShowStarterScreen = useMemo(
+    () =>
+      !isAnyRemoteModelConfigured && !isDownloadALocalModel && !threads.length,
+    [isAnyRemoteModelConfigured, isDownloadALocalModel, threads]
+  )
  return {
    extensionHasSettings,

View File

@@ -1,6 +1,6 @@
{
  "name": "@janhq/web",
-  "version": "0.5.9",
+  "version": "0.5.10",
  "private": true,
  "homepage": "./",
  "scripts": {
@@ -16,10 +16,11 @@
  "dependencies": {
    "@janhq/core": "link:./core",
    "@janhq/joi": "link:./joi",
+    "@tanstack/react-virtual": "^3.10.9",
    "autoprefixer": "10.4.16",
    "class-variance-authority": "^0.7.0",
    "framer-motion": "^10.16.4",
-    "highlight.js": "^11.9.0",
+    "highlight.js": "^11.10.0",
    "jotai": "^2.6.0",
    "katex": "^0.16.10",
    "lodash": "^4.17.21",
@@ -29,7 +30,6 @@
    "next-themes": "^0.2.1",
    "postcss": "8.4.31",
    "postcss-url": "10.1.3",
-    "posthog-js": "^1.95.1",
    "react": "18.2.0",
    "react-circular-progressbar": "^2.1.0",
    "react-dom": "18.2.0",
@@ -38,7 +38,7 @@
    "react-icons": "^4.12.0",
    "react-markdown": "^9.0.1",
    "react-toastify": "^9.1.3",
-    "rehype-highlight": "^7.0.1",
+    "rehype-highlight": "^6.0.0",
    "rehype-highlight-code-lines": "^1.0.4",
    "rehype-katex": "^7.0.1",
    "rehype-raw": "^7.0.0",
@@ -83,5 +83,8 @@
    "rimraf": "^5.0.5",
    "ts-jest": "^29.2.5",
    "typescript": "^5.3.3"
-  }
+  },
+  "resolutions": {
+    "highlight.js": "11.10.0"
+  }
}

View File

@@ -86,7 +86,7 @@ const LocalServerRightPanel = () => {
  }, [currentModelSettingParams, setLocalAPIserverModelParams])
  const onValueChanged = useCallback(
-    (key: string, value: string | number | boolean) => {
+    (key: string, value: string | number | boolean | string[]) => {
      setCurrentModelSettingParams((prevParams) => ({
        ...prevParams,
        [key]: value,

View File

@@ -25,7 +25,7 @@ import { useDebouncedCallback } from 'use-debounce'
import { snackbar, toaster } from '@/containers/Toast'
import { useActiveModel } from '@/hooks/useActiveModel'
-import useModels from '@/hooks/useModels'
+import { useConfigurations } from '@/hooks/useConfigurations'
import { useSettings } from '@/hooks/useSettings'
import DataFolder from './DataFolder'
@@ -68,7 +68,7 @@ const Advanced = () => {
  const [dropdownOptions, setDropdownOptions] = useState<HTMLDivElement | null>(
    null
  )
-  const { configurePullOptions } = useModels()
+  const { configurePullOptions } = useConfigurations()
  const [toggle, setToggle] = useState<HTMLDivElement | null>(null)
@@ -438,7 +438,7 @@ const Advanced = () => {
  {/* Vulkan for AMD GPU/ APU and Intel Arc GPU */}
  {!isMac && experimentalEnabled && (
    <div className="flex w-full flex-col items-start justify-between gap-4 border-b border-[hsla(var(--app-border))] py-4 first:pt-0 last:border-none sm:flex-row">
-      <div className="flex-shrink-0 space-y-1">
+      <div className="space-y-1">
        <div className="flex gap-x-2">
          <h6 className="font-semibold capitalize">Vulkan Support</h6>
        </div>
@@ -447,11 +447,12 @@ const Advanced = () => {
          model performance (reload needed).
        </p>
      </div>
+      <div className="flex-shrink-0">
        <Switch
          checked={vulkanEnabled}
          onChange={(e) => updateVulkanEnabled(e.target.checked)}
        />
+      </div>
    </div>
  )}

View File

@@ -44,7 +44,7 @@ const ExtensionSetting = () => {
  const onValueChanged = async (
    key: string,
-    value: string | number | boolean
+    value: string | number | boolean | string[]
  ) => {
    // find the key in settings state, update it and set the state back
    const newSettings = settings.map((setting) => {

View File

@@ -39,14 +39,6 @@ const availableHotkeys = [
    combination: 'Shift Enter',
    description: 'Insert a new line (in input field)',
  },
-  {
-    combination: 'Arrow Up',
-    description: 'Navigate to previous option (within search dialog)',
-  },
-  {
-    combination: 'Arrow Down',
-    description: 'Navigate to next option (within search dialog)',
-  },
]
const Hotkeys = () => {

View File

@@ -51,7 +51,7 @@ const SettingDetailTextInputItem = ({
  }, [])
  const copy = useCallback(() => {
-    navigator.clipboard.writeText(value)
+    navigator.clipboard.writeText(value as string)
    if (value.length > 0) {
      setCopied(true)
    }

View File

@@ -5,7 +5,10 @@ import SettingDetailToggleItem from './SettingDetailToggleItem'
type Props = {
  componentProps: SettingComponentProps[]
-  onValueUpdated: (key: string, value: string | number | boolean) => void
+  onValueUpdated: (
+    key: string,
+    value: string | number | boolean | string[]
+  ) => void
}
const SettingDetailItem = ({ componentProps, onValueUpdated }: Props) => {

View File

@@ -24,7 +24,7 @@ const AssistantSetting: React.FC<Props> = ({ componentData }) => {
  const setEngineParamsUpdate = useSetAtom(engineParamsUpdateAtom)
  const onValueChanged = useCallback(
-    (key: string, value: string | number | boolean) => {
+    (key: string, value: string | number | boolean | string[]) => {
      if (!activeThread) return
      const shouldReloadModel =
        componentData.find((x) => x.key === key)?.requireModelReload ?? false

View File

@@ -24,6 +24,8 @@ import useDownloadModel from '@/hooks/useDownloadModel'
import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
+import { useStarterScreen } from '@/hooks/useStarterScreen'
import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'
import {
  getLogoEngine,
@@ -38,16 +40,8 @@ import {
} from '@/helpers/atoms/Model.atom'
import { selectedSettingAtom } from '@/helpers/atoms/Setting.atom'
-type Props = {
-  extensionHasSettings: {
-    name?: string
-    setting: string
-    apiKey: string
-    provider: string
-  }[]
-}
-const OnDeviceStarterScreen = ({ extensionHasSettings }: Props) => {
+const OnDeviceStarterScreen = () => {
+  const { extensionHasSettings } = useStarterScreen()
  const [searchValue, setSearchValue] = useState('')
  const [isOpen, setIsOpen] = useState(Boolean(searchValue.length))
  const downloadingModels = useAtomValue(getDownloadingModelAtom)
@@ -99,7 +93,10 @@ const OnDeviceStarterScreen = ({ extensionHasSettings }: Props) => {
    return rows
  }
-  const rows = getRows(groupByEngine, itemsPerRow)
+  const rows = getRows(
+    groupByEngine.sort((a, b) => a.localeCompare(b)),
+    itemsPerRow
+  )
  const refDropdown = useClickOutside(() => setIsOpen(false))

View File

@@ -1,10 +1,10 @@
-import { MessageStatus } from '@janhq/core'
+import { memo, useEffect, useMemo, useRef, useState } from 'react'
+import { ThreadMessage } from '@janhq/core'
+import { useVirtualizer } from '@tanstack/react-virtual'
import { useAtomValue } from 'jotai'
-import ErrorMessage from '@/containers/ErrorMessage'
-import ListContainer from '@/containers/ListContainer'
import { loadModelErrorAtom } from '@/hooks/useActiveModel'
import ChatItem from '../ChatItem'
@@ -15,33 +15,133 @@ import EmptyThread from './EmptyThread'
import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'
-const ChatBody = () => {
+const ChatConfigurator = memo(() => {
  const messages = useAtomValue(getCurrentChatMessagesAtom)
+  const [current, setCurrent] = useState<ThreadMessage[]>([])
  const loadModelError = useAtomValue(loadModelErrorAtom)
+  const isMessagesIdentificial = (
+    arr1: ThreadMessage[],
+    arr2: ThreadMessage[]
+  ): boolean => {
+    if (arr1.length !== arr2.length) return false
+    return arr1.every((item, index) => item.id === arr2[index].id)
+  }
+  useEffect(() => {
+    if (
+      messages?.length !== current?.length ||
+      !isMessagesIdentificial(messages, current)
+    ) {
+      setCurrent(messages)
+    }
+  }, [messages, current, loadModelError])
  if (!messages.length) return <EmptyThread />
  return (
-    <ListContainer>
-      {messages.map((message, index) => (
-        <div key={message.id}>
-          {message.status !== MessageStatus.Error &&
-            message.content.length > 0 && (
-              <ChatItem {...message} key={message.id} />
-            )}
-          {!loadModelError &&
-            index === messages.length - 1 &&
-            message.status !== MessageStatus.Pending &&
-            message.status !== MessageStatus.Ready && (
-              <ErrorMessage message={message} />
-            )}
-        </div>
-      ))}
-      {loadModelError && <LoadModelError />}
-    </ListContainer>
+    <div className="flex h-full w-full flex-col">
+      <ChatBody loadModelError={loadModelError} messages={current} />
+    </div>
  )
-}
+})
-export default ChatBody
+const ChatBody = memo(
({
messages,
loadModelError,
}: {
messages: ThreadMessage[]
loadModelError?: string
}) => {
// The scrollable element for your list
const parentRef = useRef(null)
const count = useMemo(
() => (messages?.length ?? 0) + (loadModelError ? 1 : 0),
[messages, loadModelError]
)
// The virtualizer
const virtualizer = useVirtualizer({
count,
getScrollElement: () => parentRef.current,
estimateSize: () => 35,
overscan: 5,
})
useEffect(() => {
if (count > 0 && messages && virtualizer) {
virtualizer.scrollToIndex(count - 1)
}
}, [count, virtualizer, messages, loadModelError])
const items = virtualizer.getVirtualItems()
virtualizer.shouldAdjustScrollPositionOnItemSizeChange = (
item,
_,
instance
) => {
return (
// item.start < (instance.scrollOffset ?? 0) &&
instance.scrollDirection !== 'backward'
)
}
return (
<div className="scroll-area flex h-full w-full flex-col overflow-x-hidden">
<div
ref={parentRef}
className="List"
style={{
flex: 1,
height: '100%',
width: '100%',
overflowY: 'auto',
overflowX: 'hidden',
contain: 'strict',
}}
>
<div
style={{
height: virtualizer.getTotalSize(),
width: '100%',
position: 'relative',
}}
>
<div
style={{
position: 'absolute',
top: 0,
left: 0,
width: '100%',
transform: `translateY(${items[0]?.start ?? 0}px)`,
}}
>
{items.map((virtualRow) => (
<div
key={virtualRow.key}
data-index={virtualRow.index}
ref={virtualizer.measureElement}
>
{loadModelError && virtualRow.index === count - 1 ? (
<LoadModelError />
) : (
<ChatItem
{...messages[virtualRow.index]}
loadModelError={loadModelError}
isCurrentMessage={
virtualRow.index === messages?.length - 1
}
/>
)}
</div>
))}
</div>
</div>
</div>
</div>
)
}
)
export default memo(ChatConfigurator)
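For reference, the core of the @tanstack/react-virtual pattern used by the new ChatBody reduces to the sketch below: a scroll-container ref, a virtualizer configured with count/getScrollElement/estimateSize, and absolutely positioned rows measured via measureElement. This is a generic illustration, not Jan code.

import { useRef } from 'react'
import { useVirtualizer } from '@tanstack/react-virtual'

const VirtualList = ({ rows }: { rows: string[] }) => {
  const parentRef = useRef<HTMLDivElement>(null)
  const virtualizer = useVirtualizer({
    count: rows.length,
    getScrollElement: () => parentRef.current,
    estimateSize: () => 35, // rough row height; measureElement refines it
    overscan: 5,
  })

  return (
    <div ref={parentRef} style={{ height: 400, overflowY: 'auto' }}>
      {/* spacer sized to the full virtual height */}
      <div style={{ height: virtualizer.getTotalSize(), position: 'relative' }}>
        {virtualizer.getVirtualItems().map((row) => (
          <div
            key={row.key}
            data-index={row.index}
            ref={virtualizer.measureElement}
            style={{
              position: 'absolute',
              top: 0,
              left: 0,
              width: '100%',
              transform: `translateY(${row.start}px)`,
            }}
          >
            {rows[row.index]}
          </div>
        ))}
      </div>
    </div>
  )
}

export default VirtualList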

View File

@@ -1,15 +1,67 @@
-import React, { forwardRef } from 'react'
-import { ThreadMessage } from '@janhq/core'
+import React, { forwardRef, useEffect, useState } from 'react'
+import {
+  events,
+  MessageEvent,
+  MessageStatus,
+  ThreadContent,
+  ThreadMessage,
+} from '@janhq/core'
-import SimpleTextMessage from '../SimpleTextMessage'
+import ErrorMessage from '@/containers/ErrorMessage'
+import MessageContainer from '../TextMessage'
type Ref = HTMLDivElement
-const ChatItem = forwardRef<Ref, ThreadMessage>((message, ref) => (
-  <div ref={ref} className="relative">
-    <SimpleTextMessage {...message} />
-  </div>
-))
+type Props = {
+  loadModelError?: string
+  isCurrentMessage?: boolean
+} & ThreadMessage
const ChatItem = forwardRef<Ref, Props>((message, ref) => {
const [content, setContent] = useState<ThreadContent[]>(message.content)
const [status, setStatus] = useState<MessageStatus>(message.status)
const [errorMessage, setErrorMessage] = useState<ThreadMessage | undefined>(
message.isCurrentMessage && message.status === MessageStatus.Error
? message
: undefined
)
function onMessageUpdate(data: ThreadMessage) {
if (data.id === message.id) {
setContent(data.content)
if (data.status !== status) setStatus(data.status)
if (data.status === MessageStatus.Error && message.isCurrentMessage)
setErrorMessage(data)
}
}
useEffect(() => {
if (!message.isCurrentMessage && errorMessage) setErrorMessage(undefined)
}, [message, errorMessage])
useEffect(() => {
if (message.status === MessageStatus.Pending)
events.on(MessageEvent.OnMessageUpdate, onMessageUpdate)
return () => {
events.off(MessageEvent.OnMessageUpdate, onMessageUpdate)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
return (
<>
{status !== MessageStatus.Error && content?.length > 0 && (
<div ref={ref} className="relative">
<MessageContainer {...message} content={content} status={status} />
</div>
)}
{errorMessage && !message.loadModelError && (
<ErrorMessage message={errorMessage} />
)}
</>
)
})
export default ChatItem
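The event wiring in ChatItem follows the subscribe/unsubscribe pattern sketched below, extracted into a hypothetical hook. It uses the same events and MessageEvent API from @janhq/core that the component imports; the hook itself is illustrative, not Jan code.

import { useEffect, useState } from 'react'
import { events, MessageEvent, ThreadMessage } from '@janhq/core'

const useMessageUpdates = (messageId: string) => {
  const [latest, setLatest] = useState<ThreadMessage | undefined>(undefined)

  useEffect(() => {
    const onUpdate = (data: ThreadMessage) => {
      // Streaming updates arrive for every message; keep only ours.
      if (data.id === messageId) setLatest(data)
    }
    events.on(MessageEvent.OnMessageUpdate, onUpdate)
    // Always detach the listener on unmount to avoid stale handlers.
    return () => {
      events.off(MessageEvent.OnMessageUpdate, onUpdate)
    }
  }, [messageId])

  return latest
}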

View File

@@ -90,7 +90,7 @@ const EditChatInput: React.FC<Props> = ({ message }) => {
        newMessages
      )
        .then(() => {
-          sendChatMessage(editPrompt)
+          sendChatMessage(editPrompt, newMessages)
        })
    }
  }

View File

@@ -49,7 +49,7 @@ const LoadModelError = () => {
  } else {
    return (
      <div className="mx-6 flex flex-col items-center space-y-2 text-center font-medium text-[hsla(var(--text-secondary))]">
-        {loadModelError && <p>{loadModelError}</p>}
+        {loadModelError && <p className="capitalize">{loadModelError}</p>}
        <p>
          {`Something's wrong.`}&nbsp;Access&nbsp;
          <span
@@ -66,7 +66,7 @@
  }
  return (
-    <div className="mt-10">
+    <div className="flex flex-1">
      <div className="flex w-full flex-col items-center text-center font-medium">
        <p className="w-[90%]">
          <ErrorMessage />

View File

@@ -1,413 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react-hooks/exhaustive-deps */
/* eslint-disable @typescript-eslint/naming-convention */
import React, { useEffect, useState } from 'react'
import Markdown from 'react-markdown'
import {
ChatCompletionRole,
ContentType,
MessageStatus,
ThreadMessage,
} from '@janhq/core'
import { Tooltip } from '@janhq/joi'
import { useAtomValue } from 'jotai'
import { FolderOpenIcon } from 'lucide-react'
import rehypeHighlight from 'rehype-highlight'
import rehypeHighlightCodeLines from 'rehype-highlight-code-lines'
import rehypeKatex from 'rehype-katex'
import rehypeRaw from 'rehype-raw'
import remarkMath from 'remark-math'
import 'katex/dist/katex.min.css'
import { twMerge } from 'tailwind-merge'
import LogoMark from '@/containers/Brand/Logo/Mark'
import { useClipboard } from '@/hooks/useClipboard'
import { usePath } from '@/hooks/usePath'
import { getLanguageFromExtension } from '@/utils/codeLanguageExtension'
import { toGibibytes } from '@/utils/converter'
import { displayDate } from '@/utils/datetime'
import { openFileTitle } from '@/utils/titleUtils'
import EditChatInput from '../EditChatInput'
import Icon from '../FileUploadPreview/Icon'
import MessageToolbar from '../MessageToolbar'
import { RelativeImage } from './RelativeImage'
import {
editMessageAtom,
getCurrentChatMessagesAtom,
} from '@/helpers/atoms/ChatMessage.atom'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
const SimpleTextMessage: React.FC<ThreadMessage> = (props) => {
let text = ''
const isUser = props.role === ChatCompletionRole.User
const isSystem = props.role === ChatCompletionRole.System
const editMessage = useAtomValue(editMessageAtom)
const activeThread = useAtomValue(activeThreadAtom)
if (props.content && props.content.length > 0) {
text = props.content[0]?.text?.value ?? ''
}
const clipboard = useClipboard({ timeout: 1000 })
function extractCodeLines(node: { children: { children: any[] }[] }) {
const codeLines: any[] = []
// Helper function to extract text recursively from children
function getTextFromNode(node: {
type: string
value: any
children: any[]
}): string {
if (node.type === 'text') {
return node.value
} else if (node.children) {
return node.children.map(getTextFromNode).join('')
}
return ''
}
// Traverse each line in the <code> block
node.children[0].children.forEach(
(lineNode: {
type: string
tagName: string
value: any
children: any[]
}) => {
if (lineNode.type === 'element' && lineNode.tagName === 'span') {
const lineContent = getTextFromNode(lineNode)
codeLines.push(lineContent)
}
}
)
// Join the lines with newline characters for proper formatting
return codeLines.join('\n')
}
function wrapCodeBlocksWithoutVisit() {
return (tree: { children: any[] }) => {
tree.children = tree.children.map((node) => {
if (node.tagName === 'pre' && node.children[0]?.tagName === 'code') {
const language = node.children[0].properties.className?.[1]?.replace(
'language-',
''
)
if (extractCodeLines(node) === '') {
return node
}
return {
type: 'element',
tagName: 'div',
properties: {
className: ['code-block-wrapper'],
},
children: [
{
type: 'element',
tagName: 'div',
properties: {
className: [
'code-block',
'group/item',
'relative',
'my-4',
'overflow-auto',
],
},
children: [
{
type: 'element',
tagName: 'div',
properties: {
className:
'code-header bg-[hsla(var(--app-code-block))] flex justify-between items-center py-2 px-3 border-b border-[hsla(var(--app-border))] rounded-t-lg',
},
children: [
{
type: 'element',
tagName: 'span',
properties: {
className: 'text-xs font-medium text-gray-300',
},
children: [
{
type: 'text',
value: language
? `${getLanguageFromExtension(language)}`
: '',
},
],
},
{
type: 'element',
tagName: 'button',
properties: {
className:
'copy-button ml-auto flex items-center gap-1 text-xs font-medium text-gray-400 hover:text-gray-600 focus:outline-none',
onClick: (event: Event) => {
clipboard.copy(extractCodeLines(node))
const button = event.currentTarget as HTMLElement
button.innerHTML = `
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-check pointer-events-none text-green-600"><path d="M20 6 9 17l-5-5"/></svg>
<span>Copied</span>
`
setTimeout(() => {
button.innerHTML = `
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-copy pointer-events-none text-gray-400"><rect width="14" height="14" x="8" y="8" rx="2" ry="2"/><path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2"/></svg>
<span>Copy</span>
`
}, 2000)
},
},
children: [
{
type: 'element',
tagName: 'svg',
properties: {
xmlns: 'http://www.w3.org/2000/svg',
width: '16',
height: '16',
viewBox: '0 0 24 24',
fill: 'none',
stroke: 'currentColor',
strokeWidth: '2',
strokeLinecap: 'round',
strokeLinejoin: 'round',
className:
'lucide lucide-copy pointer-events-none text-gray-400',
},
children: [
{
type: 'element',
tagName: 'rect',
properties: {
width: '14',
height: '14',
x: '8',
y: '8',
rx: '2',
ry: '2',
},
children: [],
},
{
type: 'element',
tagName: 'path',
properties: {
d: 'M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2',
},
children: [],
},
],
},
{ type: 'text', value: 'Copy' },
],
},
],
},
node,
],
},
],
}
}
return node
})
}
}
const { onViewFile, onViewFileContainer } = usePath()
const [tokenCount, setTokenCount] = useState(0)
const [lastTimestamp, setLastTimestamp] = useState<number | undefined>()
const [tokenSpeed, setTokenSpeed] = useState(0)
const messages = useAtomValue(getCurrentChatMessagesAtom)
useEffect(() => {
if (props.status !== MessageStatus.Pending) {
return
}
const currentTimestamp = new Date().getTime() // Get current time in milliseconds
if (!lastTimestamp) {
// If this is the first update, just set the lastTimestamp and return
if (props.content[0]?.text?.value !== '')
setLastTimestamp(currentTimestamp)
return
}
const timeDiffInSeconds = (currentTimestamp - lastTimestamp) / 1000 // Time difference in seconds
const totalTokenCount = tokenCount + 1
const averageTokenSpeed = totalTokenCount / timeDiffInSeconds // Calculate average token speed
setTokenSpeed(averageTokenSpeed)
setTokenCount(totalTokenCount)
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [props.content])
return (
<div className="group relative mx-auto max-w-[700px] p-4">
<div
className={twMerge(
'mb-2 flex items-center justify-start gap-x-2',
!isUser && 'mt-2'
)}
>
{!isUser && !isSystem && <LogoMark width={28} />}
{isUser && (
<div className="flex h-8 w-8 items-center justify-center rounded-full border border-[hsla(var(--app-border))] last:border-none">
<svg
width="12"
height="16"
viewBox="0 0 12 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M6 0.497864C4.34315 0.497864 3 1.84101 3 3.49786C3 5.15472 4.34315 6.49786 6 6.49786C7.65685 6.49786 9 5.15472 9 3.49786C9 1.84101 7.65685 0.497864 6 0.497864ZM9.75 7.99786L2.24997 7.99787C1.00734 7.99787 0 9.00527 0 10.2479C0 11.922 0.688456 13.2633 1.81822 14.1701C2.93013 15.0625 4.42039 15.4979 6 15.4979C7.57961 15.4979 9.06987 15.0625 10.1818 14.1701C11.3115 13.2633 12 11.922 12 10.2479C12 9.00522 10.9926 7.99786 9.75 7.99786Z"
fill="#9CA3AF"
/>
</svg>
</div>
)}
<div
className={twMerge(
'font-extrabold capitalize',
isUser && 'text-gray-500'
)}
>
{isUser
? props.role
: (activeThread?.assistants[0].assistant_name ?? props.role)}
</div>
<p className="text-xs font-medium text-gray-400">
{displayDate(props.created)}
</p>
<div
className={twMerge(
'absolute right-0 cursor-pointer transition-all',
messages[messages.length - 1]?.id === props.id && !isUser
? 'absolute -bottom-8 right-4'
: 'hidden group-hover:absolute group-hover:right-4 group-hover:top-4 group-hover:flex'
)}
>
<MessageToolbar message={props} />
</div>
{messages[messages.length - 1]?.id === props.id &&
(props.status === MessageStatus.Pending || tokenSpeed > 0) && (
<p className="absolute right-8 text-xs font-medium text-[hsla(var(--text-secondary))]">
Token Speed: {Number(tokenSpeed).toFixed(2)}t/s
</p>
)}
</div>
<div
className={twMerge(
'w-full',
!isUser && !text.includes(' ') && 'break-all'
)}
>
<>
{props.content[0]?.type === ContentType.Image && (
<div className="group/image relative mb-2 inline-flex cursor-pointer overflow-hidden rounded-xl">
<div className="left-0 top-0 z-20 h-full w-full group-hover/image:inline-block">
<RelativeImage
src={props.content[0]?.text.annotations[0]}
id={props.id}
onClick={() =>
onViewFile(`${props.content[0]?.text.annotations[0]}`)
}
/>
</div>
<Tooltip
trigger={
<div
className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-[hsla(var(--app-bg))] group-hover/image:flex"
onClick={onViewFileContainer}
>
<FolderOpenIcon size={20} />
</div>
}
content={<span>{openFileTitle()}</span>}
/>
</div>
)}
{props.content[0]?.type === ContentType.Pdf && (
<div className="group/file bg-secondary relative mb-2 inline-flex w-60 cursor-pointer gap-x-3 overflow-hidden rounded-lg p-4">
<div
className="absolute left-0 top-0 z-20 hidden h-full w-full bg-black/20 backdrop-blur-sm group-hover/file:inline-block"
onClick={() =>
onViewFile(`${props.id}.${props.content[0]?.type}`)
}
/>
<Tooltip
trigger={
<div
className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-[hsla(var(--app-bg))] group-hover/file:flex"
onClick={onViewFileContainer}
>
<FolderOpenIcon size={20} />
</div>
}
content={<span>{openFileTitle()}</span>}
/>
<Icon type={props.content[0].type} />
<div className="w-full">
<h6 className="line-clamp-1 w-4/5 font-medium">
{props.content[0].text.name?.replaceAll(/[-._]/g, ' ')}
</h6>
<p className="text-[hsla(var(--text-secondary)]">
{toGibibytes(Number(props.content[0].text.size))}
</p>
</div>
</div>
)}
{editMessage === props.id ? (
<div>
<EditChatInput message={props} />
</div>
) : (
<div
className={twMerge(
'message max-width-[100%] flex flex-col gap-y-2 overflow-auto leading-relaxed'
)}
dir="ltr"
>
<Markdown
remarkPlugins={[remarkMath]}
rehypePlugins={[
[rehypeKatex, { throwOnError: false }],
rehypeRaw,
rehypeHighlight,
[rehypeHighlightCodeLines, { showLineNumbers: true }],
wrapCodeBlocksWithoutVisit,
]}
skipHtml={true}
>
{text}
</Markdown>
</div>
)}
</>
</div>
</div>
)
}
export default React.memo(SimpleTextMessage)

View File

@@ -0,0 +1,55 @@
import { memo } from 'react'
import { Tooltip } from '@janhq/joi'
import { FolderOpenIcon } from 'lucide-react'
import { usePath } from '@/hooks/usePath'
import { toGibibytes } from '@/utils/converter'
import { openFileTitle } from '@/utils/titleUtils'
import Icon from '../FileUploadPreview/Icon'
const DocMessage = ({
id,
name,
size,
}: {
id: string
name?: string
size?: number
}) => {
const { onViewFile, onViewFileContainer } = usePath()
return (
<div className="group/file bg-secondary relative mb-2 inline-flex w-60 cursor-pointer gap-x-3 overflow-hidden rounded-lg p-4">
<div
className="absolute left-0 top-0 z-20 hidden h-full w-full bg-black/20 backdrop-blur-sm group-hover/file:inline-block"
onClick={() => onViewFile(`${id}.pdf`)}
/>
<Tooltip
trigger={
<div
className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-[hsla(var(--app-bg))] group-hover/file:flex"
onClick={onViewFileContainer}
>
<FolderOpenIcon size={20} />
</div>
}
content={<span>{openFileTitle()}</span>}
/>
<Icon type="pdf" />
<div className="w-full">
<h6 className="line-clamp-1 w-4/5 font-medium">
{name?.replaceAll(/[-._]/g, ' ')}
</h6>
<p className="text-[hsla(var(--text-secondary)]">
{toGibibytes(Number(size))}
</p>
</div>
</div>
)
}
export default memo(DocMessage)

View File

@@ -0,0 +1,45 @@
import { memo, useMemo } from 'react'
import { ThreadContent } from '@janhq/core'
import { Tooltip } from '@janhq/joi'
import { FolderOpenIcon } from 'lucide-react'
import { usePath } from '@/hooks/usePath'
import { openFileTitle } from '@/utils/titleUtils'
import { RelativeImage } from '../TextMessage/RelativeImage'
const ImageMessage = ({ content }: { content: ThreadContent }) => {
const { onViewFile, onViewFileContainer } = usePath()
const annotation = useMemo(
() => content?.text?.annotations[0] ?? '',
[content]
)
return (
<div className="group/image relative mb-2 inline-flex cursor-pointer overflow-hidden rounded-xl">
<div className="left-0 top-0 z-20 h-full w-full group-hover/image:inline-block">
<RelativeImage
src={annotation}
onClick={() => onViewFile(annotation)}
/>
</div>
<Tooltip
trigger={
<div
className="absolute right-2 top-2 z-20 hidden h-8 w-8 cursor-pointer items-center justify-center rounded-md bg-[hsla(var(--app-bg))] group-hover/image:flex"
onClick={onViewFileContainer}
>
<FolderOpenIcon size={20} />
</div>
}
content={<span>{openFileTitle()}</span>}
/>
</div>
)
}
export default memo(ImageMessage)

View File

@@ -0,0 +1,222 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react-hooks/exhaustive-deps */
/* eslint-disable @typescript-eslint/naming-convention */
import React, { memo } from 'react'
import Markdown from 'react-markdown'
import latex from 'highlight.js/lib/languages/latex'
import rehypeHighlight from 'rehype-highlight'
import rehypeHighlightCodeLines from 'rehype-highlight-code-lines'
import rehypeKatex from 'rehype-katex'
import rehypeRaw from 'rehype-raw'
import remarkMath from 'remark-math'
import 'katex/dist/katex.min.css'
import { useClipboard } from '@/hooks/useClipboard'
import { getLanguageFromExtension } from '@/utils/codeLanguageExtension'
export const MarkdownTextMessage = memo(
({ text }: { id: string; text: string }) => {
const clipboard = useClipboard({ timeout: 1000 })
function extractCodeLines(node: { children: { children: any[] }[] }) {
const codeLines: any[] = []
// Helper function to extract text recursively from children
function getTextFromNode(node: {
type: string
value: any
children: any[]
}): string {
if (node.type === 'text') {
return node.value
} else if (node.children) {
return node.children.map(getTextFromNode).join('')
}
return ''
}
// Traverse each line in the <code> block
node.children[0].children.forEach(
(lineNode: {
type: string
tagName: string
value: any
children: any[]
}) => {
if (lineNode.type === 'element' && lineNode.tagName === 'span') {
const lineContent = getTextFromNode(lineNode)
codeLines.push(lineContent)
}
}
)
// Join the lines with newline characters for proper formatting
return codeLines.join('\n')
}
function wrapCodeBlocksWithoutVisit() {
return (tree: { children: any[] }) => {
tree.children = tree.children.map((node) => {
if (node.tagName === 'pre' && node.children[0]?.tagName === 'code') {
const language =
node.children[0].properties.className?.[1]?.replace(
'language-',
''
)
if (extractCodeLines(node) === '') {
return node
}
return {
type: 'element',
tagName: 'div',
properties: {
className: ['code-block-wrapper'],
},
children: [
{
type: 'element',
tagName: 'div',
properties: {
className: [
'code-block',
'group/item',
'relative',
'my-4',
'overflow-auto',
],
},
children: [
{
type: 'element',
tagName: 'div',
properties: {
className:
'code-header bg-[hsla(var(--app-code-block))] flex justify-between items-center py-2 px-3 code-header--border rounded-t-lg',
},
children: [
{
type: 'element',
tagName: 'span',
properties: {
className: 'text-xs font-medium text-gray-300',
},
children: [
{
type: 'text',
value: language
? `${getLanguageFromExtension(language)}`
: '',
},
],
},
{
type: 'element',
tagName: 'button',
properties: {
className:
'copy-button ml-auto flex items-center gap-1 text-xs font-medium text-gray-400 hover:text-gray-600 focus:outline-none',
onClick: (event: Event) => {
clipboard.copy(extractCodeLines(node))
const button = event.currentTarget as HTMLElement
button.innerHTML = `
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-check pointer-events-none text-green-600"><path d="M20 6 9 17l-5-5"/></svg>
<span>Copied</span>
`
setTimeout(() => {
button.innerHTML = `
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-copy pointer-events-none text-gray-400"><rect width="14" height="14" x="8" y="8" rx="2" ry="2"/><path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2"/></svg>
<span>Copy</span>
`
}, 2000)
},
},
children: [
{
type: 'element',
tagName: 'svg',
properties: {
xmlns: 'http://www.w3.org/2000/svg',
width: '16',
height: '16',
viewBox: '0 0 24 24',
fill: 'none',
stroke: 'currentColor',
strokeWidth: '2',
strokeLinecap: 'round',
strokeLinejoin: 'round',
className:
'lucide lucide-copy pointer-events-none text-gray-400',
},
children: [
{
type: 'element',
tagName: 'rect',
properties: {
width: '14',
height: '14',
x: '8',
y: '8',
rx: '2',
ry: '2',
},
children: [],
},
{
type: 'element',
tagName: 'path',
properties: {
d: 'M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2',
},
children: [],
},
],
},
{ type: 'text', value: 'Copy' },
],
},
],
},
node,
],
},
],
}
}
return node
})
}
}
return (
<>
<Markdown
remarkPlugins={[remarkMath]}
rehypePlugins={[
[rehypeKatex, { throwOnError: false }],
rehypeRaw,
[
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
rehypeHighlight,
{
languages: { latex },
subset: false,
plainText: ['txt', 'text'],
},
],
[rehypeHighlightCodeLines, { showLineNumbers: true }],
wrapCodeBlocksWithoutVisit,
]}
skipHtml={true}
>
{text}
</Markdown>
</>
)
}
)

View File

@@ -3,11 +3,9 @@ import { useEffect, useState } from 'react'
import { getJanDataFolderPath } from '@janhq/core'
export const RelativeImage = ({
-  id,
  src,
  onClick,
}: {
-  id: string
  src: string
  onClick: () => void
}) => {
@@ -22,7 +20,7 @@ export const RelativeImage = ({
    <button onClick={onClick}>
      <img
        className="aspect-auto h-[300px] cursor-pointer"
-        alt={id}
+        alt={src}
        src={src.includes('files/') ? `file://${path}/${src}` : src}
      />
    </button>

View File

@@ -0,0 +1,141 @@
import React, { useMemo } from 'react'
import { ChatCompletionRole, ContentType, ThreadMessage } from '@janhq/core'
import { useAtomValue } from 'jotai'
import 'katex/dist/katex.min.css'
import { twMerge } from 'tailwind-merge'
import LogoMark from '@/containers/Brand/Logo/Mark'
import { displayDate } from '@/utils/datetime'
import EditChatInput from '../EditChatInput'
import MessageToolbar from '../MessageToolbar'
import DocMessage from './DocMessage'
import ImageMessage from './ImageMessage'
import { MarkdownTextMessage } from './MarkdownTextMessage'
import {
editMessageAtom,
getCurrentChatMessagesAtom,
tokenSpeedAtom,
} from '@/helpers/atoms/ChatMessage.atom'
import { activeThreadAtom } from '@/helpers/atoms/Thread.atom'
const MessageContainer: React.FC<ThreadMessage> = (props) => {
const isUser = props.role === ChatCompletionRole.User
const isSystem = props.role === ChatCompletionRole.System
const editMessage = useAtomValue(editMessageAtom)
const activeThread = useAtomValue(activeThreadAtom)
const tokenSpeed = useAtomValue(tokenSpeedAtom)
const messages = useAtomValue(getCurrentChatMessagesAtom)
const text = useMemo(
() => props.content[0]?.text?.value ?? '',
[props.content]
)
const messageType = useMemo(
() => props.content[0]?.type ?? '',
[props.content]
)
return (
<div className="group relative mx-auto max-w-[700px] p-4">
<div
className={twMerge(
'mb-2 flex items-center justify-start gap-x-2',
!isUser && 'mt-2'
)}
>
{!isUser && !isSystem && <LogoMark width={28} />}
{isUser && (
<div className="flex h-8 w-8 items-center justify-center rounded-full border border-[hsla(var(--app-border))] last:border-none">
<svg
width="12"
height="16"
viewBox="0 0 12 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M6 0.497864C4.34315 0.497864 3 1.84101 3 3.49786C3 5.15472 4.34315 6.49786 6 6.49786C7.65685 6.49786 9 5.15472 9 3.49786C9 1.84101 7.65685 0.497864 6 0.497864ZM9.75 7.99786L2.24997 7.99787C1.00734 7.99787 0 9.00527 0 10.2479C0 11.922 0.688456 13.2633 1.81822 14.1701C2.93013 15.0625 4.42039 15.4979 6 15.4979C7.57961 15.4979 9.06987 15.0625 10.1818 14.1701C11.3115 13.2633 12 11.922 12 10.2479C12 9.00522 10.9926 7.99786 9.75 7.99786Z"
fill="#9CA3AF"
/>
</svg>
</div>
)}
<div
className={twMerge(
'font-extrabold capitalize',
isUser && 'text-gray-500'
)}
>
{isUser
? props.role
: (activeThread?.assistants[0].assistant_name ?? props.role)}
</div>
<p className="text-xs font-medium text-gray-400">
{displayDate(props.created)}
</p>
<div
className={twMerge(
'absolute right-0 cursor-pointer transition-all',
messages[messages.length - 1]?.id === props.id && !isUser
? 'absolute -bottom-8 right-4'
: 'hidden group-hover:absolute group-hover:right-4 group-hover:top-4 group-hover:flex'
)}
>
<MessageToolbar message={props} />
</div>
{tokenSpeed &&
tokenSpeed.message === props.id &&
tokenSpeed.tokenSpeed > 0 && (
<p className="absolute right-8 text-xs font-medium text-[hsla(var(--text-secondary))]">
Token Speed: {Number(tokenSpeed.tokenSpeed).toFixed(2)}t/s
</p>
)}
</div>
<div
className={twMerge(
'w-full',
!isUser && !text.includes(' ') && 'break-all'
)}
>
<>
{messageType === ContentType.Image && (
<ImageMessage content={props.content[0]} />
)}
{messageType === ContentType.Pdf && (
<DocMessage
id={props.id}
name={props.content[0]?.text?.name}
size={props.content[0]?.text?.size}
/>
)}
{editMessage === props.id ? (
<div>
<EditChatInput message={props} />
</div>
) : (
<div
className={twMerge(
'message max-width-[100%] flex flex-col gap-y-2 overflow-x-auto overflow-y-hidden leading-relaxed'
)}
dir="ltr"
>
<MarkdownTextMessage id={props.id} text={text} />
</div>
)}
</>
</div>
</div>
)
}
export default React.memo(MessageContainer)

View File

@@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/naming-convention */
-import { useEffect, useState } from 'react'
+import { memo, useEffect, useState } from 'react'
import { Accept, useDropzone } from 'react-dropzone'
@@ -232,4 +232,4 @@ const ThreadCenterPanel = () => {
  )
}
-export default ThreadCenterPanel
+export default memo(ThreadCenterPanel)

Some files were not shown because too many files have changed in this diff.