Merge branch 'dev' into fix2345

commit 768637712f
Author: Daniel
Date: 2024-03-14 21:34:09 +07:00 (committed by GitHub)
6 changed files with 30 additions and 21 deletions


@@ -76,31 +76,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your compute
 <tr style="text-align:center">
   <td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
   <td style="text-align:center">
-    <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.8-321.exe'>
+    <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.8-322.exe'>
       <img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
       <b>jan.exe</b>
     </a>
   </td>
   <td style="text-align:center">
-    <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.8-321.dmg'>
+    <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.8-322.dmg'>
       <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
       <b>Intel</b>
     </a>
   </td>
   <td style="text-align:center">
-    <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.8-321.dmg'>
+    <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.8-322.dmg'>
       <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
       <b>M1/M2</b>
     </a>
   </td>
   <td style="text-align:center">
-    <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.8-321.deb'>
+    <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.8-322.deb'>
       <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
       <b>jan.deb</b>
     </a>
   </td>
   <td style="text-align:center">
-    <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.8-321.AppImage'>
+    <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.8-322.AppImage'>
       <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
       <b>jan.AppImage</b>
     </a>


@@ -2,17 +2,17 @@ import { GpuSetting, GpuSettingInfo, ResourceInfo } from '@janhq/core'
 import { getJanDataFolderPath, log } from '@janhq/core/node'
 import { mem, cpu } from 'node-os-utils'
 import { exec } from 'child_process'
-import { writeFileSync, existsSync, readFileSync } from 'fs'
+import { writeFileSync, existsSync, readFileSync, mkdirSync } from 'fs'
 import path from 'path'
 
+/**
+ * Path to the settings directory
+ **/
+export const SETTINGS_DIR = path.join(getJanDataFolderPath(), 'settings')
 /**
  * Path to the settings file
  **/
-export const GPU_INFO_FILE = path.join(
-  getJanDataFolderPath(),
-  'settings',
-  'settings.json'
-)
+export const GPU_INFO_FILE = path.join(SETTINGS_DIR, 'settings.json')
 
 /**
  * Default GPU settings
@@ -136,6 +136,11 @@ export const updateNvidiaInfo = async () => {
   try {
     JSON.parse(readFileSync(GPU_INFO_FILE, 'utf-8'))
   } catch (error) {
+    if (!existsSync(SETTINGS_DIR)) {
+      mkdirSync(SETTINGS_DIR, {
+        recursive: true,
+      })
+    }
     writeFileSync(GPU_INFO_FILE, JSON.stringify(DEFAULT_SETTINGS, null, 2))
   }
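Note on the hunk above: this is the ensure-directory-then-write pattern. If settings.json is missing or unparseable, the settings directory is created first so the fallback writeFileSync cannot fail with ENOENT on a fresh data folder. A minimal standalone TypeScript sketch of the same pattern, with placeholder paths and defaults (the extension derives SETTINGS_DIR from getJanDataFolderPath() and ships its own DEFAULT_SETTINGS):

import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'
import path from 'path'

// Placeholder values for illustration only.
const SETTINGS_DIR = path.join('/tmp/jan-data', 'settings')
const GPU_INFO_FILE = path.join(SETTINGS_DIR, 'settings.json')
const DEFAULT_SETTINGS = { notify: true }

export const ensureGpuSettingsFile = (): void => {
  try {
    // If the file already exists and parses as JSON, leave it untouched.
    JSON.parse(readFileSync(GPU_INFO_FILE, 'utf-8'))
  } catch {
    // Missing or corrupt file: create the parent directory first,
    // then write the defaults, so the write cannot throw ENOENT.
    if (!existsSync(SETTINGS_DIR)) {
      mkdirSync(SETTINGS_DIR, { recursive: true })
    }
    writeFileSync(GPU_INFO_FILE, JSON.stringify(DEFAULT_SETTINGS, null, 2))
  }
}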


@@ -33,10 +33,10 @@
       "description": "LlamaCorn is a refined version of TinyLlama-1.1B, optimized for conversational quality, running on consumer devices through TensorRT-LLM",
       "format": "TensorRT-LLM",
       "settings": {
-        "ctx_len": 2048
+        "ctx_len": 2048,
+        "text_model": false
       },
       "parameters": {
+        "stream": true,
         "max_tokens": 4096
       },
       "metadata": {


@@ -19,6 +19,7 @@ import {
   systemInformations,
   LocalOAIEngine,
   fs,
+  MessageRequest,
 } from '@janhq/core'
 import models from '../models.json'
@@ -144,4 +145,10 @@ export default class TensorRTLLMExtension extends LocalOAIEngine {
     )
     return Promise.resolve()
   }
+
+  inference(data: MessageRequest): void {
+    // TensorRT LLM Extension supports streaming only
+    if (data.model) data.model.parameters.stream = true
+    super.inference(data)
+  }
 }
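Note on the hunk above: the new inference override coerces every request onto the streaming path before delegating to the base engine, matching the comment that the extension supports streaming only. A reduced sketch of that override pattern, with stand-in types (the real MessageRequest and LocalOAIEngine come from '@janhq/core' and carry many more members):

// Stand-in types for illustration only.
interface MessageRequest {
  model?: { parameters: { stream?: boolean; [key: string]: unknown } }
}

class LocalOAIEngine {
  inference(data: MessageRequest): void {
    // The real base class dispatches the request to the inference server.
    console.log('dispatching request, stream =', data.model?.parameters.stream)
  }
}

class TensorRTLLMExtension extends LocalOAIEngine {
  inference(data: MessageRequest): void {
    // Force streaming regardless of what the caller asked for,
    // then defer to the base class.
    if (data.model) data.model.parameters.stream = true
    super.inference(data)
  }
}

// Even a request that asked for stream: false goes out as streaming.
new TensorRTLLMExtension().inference({ model: { parameters: { stream: false } } })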


@@ -244,16 +244,13 @@ const ChatInput: React.FC = () => {
           <li
             className={twMerge(
               'flex w-full cursor-pointer items-center space-x-2 px-4 py-2 text-muted-foreground hover:bg-secondary',
-              activeThread?.assistants[0].model.settings.vision_model &&
-                activeThread?.assistants[0].model.settings
-                  .text_model === false
+              activeThread?.assistants[0].model.settings.text_model ===
+                false
                 ? 'cursor-not-allowed opacity-50'
                 : 'cursor-pointer'
             )}
             onClick={() => {
               if (
-                !activeThread?.assistants[0].model.settings
-                  .vision_model ||
                 activeThread?.assistants[0].model.settings
                   .text_model !== false
               ) {
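Note on the hunk above: the disabled check is simplified so that text_model === false alone greys out the item, regardless of vision_model, and the onClick guard drops its vision_model test to match. A small sketch of the before and after predicates, with the settings object reduced to the two flags named in the diff:

// Only the two flags touched by this change; the real settings object is larger.
type ModelSettings = { vision_model?: boolean; text_model?: boolean }

// Before: disabled only for vision models that also had text_model === false.
const isDisabledBefore = (s?: ModelSettings) =>
  Boolean(s?.vision_model) && s?.text_model === false

// After: disabled whenever text_model === false, vision or not.
const isDisabledAfter = (s?: ModelSettings) => s?.text_model === false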


@@ -123,6 +123,7 @@ const TensorRtExtensionItem: React.FC<Props> = ({ item }) => {
           {item.description}
         </p>
       </div>
       {(!compatibility || compatibility['platform']?.includes(PLATFORM)) &&
       isGpuSupported ? (
         <div className="flex min-w-[150px] flex-row justify-end">
@@ -185,15 +186,14 @@ const InstallStateIndicator: React.FC<InstallStateProps> = ({
   onInstallClick,
   onCancelClick,
 }) => {
-  // TODO: NamH support dark mode for this
   if (installProgress !== -1) {
     const progress = installProgress * 100
     return (
-      <div className="flex h-10 flex-row items-center justify-center space-x-2 rounded-md bg-[#EFF8FF] px-4 text-primary">
+      <div className="flex h-10 flex-row items-center justify-center space-x-2 rounded-lg bg-[#EFF8FF] px-4 text-primary dark:bg-secondary">
         <button onClick={onCancelClick} className="font-semibold text-primary">
           Cancel
         </button>
-        <div className="flex w-[113px] flex-row items-center justify-center space-x-2 rounded-md bg-[#D1E9FF] px-2 py-[2px]">
+        <div className="flex w-[113px] flex-row items-center justify-center space-x-2 rounded-md bg-[#D1E9FF] px-2 py-[2px] dark:bg-black/50">
          <Progress className="h-1 w-[69px]" value={progress} />
          <span className="text-xs font-bold text-primary">
            {progress.toFixed(0)}%