Merge branch 'dev' into fix2345
commit 768637712f

README.md
@@ -76,31 +76,31 @@ Jan is an open-source ChatGPT alternative that runs 100% offline on your computer
   <tr style="text-align:center">
     <td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
     <td style="text-align:center">
-      <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.8-321.exe'>
+      <a href='https://delta.jan.ai/latest/jan-win-x64-0.4.8-322.exe'>
         <img src='./docs/static/img/windows.png' style="height:14px; width: 14px" />
         <b>jan.exe</b>
       </a>
     </td>
     <td style="text-align:center">
-      <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.8-321.dmg'>
+      <a href='https://delta.jan.ai/latest/jan-mac-x64-0.4.8-322.dmg'>
         <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
         <b>Intel</b>
       </a>
     </td>
     <td style="text-align:center">
-      <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.8-321.dmg'>
+      <a href='https://delta.jan.ai/latest/jan-mac-arm64-0.4.8-322.dmg'>
         <img src='./docs/static/img/mac.png' style="height:15px; width: 15px" />
         <b>M1/M2</b>
       </a>
     </td>
     <td style="text-align:center">
-      <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.8-321.deb'>
+      <a href='https://delta.jan.ai/latest/jan-linux-amd64-0.4.8-322.deb'>
         <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.deb</b>
       </a>
     </td>
     <td style="text-align:center">
-      <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.8-321.AppImage'>
+      <a href='https://delta.jan.ai/latest/jan-linux-x86_64-0.4.8-322.AppImage'>
         <img src='./docs/static/img/linux.png' style="height:14px; width: 14px" />
         <b>jan.AppImage</b>
       </a>

@@ -2,17 +2,17 @@ import { GpuSetting, GpuSettingInfo, ResourceInfo } from '@janhq/core'
 import { getJanDataFolderPath, log } from '@janhq/core/node'
 import { mem, cpu } from 'node-os-utils'
 import { exec } from 'child_process'
-import { writeFileSync, existsSync, readFileSync } from 'fs'
+import { writeFileSync, existsSync, readFileSync, mkdirSync } from 'fs'
 import path from 'path'

+/**
+ * Path to the settings directory
+ **/
+export const SETTINGS_DIR = path.join(getJanDataFolderPath(), 'settings')
 /**
  * Path to the settings file
  **/
-export const GPU_INFO_FILE = path.join(
-  getJanDataFolderPath(),
-  'settings',
-  'settings.json'
-)
+export const GPU_INFO_FILE = path.join(SETTINGS_DIR, 'settings.json')

 /**
  * Default GPU settings

@@ -136,6 +136,11 @@ export const updateNvidiaInfo = async () => {
   try {
     JSON.parse(readFileSync(GPU_INFO_FILE, 'utf-8'))
   } catch (error) {
+    if (!existsSync(SETTINGS_DIR)) {
+      mkdirSync(SETTINGS_DIR, {
+        recursive: true,
+      })
+    }
     writeFileSync(GPU_INFO_FILE, JSON.stringify(DEFAULT_SETTINGS, null, 2))
   }

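Note: taken together, the two hunks above move the GPU settings file under a dedicated SETTINGS_DIR constant and create that directory before writing defaults when settings.json is missing or unreadable. A minimal standalone sketch of the same ensure-directory-then-write pattern follows; the locations and the DEFAULT_SETTINGS value are illustrative placeholders, not the extension's actual values.

import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'
import path from 'path'

// Placeholder paths; the real extension derives SETTINGS_DIR from getJanDataFolderPath().
const SETTINGS_DIR = path.join(process.cwd(), 'settings')
const GPU_INFO_FILE = path.join(SETTINGS_DIR, 'settings.json')
// Hypothetical defaults, standing in for the extension's DEFAULT_SETTINGS object.
const DEFAULT_SETTINGS = { run_mode: 'cpu' }

export const ensureGpuSettingsFile = (): void => {
  try {
    // If the file exists and parses, leave it untouched.
    JSON.parse(readFileSync(GPU_INFO_FILE, 'utf-8'))
  } catch (error) {
    // Missing or corrupt file: create the settings directory first, then write defaults.
    if (!existsSync(SETTINGS_DIR)) {
      mkdirSync(SETTINGS_DIR, { recursive: true })
    }
    writeFileSync(GPU_INFO_FILE, JSON.stringify(DEFAULT_SETTINGS, null, 2))
  }
}
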
@@ -33,10 +33,10 @@
     "description": "LlamaCorn is a refined version of TinyLlama-1.1B, optimized for conversational quality, running on consumer devices through TensorRT-LLM",
     "format": "TensorRT-LLM",
     "settings": {
-      "ctx_len": 2048
+      "ctx_len": 2048,
+      "text_model": false
     },
     "parameters": {
       "stream": true,
       "max_tokens": 4096
     },
     "metadata": {

@@ -19,6 +19,7 @@ import {
   systemInformations,
   LocalOAIEngine,
   fs,
+  MessageRequest,
 } from '@janhq/core'
 import models from '../models.json'

@@ -144,4 +145,10 @@ export default class TensorRTLLMExtension extends LocalOAIEngine {
     )
     return Promise.resolve()
   }
+
+  inference(data: MessageRequest): void {
+    // TensorRT LLM Extension supports streaming only
+    if (data.model) data.model.parameters.stream = true
+    super.inference(data)
+  }
 }

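Note: the inference() override added above forces streaming on every request before delegating to the base engine, because the TensorRT-LLM extension only supports streamed responses. A self-contained sketch of that pattern, using simplified stand-ins for the @janhq/core types:

// Simplified stand-ins for the @janhq/core request/model types.
interface ModelParams {
  stream?: boolean
  max_tokens?: number
}
interface ThreadModel {
  id: string
  parameters: ModelParams
}
interface MessageRequest {
  model?: ThreadModel
  messages: Array<{ role: string; content: string }>
}

class BaseEngine {
  inference(data: MessageRequest): void {
    console.log('dispatching request with params:', data.model?.parameters)
  }
}

class StreamingOnlyEngine extends BaseEngine {
  override inference(data: MessageRequest): void {
    // This engine supports streaming only, so force the flag before dispatching.
    if (data.model) data.model.parameters.stream = true
    super.inference(data)
  }
}

// Even a request that asks for stream: false is dispatched with stream: true.
new StreamingOnlyEngine().inference({
  model: { id: 'example-model', parameters: { stream: false, max_tokens: 4096 } },
  messages: [{ role: 'user', content: 'Hello' }],
})
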
@@ -244,16 +244,13 @@ const ChatInput: React.FC = () => {
           <li
             className={twMerge(
               'flex w-full cursor-pointer items-center space-x-2 px-4 py-2 text-muted-foreground hover:bg-secondary',
               activeThread?.assistants[0].model.settings.vision_model &&
-                activeThread?.assistants[0].model.settings
-                  .text_model === false
+                activeThread?.assistants[0].model.settings.text_model ===
+                  false
                 ? 'cursor-not-allowed opacity-50'
                 : 'cursor-pointer'
             )}
             onClick={() => {
               if (
                 !activeThread?.assistants[0].model.settings
                   .vision_model ||
                 activeThread?.assistants[0].model.settings
                   .text_model !== false
               ) {

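Note: the ChatInput hunk above reflows the condition that greys out the image-attachment item when the active model is vision-only, i.e. vision_model is set and text_model is explicitly false (the flag introduced in the models.json hunk). A small hedged sketch of that gating rule as a standalone predicate, with a simplified settings shape:

// Simplified shape of the model settings fields referenced in the diff.
interface ModelSettings {
  vision_model?: boolean
  text_model?: boolean
}

// Image input is blocked only for vision-only models: vision_model set and
// text_model explicitly false. Everything else keeps the item clickable.
const isImageInputDisabled = (settings?: ModelSettings): boolean =>
  Boolean(settings?.vision_model) && settings?.text_model === false

console.log(isImageInputDisabled({ vision_model: true, text_model: false })) // true
console.log(isImageInputDisabled({ vision_model: true })) // false
console.log(isImageInputDisabled(undefined)) // false
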
@@ -123,6 +123,7 @@ const TensorRtExtensionItem: React.FC<Props> = ({ item }) => {
           {item.description}
         </p>
       </div>

       {(!compatibility || compatibility['platform']?.includes(PLATFORM)) &&
       isGpuSupported ? (
         <div className="flex min-w-[150px] flex-row justify-end">

@@ -185,15 +186,14 @@ const InstallStateIndicator: React.FC<InstallStateProps> = ({
   onInstallClick,
   onCancelClick,
 }) => {
-  // TODO: NamH support dark mode for this
   if (installProgress !== -1) {
     const progress = installProgress * 100
     return (
-      <div className="flex h-10 flex-row items-center justify-center space-x-2 rounded-md bg-[#EFF8FF] px-4 text-primary">
+      <div className="flex h-10 flex-row items-center justify-center space-x-2 rounded-lg bg-[#EFF8FF] px-4 text-primary dark:bg-secondary">
         <button onClick={onCancelClick} className="font-semibold text-primary">
           Cancel
         </button>
-        <div className="flex w-[113px] flex-row items-center justify-center space-x-2 rounded-md bg-[#D1E9FF] px-2 py-[2px]">
+        <div className="flex w-[113px] flex-row items-center justify-center space-x-2 rounded-md bg-[#D1E9FF] px-2 py-[2px] dark:bg-black/50">
           <Progress className="h-1 w-[69px]" value={progress} />
           <span className="text-xs font-bold text-primary">
             {progress.toFixed(0)}%