Merge pull request #3493 from janhq/main

chore: Sync main to dev 0.5.3
This commit is contained in:
Van Pham 2024-08-29 18:52:49 +07:00 committed by GitHub
commit 0f84a57545
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 140 additions and 52 deletions

View File

@ -341,6 +341,11 @@ export const chatCompletions = async (request: any, reply: any) => {
request.body.stop = request.body.stop.slice(0, 4)
}
// add engine for new cortex cpp engine
if (requestedModel.engine === 'nitro') {
request.body.engine = 'cortex.llamacpp'
}
const fetch = require('node-fetch')
const response = await fetch(apiUrl, {
method: 'POST',

View File

@ -1,7 +1,7 @@
{
"sources": [
{
"filename": "gemma-1.1-2b-it-q4_k_m.gguf",
"filename": "gemma-1.1-2b-it-Q4_K_M.gguf",
"url": "https://huggingface.co/bartowski/gemma-1.1-2b-it-GGUF/resolve/main/gemma-1.1-2b-it-Q4_K_M.gguf"
}
],

View File

@ -1,7 +1,7 @@
{
"sources": [
{
"filename": "gemma-1.1-7b-it-q4_K_M.gguf",
"filename": "gemma-1.1-7b-it-Q4_K_M.gguf",
"url": "https://huggingface.co/bartowski/gemma-1.1-7b-it-GGUF/resolve/main/gemma-1.1-7b-it-Q4_K_M.gguf"
}
],
@ -14,7 +14,7 @@
"settings": {
"ctx_len": 8192,
"prompt_template": "<start_of_turn>user\n{prompt}<end_of_turn>\n<start_of_turn>model",
"llama_model_path": "gemma-1.1-7b-it-q4_K_M.gguf",
"llama_model_path": "gemma-1.1-7b-it-Q4_K_M.gguf",
"ngl": 29
},
"parameters": {

View File

@ -11,6 +11,9 @@ module.exports = {
'_next',
'*.md',
'out',
'**/*.test.tsx',
'**/*.test.ts',
'testRunner.js',
],
extends: [
'next/core-web-vitals',

View File

@ -19,6 +19,8 @@ import { usePath } from '@/hooks/usePath'
import { toGibibytes } from '@/utils/converter'
import { utilizedMemory } from '@/utils/memory'
import TableActiveModel from './TableActiveModel'
import { showSystemMonitorPanelAtom } from '@/helpers/atoms/App.atom'
@ -159,35 +161,41 @@ const SystemMonitor = () => {
{gpus.length > 0 && (
<div className="mb-4 border-b border-[hsla(var(--app-border))] pb-4 last:border-none">
{gpus.map((gpu, index) => (
<div key={index} className="mt-4 flex flex-col gap-x-2">
<div className="flex w-full items-start justify-between">
<span className="line-clamp-1 w-1/2 font-bold">
{gpu.name}
</span>
<div className="flex gap-x-2">
<div className="">
<span>
{gpu.memoryTotal - gpu.memoryFree}/
{gpu.memoryTotal}
</span>
<span> MB</span>
{gpus.map((gpu, index) => {
const gpuUtilization = utilizedMemory(
gpu.memoryFree,
gpu.memoryTotal
)
return (
<div key={index} className="mt-4 flex flex-col gap-x-2">
<div className="flex w-full items-start justify-between">
<span className="line-clamp-1 w-1/2 font-bold">
{gpu.name}
</span>
<div className="flex gap-x-2">
<div className="">
<span>
{gpu.memoryTotal - gpu.memoryFree}/
{gpu.memoryTotal}
</span>
<span> MB</span>
</div>
</div>
</div>
</div>
<div className="flex items-center gap-x-4">
<Progress
value={gpu.utilization}
className="w-full"
size="small"
/>
<span className="flex-shrink-0 ">
{gpu.utilization}%
</span>
<div className="flex items-center gap-x-4">
<Progress
value={gpuUtilization}
className="w-full"
size="small"
/>
<span className="flex-shrink-0 ">
{gpuUtilization}%
</span>
</div>
</div>
</div>
))}
)
})}
</div>
)}
</div>

View File

@ -299,7 +299,10 @@ const ModelDropdown = ({
<Badge
theme="secondary"
variant={open ? 'solid' : 'outline'}
className="cursor-pointer"
className={twMerge(
'cursor-pointer',
open && 'border border-transparent'
)}
onClick={() => setOpen(!open)}
>
<span className="line-clamp-1 ">{selectedModel?.name}</span>

View File

@ -6,9 +6,7 @@ import {
Model,
ModelExtension,
OptionType,
baseName,
fs,
joinPath,
} from '@janhq/core'
import { atom, useSetAtom } from 'jotai'

5
web/jest.config.js Normal file
View File

@ -0,0 +1,5 @@
// Jest configuration for the web workspace.
module.exports = {
  // Compile TypeScript test files with ts-jest.
  preset: 'ts-jest',
  // Run tests in a plain Node environment (no jsdom/DOM globals).
  testEnvironment: 'node',
  // Custom runner that skips zero-byte test files — see ./testRunner.js.
  runner: './testRunner.js',
}

View File

@ -10,7 +10,8 @@
"lint": "eslint .",
"lint:fix": "eslint . --fix",
"format": "prettier --write \"**/*.{js,jsx,ts,tsx}\"",
"compile": "tsc --noEmit -p . --pretty"
"compile": "tsc --noEmit -p . --pretty",
"test": "jest"
},
"dependencies": {
"@heroicons/react": "^2.0.18",
@ -19,9 +20,9 @@
"@janhq/joi": "link:./joi",
"autoprefixer": "10.4.16",
"class-variance-authority": "^0.7.0",
"csstype": "^3.0.10",
"framer-motion": "^10.16.4",
"highlight.js": "^11.9.0",
"postcss-url": "10.1.3",
"jotai": "^2.6.0",
"katex": "^0.16.10",
"lodash": "^4.17.21",
@ -32,6 +33,7 @@
"next": "14.2.3",
"next-themes": "^0.2.1",
"postcss": "8.4.31",
"postcss-url": "10.1.3",
"posthog-js": "^1.95.1",
"react": "18.2.0",
"react-circular-progressbar": "^2.1.0",
@ -39,7 +41,6 @@
"react-dropzone": "^14.2.3",
"react-hook-form": "^7.47.0",
"react-hot-toast": "^2.4.1",
"csstype": "^3.0.10",
"react-icons": "^4.12.0",
"react-scroll-to-bottom": "^4.2.0",
"react-toastify": "^9.1.3",
@ -47,12 +48,13 @@
"tailwind-merge": "^2.0.0",
"tailwindcss": "3.3.5",
"ulidx": "^2.3.0",
"uuid": "^9.0.1",
"use-debounce": "^10.0.0",
"uuid": "^9.0.1",
"zod": "^3.22.4"
},
"devDependencies": {
"@next/eslint-plugin-next": "^14.0.1",
"@types/jest": "^29.5.12",
"@types/lodash": "^4.14.200",
"@types/node": "20.8.10",
"@types/react": "18.2.34",
@ -72,9 +74,11 @@
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-react": "^7.34.0",
"eslint-plugin-react-hooks": "^4.6.0",
"jest-runner": "^29.7.0",
"prettier": "^3.0.3",
"prettier-plugin-tailwindcss": "^0.5.6",
"rimraf": "^5.0.5",
"ts-jest": "^29.2.5",
"typescript": "^5.3.3"
}
}

View File

@ -29,7 +29,7 @@ const ModelDownloadList = () => {
return (
<div className="flex h-[500px] flex-1 flex-col">
<h1 className="mb-3 font-semibold">Available Versions</h1>
<ScrollArea className="w-full lg:flex-1">
<ScrollArea className="w-full lg:h-full lg:flex-1">
{ggufModels.map((model, index) => {
if (!model.downloadUrl) return null
return (

View File

@ -10,6 +10,8 @@ import { Badge, Button, Progress } from '@janhq/joi'
import { useAtomValue, useSetAtom } from 'jotai'
import { twMerge } from 'tailwind-merge'
import { MainViewState } from '@/constants/screens'
import { useCreateNewThread } from '@/hooks/useCreateNewThread'
@ -114,16 +116,24 @@ const ModelDownloadRow: React.FC<Props> = ({
}
return (
<div className="flex flex-col gap-4 space-x-1 rounded border border-[hsla(var(--app-border))] p-3 md:flex-row md:items-center md:justify-between lg:w-[550px]">
<div className="flex">
{quantization && (
<Badge variant="soft" className="mr-1">
{quantization}
</Badge>
)}
<h1 className="mr-5 line-clamp-1 font-medium text-[hsla(var(--text-secondary))]">
{fileName}
</h1>
<div className="flex flex-col gap-4 rounded border border-[hsla(var(--app-border))] p-3 md:flex-row md:items-center md:justify-between xl:w-full">
<div className="flex justify-between">
<div className="flex">
{quantization && (
<Badge variant="soft" className="mr-1">
{quantization}
</Badge>
)}
<h1
className={twMerge(
'mr-5 line-clamp-1 font-medium text-[hsla(var(--text-secondary))]',
quantization && 'max-w-[25ch]'
)}
title={fileName}
>
{fileName}
</h1>
</div>
<Badge theme="secondary" className="hidden md:flex">
{toGibibytes(fileSize)}
</Badge>

View File

@ -33,7 +33,7 @@ const ModelSegmentInfo = () => {
if (!importingHuggingFaceRepoData) return null
return (
<div className="flex w-full flex-col space-y-4">
<div className="flex w-full flex-col space-y-4 lg:w-1/3">
<HeaderInfo title={'Model ID'}>
<h1 className="font-medium text-zinc-500 dark:text-gray-300">
{modelName}

View File

@ -388,7 +388,11 @@ const ChatInput = () => {
<ModelDropdown chatInputMode />
<Badge
theme="secondary"
className="flex cursor-pointer items-center gap-x-1"
className={twMerge(
'flex cursor-pointer items-center gap-x-1',
activeTabThreadRightPanel === 'model' &&
'border border-transparent'
)}
variant={
activeTabThreadRightPanel === 'model' ? 'solid' : 'outline'
}

View File

@ -164,9 +164,9 @@ const ThreadLeftPanel = () => {
>
<PencilIcon
size={16}
className="text-[hsla(var(--secondary))]"
className="text-[hsla(var(--text-secondary))]"
/>
<span className="text-bold text-[hsla(var(--secondary))]">
<span className="text-bold text-[hsla(var(--app-text-primary))]">
Edit title
</span>
</div>

19
web/testRunner.js Normal file
View File

@ -0,0 +1,19 @@
const { default: DefaultJestRunner } = require('jest-runner')

/**
 * Jest runner that filters out zero-byte test files before delegating to the
 * default runner, so empty placeholder spec files do not fail the suite.
 */
class EmptyTestFileRunner extends DefaultJestRunner {
  async runTests(tests, watcher, onStart, onResult, onFailure, options) {
    // Keep only tests whose file size (per Jest's haste file system) is > 0.
    const runnable = tests.filter(
      (t) => t.context.hasteFS.getSize(t.path) > 0
    )
    return super.runTests(runnable, watcher, onStart, onResult, onFailure, options)
  }
}

module.exports = EmptyTestFileRunner

View File

@ -2,7 +2,11 @@
"compilerOptions": {
"target": "ES2015",
"lib": ["dom", "dom.iterable", "esnext"],
"typeRoots": ["node_modules/@types", "./src/types"],
"typeRoots": [
"./node_modules/@types",
"./src/types",
"../node_modules/@types/jest"
],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
@ -25,5 +29,5 @@
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
"exclude": ["node_modules", "**/*.test.ts"]
}

13
web/utils/memory.test.ts Normal file
View File

@ -0,0 +1,13 @@
// @auto-generated
import { utilizedMemory } from './memory'

// 70% of memory is used when 30 of 100 units are free.
test('test_utilizedMemory_arbitraryValues', () => {
  expect(utilizedMemory(30, 100)).toBe(70)
})

// Fully free memory reports 0% utilization.
test('test_utilizedMemory_freeEqualsTotal', () => {
  expect(utilizedMemory(100, 100)).toBe(0)
})

9
web/utils/memory.ts Normal file
View File

@ -0,0 +1,9 @@
/**
* Calculate the percentage of memory used
* @param free
* @param total
* @returns
*/
export const utilizedMemory = (free: number, total: number) => {
return Math.round(((total - free) / Math.max(total, 1)) * 100)
}

View File

@ -4,6 +4,7 @@ export const getLogoEngine = (engine: InferenceEngine) => {
switch (engine) {
case InferenceEngine.anthropic:
return 'images/ModelProvider/anthropic.svg'
case InferenceEngine.nitro_tensorrt_llm:
case InferenceEngine.nitro:
return 'images/ModelProvider/nitro.svg'
case InferenceEngine.cortex_llamacpp:
@ -43,6 +44,8 @@ export const getTitleByEngine = (engine: InferenceEngine) => {
switch (engine) {
case InferenceEngine.nitro:
return 'Llama.cpp (Nitro)'
case InferenceEngine.nitro_tensorrt_llm:
return 'TensorRT-LLM (Nitro)'
case InferenceEngine.cortex_llamacpp:
return 'Llama.cpp (Cortex)'
case InferenceEngine.cortex_onnx: