Bring QA (0.6.9) changes to dev (#6296)

* fix: check for env value before setting (#6266)

* fix: check for env value before setting

* Use empty instead of none

* fix: update linux build script to be consistent with CI (#6269)

The local build script for Linux was failing due to a bundling error. This commit updates the `build:tauri:linux` script in `package.json` to be consistent with the CI build pipeline, which resolves the issue.

The updated script now includes:
- **`NO_STRIP=1`**: This environment variable prevents the `linuxdeploy` utility from stripping debugging symbols, which was a potential cause of the bundling failure.
- **`--verbose`**: This flag provides more detailed output during the build, which can be useful for debugging similar issues in the future.

* fix: compatibility imported model

* fix: update copy mmproj setting desc

* fix: toggle vision for remote model

* chore: add vision tooltips

* chore: show model setting only for local provider

* fix: update UI info

* chore: update filter hub while searching

* fix: system monitor window permission

* chore: update credit description

---------

Co-authored-by: Akarshan Biswas <akarshan.biswas@gmail.com>
Co-authored-by: Faisal Amir <urmauur@gmail.com>
Co-authored-by: Minh141120 <minh.itptit@gmail.com>
Co-authored-by: Nguyen Ngoc Minh <91668012+Minh141120@users.noreply.github.com>
This commit is contained in:
Dinh Long Nguyen 2025-08-26 15:35:56 +07:00 committed by GitHub
parent b5fbba6c81
commit 02f7b88dab
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 185 additions and 171 deletions

View File

@ -16,7 +16,7 @@
"description": "Environmental variables for llama.cpp(KEY=VALUE), separated by ';'",
"controllerType": "input",
"controllerProps": {
"value": "none",
"value": "",
"placeholder": "Eg. GGML_VK_VISIBLE_DEVICES=0,1",
"type": "text",
"textAlign": "right"

View File

@ -1082,9 +1082,9 @@ export default class llamacpp_extension extends AIEngine {
// If we reach here, download completed successfully (including validation)
// The downloadFiles function only returns successfully if all files downloaded AND validated
events.emit(DownloadEvent.onFileDownloadAndVerificationSuccess, {
modelId,
downloadType: 'Model'
events.emit(DownloadEvent.onFileDownloadAndVerificationSuccess, {
modelId,
downloadType: 'Model',
})
} catch (error) {
logger.error('Error downloading model:', modelId, opts, error)
@ -1092,7 +1092,8 @@ export default class llamacpp_extension extends AIEngine {
error instanceof Error ? error.message : String(error)
// Check if this is a cancellation
const isCancellationError = errorMessage.includes('Download cancelled') ||
const isCancellationError =
errorMessage.includes('Download cancelled') ||
errorMessage.includes('Validation cancelled') ||
errorMessage.includes('Hash computation cancelled') ||
errorMessage.includes('cancelled') ||
@ -1372,7 +1373,7 @@ export default class llamacpp_extension extends AIEngine {
envs['LLAMA_API_KEY'] = api_key
// set user envs
this.parseEnvFromString(envs, this.llamacpp_env)
if (this.llamacpp_env) this.parseEnvFromString(envs, this.llamacpp_env)
// model option is required
// NOTE: model_path and mmproj_path can be either relative to Jan's data folder or absolute path
@ -1751,7 +1752,7 @@ export default class llamacpp_extension extends AIEngine {
}
// set envs
const envs: Record<string, string> = {}
this.parseEnvFromString(envs, this.llamacpp_env)
if (this.llamacpp_env) this.parseEnvFromString(envs, this.llamacpp_env)
// Ensure backend is downloaded and ready before proceeding
await this.ensureBackendReady(backend, version)
@ -1767,7 +1768,7 @@ export default class llamacpp_extension extends AIEngine {
return dList
} catch (error) {
logger.error('Failed to query devices:\n', error)
throw new Error("Failed to load llamacpp backend")
throw new Error('Failed to load llamacpp backend')
}
}
@ -1876,7 +1877,7 @@ export default class llamacpp_extension extends AIEngine {
logger.info(
`Using explicit key_length: ${keyLen}, value_length: ${valLen}`
)
headDim = (keyLen + valLen)
headDim = keyLen + valLen
} else {
// Fall back to embedding_length estimation
const embeddingLen = Number(meta[`${arch}.embedding_length`])

View File

@ -22,7 +22,7 @@
"download:lib": "node ./scripts/download-lib.mjs",
"download:bin": "node ./scripts/download-bin.mjs",
"build:tauri:win32": "yarn download:bin && yarn tauri build",
"build:tauri:linux": "yarn download:bin && ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh",
"build:tauri:linux": "yarn download:bin && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build --verbose && ./src-tauri/build-utils/buildAppImage.sh",
"build:tauri:darwin": "yarn tauri build --target universal-apple-darwin",
"build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os",
"build:tauri:plugin:api": "cd src-tauri/plugins && yarn install && yarn workspaces foreach -Apt run build",

View File

@ -9,6 +9,11 @@
"core:window:allow-set-theme",
"log:default",
"core:webview:allow-create-webview-window",
"core:window:allow-set-focus"
"core:window:allow-set-focus",
"hardware:allow-get-system-info",
"hardware:allow-get-system-usage",
"llamacpp:allow-get-devices",
"llamacpp:allow-read-gguf-metadata",
"deep-link:allow-get-current"
]
}

View File

@ -6,7 +6,14 @@ import { cn } from '@/lib/utils'
function HoverCard({
...props
}: React.ComponentProps<typeof HoverCardPrimitive.Root>) {
return <HoverCardPrimitive.Root data-slot="hover-card" {...props} />
return (
<HoverCardPrimitive.Root
openDelay={0}
closeDelay={0}
data-slot="hover-card"
{...props}
/>
)
}
function HoverCardTrigger({

View File

@ -107,9 +107,15 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => {
if (selectedProvider === 'llamacpp') {
const hasLocalMmproj = await checkMmprojExists(selectedModel.id)
setHasMmproj(hasLocalMmproj)
} else {
// For non-llamacpp providers, only check vision capability
}
// For non-llamacpp providers, only check vision capability
else if (
selectedProvider !== 'llamacpp' &&
selectedModel?.capabilities?.includes('vision')
) {
setHasMmproj(true)
} else {
setHasMmproj(false)
}
} catch (error) {
console.error('Error checking mmproj:', error)
@ -119,7 +125,7 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => {
}
checkMmprojSupport()
}, [selectedModel?.id, selectedProvider])
}, [selectedModel?.capabilities, selectedModel?.id, selectedProvider])
// Check if there are active MCP servers
const hasActiveMCPServers = connectedServers.length > 0 || tools.length > 0
@ -535,29 +541,41 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => {
)}
{/* File attachment - show only for models with mmproj */}
{hasMmproj && (
<div
className="h-6 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1"
onClick={handleAttachmentClick}
>
<IconPhoto size={18} className="text-main-view-fg/50" />
<input
type="file"
ref={fileInputRef}
className="hidden"
multiple
onChange={handleFileChange}
/>
</div>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div
className="h-7 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1"
onClick={handleAttachmentClick}
>
<IconPhoto
size={18}
className="text-main-view-fg/50"
/>
<input
type="file"
ref={fileInputRef}
className="hidden"
multiple
onChange={handleFileChange}
/>
</div>
</TooltipTrigger>
<TooltipContent>
<p>{t('vision')}</p>
</TooltipContent>
</Tooltip>
</TooltipProvider>
)}
{/* Microphone - always available - Temp Hide */}
{/* <div className="h-6 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
{/* <div className="h-7 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<IconMicrophone size={18} className="text-main-view-fg/50" />
</div> */}
{selectedModel?.capabilities?.includes('embeddings') && (
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div className="h-6 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<div className="h-7 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<IconCodeCircle2
size={18}
className="text-main-view-fg/50"
@ -601,7 +619,7 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => {
return (
<div
className={cn(
'h-6 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1 cursor-pointer relative',
'h-7 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1 cursor-pointer relative',
isOpen && 'bg-main-view-fg/10'
)}
>
@ -632,7 +650,7 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => {
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div className="h-6 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<div className="h-7 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<IconWorld
size={18}
className="text-main-view-fg/50"
@ -649,7 +667,7 @@ const ChatInput = ({ model, className, initialMessage }: ChatInputProps) => {
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div className="h-6 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<div className="h-7 p-1 flex items-center justify-center rounded-sm hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out gap-1">
<IconAtom
size={18}
className="text-main-view-fg/50"

View File

@ -414,13 +414,15 @@ const DropdownModelProvider = ({
</span>
</button>
</PopoverTrigger>
{currentModel?.settings && provider && (
<ModelSetting
model={currentModel as Model}
provider={provider}
smallIcon
/>
)}
{currentModel?.settings &&
provider &&
provider.provider === 'llamacpp' && (
<ModelSetting
model={currentModel as Model}
provider={provider}
smallIcon
/>
)}
<ModelSupportStatus
modelId={selectedModel?.id}
provider={selectedProvider}

View File

@ -5,11 +5,11 @@ import {
} from '@/components/ui/hover-card'
import { IconInfoCircle } from '@tabler/icons-react'
import { CatalogModel, ModelQuant } from '@/services/models'
import { extractDescription } from '@/lib/models'
interface ModelInfoHoverCardProps {
model: CatalogModel
variant?: ModelQuant
isDefaultVariant?: boolean
defaultModelQuantizations: string[]
modelSupportStatus: Record<string, string>
onCheckModelSupport: (variant: ModelQuant) => void
@ -19,12 +19,12 @@ interface ModelInfoHoverCardProps {
export const ModelInfoHoverCard = ({
model,
variant,
isDefaultVariant,
defaultModelQuantizations,
modelSupportStatus,
onCheckModelSupport,
children,
}: ModelInfoHoverCardProps) => {
const isVariantMode = !!variant
const displayVariant =
variant ||
model.quants.find((m) =>
@ -95,8 +95,8 @@ export const ModelInfoHoverCard = ({
{children || (
<div className="cursor-pointer">
<IconInfoCircle
size={14}
className="mt-0.5 text-main-view-fg/50 hover:text-main-view-fg/80 transition-colors"
size={isDefaultVariant ? 20 : 14}
className="mt-0.5 text-main-view-fg/80 hover:text-main-view-fg/80 transition-colors"
/>
</div>
)}
@ -106,10 +106,10 @@ export const ModelInfoHoverCard = ({
{/* Header */}
<div className="border-b border-main-view-fg/10 pb-3">
<h4 className="text-sm font-semibold text-main-view-fg">
{isVariantMode ? variant.model_id : model.model_name}
{!isDefaultVariant ? variant?.model_id : model?.model_name}
</h4>
<p className="text-xs text-main-view-fg/60 mt-1">
{isVariantMode
{!isDefaultVariant
? 'Model Variant Information'
: 'Model Information'}
</p>
@ -118,57 +118,21 @@ export const ModelInfoHoverCard = ({
{/* Main Info Grid */}
<div className="grid grid-cols-2 gap-3 text-xs">
<div className="space-y-2">
{isVariantMode ? (
<>
<div>
<span className="text-main-view-fg/50 block">
File Size
</span>
<span className="text-main-view-fg font-medium mt-1 inline-block">
{variant.file_size}
</span>
</div>
<div>
<span className="text-main-view-fg/50 block">
Quantization
</span>
<span className="text-main-view-fg font-medium mt-1 inline-block">
{variant.model_id.split('-').pop()?.toUpperCase() ||
'N/A'}
</span>
</div>
</>
) : (
<>
<div>
<span className="text-main-view-fg/50 block">
Downloads
</span>
<span className="text-main-view-fg font-medium mt-1 inline-block">
{model.downloads?.toLocaleString() || '0'}
</span>
</div>
<div>
<span className="text-main-view-fg/50 block">Variants</span>
<span className="text-main-view-fg font-medium mt-1 inline-block">
{model.quants?.length || 0}
</span>
</div>
</>
)}
<>
<div>
<span className="text-main-view-fg/50 block">
{isDefaultVariant
? 'Maybe Default Quantization'
: 'Quantization'}
</span>
<span className="text-main-view-fg font-medium mt-1 inline-block">
{variant?.model_id.split('-').pop()?.toUpperCase() || 'N/A'}
</span>
</div>
</>
</div>
<div className="space-y-2">
{!isVariantMode && (
<div>
<span className="text-main-view-fg/50 block">
Default Size
</span>
<span className="text-main-view-fg font-medium mt-1 inline-block">
{displayVariant?.file_size || 'N/A'}
</span>
</div>
)}
<div>
<span className="text-main-view-fg/50 block">
Compatibility
@ -204,21 +168,6 @@ export const ModelInfoHoverCard = ({
</div>
</div>
)}
{/* Content Section */}
<div className="border-t border-main-view-fg/10 pt-3">
<h5 className="text-xs font-medium text-main-view-fg/70 mb-1">
{isVariantMode ? 'Download URL' : 'Description'}
</h5>
<div className="text-xs text-main-view-fg/60 bg-main-view-fg/5 rounded p-2">
{isVariantMode ? (
<div className="font-mono break-all">{variant.path}</div>
) : (
extractDescription(model?.description) ||
'No description available'
)}
</div>
</div>
</div>
</HoverCardContent>
</HoverCard>

View File

@ -7,7 +7,8 @@ import {
TooltipTrigger,
} from '@/components/ui/tooltip'
import { isModelSupported } from '@/services/models'
import { getJanDataFolderPath, joinPath } from '@janhq/core'
import { getJanDataFolderPath, joinPath, fs } from '@janhq/core'
import { invoke } from '@tauri-apps/api/core'
interface ModelSupportStatusProps {
modelId: string | undefined
@ -31,12 +32,12 @@ export const ModelSupportStatus = ({
async (
id: string,
ctxSize: number
): Promise<'RED' | 'YELLOW' | 'GREEN'> => {
): Promise<'RED' | 'YELLOW' | 'GREEN' | null> => {
try {
// Get Jan's data folder path and construct the full model file path
// Following the llamacpp extension structure: <Jan's data folder>/llamacpp/models/<modelId>/model.gguf
const janDataFolder = await getJanDataFolderPath()
const modelFilePath = await joinPath([
// First try the standard downloaded model path
const ggufModelPath = await joinPath([
janDataFolder,
'llamacpp',
'models',
@ -44,14 +45,47 @@ export const ModelSupportStatus = ({
'model.gguf',
])
return await isModelSupported(modelFilePath, ctxSize)
// Check if the standard model.gguf file exists
if (await fs.existsSync(ggufModelPath)) {
return await isModelSupported(ggufModelPath, ctxSize)
}
// If model.gguf doesn't exist, try reading from model.yml (for imported models)
const modelConfigPath = await joinPath([
janDataFolder,
'llamacpp',
'models',
id,
'model.yml',
])
if (!(await fs.existsSync(modelConfigPath))) {
console.error(
`Neither model.gguf nor model.yml found for model: ${id}`
)
return null
}
// Read the model configuration to get the actual model path
const modelConfig = await invoke<{ model_path: string }>('read_yaml', {
path: `llamacpp/models/${id}/model.yml`,
})
// Handle both absolute and relative paths
const actualModelPath =
modelConfig.model_path.startsWith('/') ||
modelConfig.model_path.match(/^[A-Za-z]:/)
? modelConfig.model_path // absolute path, use as-is
: await joinPath([janDataFolder, modelConfig.model_path]) // relative path, join with data folder
return await isModelSupported(actualModelPath, ctxSize)
} catch (error) {
console.error(
'Error checking model support with constructed path:',
'Error checking model support with path resolution:',
error
)
// If path construction or model support check fails, assume not supported
return 'RED'
return null
}
},
[]

View File

@ -7,11 +7,7 @@ import {
DialogTrigger,
} from '@/components/ui/dialog'
import { Switch } from '@/components/ui/switch'
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from '@/components/ui/tooltip'
import { useModelProvider } from '@/hooks/useModelProvider'
import {
IconPencil,
@ -19,7 +15,7 @@ import {
IconTool,
// IconWorld,
// IconAtom,
IconCodeCircle2,
// IconCodeCircle2,
} from '@tabler/icons-react'
import { useState, useEffect } from 'react'
import { useTranslation } from '@/i18n/react-i18next-compat'
@ -177,24 +173,16 @@ export const DialogEditModel = ({
{t('providers:editModel.vision')}
</span>
</div>
<Tooltip>
<TooltipTrigger>
<Switch
id="vision-capability"
checked={capabilities.vision}
disabled={true}
onCheckedChange={(checked) =>
handleCapabilityChange('vision', checked)
}
/>
</TooltipTrigger>
<TooltipContent>
{t('providers:editModel.notAvailable')}
</TooltipContent>
</Tooltip>
<Switch
id="vision-capability"
checked={capabilities.vision}
onCheckedChange={(checked) =>
handleCapabilityChange('vision', checked)
}
/>
</div>
<div className="flex items-center justify-between">
{/* <div className="flex items-center justify-between">
<div className="flex items-center space-x-2">
<IconCodeCircle2 className="size-4 text-main-view-fg/70" />
<span className="text-sm">
@ -216,7 +204,7 @@ export const DialogEditModel = ({
{t('providers:editModel.notAvailable')}
</TooltipContent>
</Tooltip>
</div>
</div> */}
{/* <div className="flex items-center justify-between">
<div className="flex items-center space-x-2">

View File

@ -241,7 +241,7 @@ export const useModelProvider = create<ModelProviderState>()(
}
// Migrate model settings
if (provider.models) {
if (provider.models && provider.provider === 'llamacpp') {
provider.models.forEach((model) => {
if (!model.settings) model.settings = {}

View File

@ -37,7 +37,7 @@
"reportAnIssueDesc": "Found a bug? Help us out by filing an issue on GitHub.",
"reportIssue": "Report Issue",
"credits": "Credits",
"creditsDesc1": "Jan is built with ❤️ by the Menlo Team.",
"creditsDesc1": "👋 Jan is built with ❤️ by the Menlo Research team.",
"creditsDesc2": "Special thanks to our open-source dependencies—especially llama.cpp and Tauri—and to our amazing AI community.",
"appVersion": "App Version",
"dataFolder": {
@ -234,7 +234,7 @@
"reportAnIssueDesc": "Found a bug? Help us out by filing an issue on GitHub.",
"reportIssue": "Report Issue",
"credits": "Credits",
"creditsDesc1": "Jan is built with ❤️ by the Menlo Team.",
"creditsDesc1": "👋 Jan is built with ❤️ by the Menlo Research team.",
"creditsDesc2": "Special thanks to our open-source dependencies—especially llama.cpp and Tauri—and to our amazing AI community."
},
"extensions": {

View File

@ -353,12 +353,7 @@ function Hub() {
// Immediately set local downloading state
addLocalDownloadingModel(modelId)
const mmprojPath = model.mmproj_models?.[0]?.path
pullModelWithMetadata(
modelId,
modelUrl,
mmprojPath,
huggingfaceToken
)
pullModelWithMetadata(modelId, modelUrl, mmprojPath, huggingfaceToken)
}
return (
@ -399,13 +394,13 @@ function Hub() {
)
}
}, [
localDownloadingModels,
downloadProcesses,
llamaProvider?.models,
isRecommendedModel,
downloadButtonRef,
localDownloadingModels,
addLocalDownloadingModel,
t,
addLocalDownloadingModel,
huggingfaceToken,
handleUseModel,
])
@ -482,9 +477,9 @@ function Hub() {
const isLastStep = currentStepIndex === steps.length - 1
const renderFilter = () => {
if (searchValue.length === 0)
return (
<>
return (
<>
{searchValue.length === 0 && (
<DropdownMenu>
<DropdownMenuTrigger>
<span className="flex cursor-pointer items-center gap-1 px-2 py-1 rounded-sm bg-main-view-fg/15 text-sm outline-none text-main-view-fg font-medium">
@ -509,17 +504,18 @@ function Hub() {
))}
</DropdownMenuContent>
</DropdownMenu>
<div className="flex items-center gap-2">
<Switch
checked={showOnlyDownloaded}
onCheckedChange={setShowOnlyDownloaded}
/>
<span className="text-xs text-main-view-fg/70 font-medium whitespace-nowrap">
{t('hub:downloaded')}
</span>
</div>
</>
)
)}
<div className="flex items-center gap-2">
<Switch
checked={showOnlyDownloaded}
onCheckedChange={setShowOnlyDownloaded}
/>
<span className="text-xs text-main-view-fg/70 font-medium whitespace-nowrap">
{t('hub:downloaded')}
</span>
</div>
</>
)
}
return (
@ -661,6 +657,18 @@ function Hub() {
defaultModelQuantizations={
defaultModelQuantizations
}
variant={
filteredModels[
virtualItem.index
].quants.find((m) =>
defaultModelQuantizations.some((e) =>
m.model_id.toLowerCase().includes(e)
)
) ??
filteredModels[virtualItem.index]
.quants?.[0]
}
isDefaultVariant={true}
modelSupportStatus={modelSupportStatus}
onCheckModelSupport={checkModelSupport}
/>

View File

@ -584,10 +584,12 @@ function ProviderDetail() {
}
actions={
<div className="flex items-center gap-0.5">
<DialogEditModel
provider={provider}
modelId={model.id}
/>
{provider && provider.provider !== 'llamacpp' && (
<DialogEditModel
provider={provider}
modelId={model.id}
/>
)}
{model.settings && (
<ModelSetting
provider={provider}

View File

@ -491,7 +491,7 @@ export const checkMmprojExistsAndUpdateOffloadMMprojSetting = async (
key: 'offload_mmproj',
title: 'Offload MMProj',
description:
'Offload multimodal projection layers to GPU',
'Offload multimodal projection model to GPU',
controller_type: 'checkbox',
controller_props: {
value: true,