Merge branch 'dev' into feat/file-attachment

Dinh Long Nguyen 2025-10-08 16:34:45 +07:00
commit ff93dc3c5c
100 changed files with 3974 additions and 716 deletions

View File

@ -12,7 +12,7 @@ jobs:
build-and-preview:
runs-on: [ubuntu-24-04-docker]
env:
JAN_API_BASE: "https://api-dev.jan.ai/v1"
JAN_API_BASE: "https://api-dev.menlo.ai/v1"
permissions:
pull-requests: write
contents: write

View File

@ -13,7 +13,7 @@ jobs:
deployments: write
pull-requests: write
env:
JAN_API_BASE: "https://api.jan.ai/v1"
JAN_API_BASE: "https://api.menlo.ai/v1"
GA_MEASUREMENT_ID: "G-YK53MX8M8M"
CLOUDFLARE_PROJECT_NAME: "jan-server-web"
steps:

View File

@ -12,7 +12,7 @@ jobs:
build-and-preview:
runs-on: [ubuntu-24-04-docker]
env:
JAN_API_BASE: "https://api-stag.jan.ai/v1"
JAN_API_BASE: "https://api-stag.menlo.ai/v1"
permissions:
pull-requests: write
contents: write

View File

@ -49,6 +49,8 @@ jobs:
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
@ -80,6 +82,36 @@ jobs:
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
if [ "${{ inputs.channel }}" != "stable" ]; then
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
@ -103,7 +135,14 @@ jobs:
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
else
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
echo "---------nsis.template---------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Build app
shell: bash
run: |
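For reference, the version-splitting rule implemented by the generate_build_version helper above (and repeated in the release workflow further below) can be restated compactly. The sketch is illustrative only and is not part of the workflow; the workflows use the bash function shown in the diff, and the two resulting values are then substituted for the jan_version and jan_build placeholders in tauri.bundle.windows.nsis.template via sed.

// Illustrative restatement of generate_build_version; not part of the workflow.
fn generate_build_version(new_version: &str) -> (String, String) {
    let (base, t) = match new_version.split_once('-') {
        // No suffix: build number defaults to 0 (0.5.6 -> 0.5.6 / 0.5.6.0)
        None => (new_version, "0".to_string()),
        Some((base, suffix)) => {
            // rcX-beta keeps only X (0.5.6-rc2-beta -> 0.5.6.2);
            // any other suffix is used verbatim (0.5.6-1213 -> 0.5.6.1213)
            let t = suffix
                .strip_prefix("rc")
                .and_then(|rest| rest.strip_suffix("-beta"))
                .filter(|d| !d.is_empty() && d.chars().all(|c| c.is_ascii_digit()))
                .unwrap_or(suffix);
            (base, t.to_string())
        }
    };
    (base.to_string(), format!("{base}.{t}"))
}
// generate_build_version("0.5.6")          == ("0.5.6", "0.5.6.0")
// generate_build_version("0.5.6-rc2-beta") == ("0.5.6", "0.5.6.2")
// generate_build_version("0.5.6-1213")     == ("0.5.6", "0.5.6.1213")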

View File

@ -98,9 +98,15 @@ jobs:
# Update tauri.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
mv /tmp/package.json web-app/package.json
# Add sign commands to tauri.windows.conf.json
jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
# Update tauri plugin versions
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
@ -127,9 +133,35 @@ jobs:
echo "---------./src-tauri/Cargo.toml---------"
cat ./src-tauri/Cargo.toml
# Add sign commands to tauri.windows.conf.json
jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
generate_build_version() {
### Example
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
local new_version="$1"
local base_version
local t_value
# Check if it has a "-"
if [[ "$new_version" == *-* ]]; then
base_version="${new_version%%-*}" # part before -
suffix="${new_version#*-}" # part after -
# Check if it is rcX-beta
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
t_value="${BASH_REMATCH[1]}"
else
t_value="$suffix"
fi
else
base_version="$new_version"
t_value="0"
fi
# Export two values
new_base_version="$base_version"
new_build_version="${base_version}.${t_value}"
}
generate_build_version ${{ inputs.new_version }}
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
echo "---------tauri.windows.conf.json---------"
cat ./src-tauri/tauri.windows.conf.json
@ -163,7 +195,14 @@ jobs:
chmod +x .github/scripts/rename-workspace.sh
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
cat ./package.json
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
else
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
fi
echo "---------nsis.template---------"
cat ./src-tauri/tauri.bundle.windows.nsis.template
- name: Install AzureSignTool
run: |
@ -250,13 +289,3 @@ jobs:
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release asset if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
asset_content_type: application/octet-stream

View File

@ -117,7 +117,6 @@ lint: install-and-build
test: lint
yarn download:bin
ifeq ($(OS),Windows_NT)
yarn download:windows-installer
endif
yarn test
yarn copy:assets:tauri

View File

@ -25,8 +25,8 @@ export RANLIB_aarch64_linux_android="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x
# Additional environment variables for Rust cross-compilation
export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
# Only set global CC and AR for Android builds (when TAURI_ANDROID_BUILD is set)
if [ "$TAURI_ANDROID_BUILD" = "true" ]; then
# Only set global CC and AR for Android builds (when IS_ANDROID is set)
if [ "$IS_ANDROID" = "true" ]; then
export CC="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
export AR="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin/llvm-ar"
echo "Global CC and AR set for Android build"

View File

@ -39,7 +39,6 @@ import { getProxyConfig } from './util'
import { basename } from '@tauri-apps/api/path'
import {
readGgufMetadata,
estimateKVCacheSize,
getModelSize,
isModelSupported,
planModelLoadInternal,
@ -58,6 +57,8 @@ type LlamacppConfig = {
chat_template: string
n_gpu_layers: number
offload_mmproj: boolean
cpu_moe: boolean
n_cpu_moe: number
override_tensor_buffer_t: string
ctx_size: number
threads: number
@ -332,12 +333,14 @@ export default class llamacpp_extension extends AIEngine {
)
// Clear the invalid stored preference
this.clearStoredBackendType()
bestAvailableBackendString =
await this.determineBestBackend(version_backends)
bestAvailableBackendString = await this.determineBestBackend(
version_backends
)
}
} else {
bestAvailableBackendString =
await this.determineBestBackend(version_backends)
bestAvailableBackendString = await this.determineBestBackend(
version_backends
)
}
let settings = structuredClone(SETTINGS)
@ -1581,6 +1584,10 @@ export default class llamacpp_extension extends AIEngine {
])
args.push('--jinja')
args.push('-m', modelPath)
if (cfg.cpu_moe) args.push('--cpu-moe')
if (cfg.n_cpu_moe && cfg.n_cpu_moe > 0) {
args.push('--n-cpu-moe', String(cfg.n_cpu_moe))
}
// For overriding tensor buffer type, useful where
// massive MOE models can be made faster by keeping attention on the GPU
// and offloading the expert FFNs to the CPU.
@ -2163,7 +2170,12 @@ export default class llamacpp_extension extends AIEngine {
if (mmprojPath && !this.isAbsolutePath(mmprojPath))
mmprojPath = await joinPath([await getJanDataFolderPath(), path])
try {
const result = await planModelLoadInternal(path, this.memoryMode, mmprojPath, requestedCtx)
const result = await planModelLoadInternal(
path,
this.memoryMode,
mmprojPath,
requestedCtx
)
return result
} catch (e) {
throw new Error(String(e))
@ -2291,12 +2303,18 @@ export default class llamacpp_extension extends AIEngine {
}
// Calculate text tokens
const messages = JSON.stringify({ messages: opts.messages })
// Use chat_template_kwargs from opts if provided, otherwise default to disabling enable_thinking
const tokenizeRequest = {
messages: opts.messages,
chat_template_kwargs: opts.chat_template_kwargs || {
enable_thinking: false,
},
}
let parseResponse = await fetch(`${baseUrl}/apply-template`, {
method: 'POST',
headers: headers,
body: messages,
body: JSON.stringify(tokenizeRequest),
})
if (!parseResponse.ok) {

View File

@ -26,17 +26,16 @@
"serve:web-app": "yarn workspace @janhq/web-app serve:web",
"build:serve:web-app": "yarn build:web-app && yarn serve:web-app",
"dev:tauri": "yarn build:icon && yarn copy:assets:tauri && cross-env IS_CLEAN=true tauri dev",
"dev:ios": "yarn build:extensions-web && yarn copy:assets:mobile && RUSTC_WRAPPER= yarn tauri ios dev --features mobile",
"dev:android": "yarn build:extensions-web && yarn copy:assets:mobile && cross-env IS_CLEAN=true TAURI_ANDROID_BUILD=true yarn tauri android dev --features mobile",
"build:android": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_CLEAN=true TAURI_ANDROID_BUILD=true yarn tauri android build -- --no-default-features --features mobile",
"build:ios": "yarn copy:assets:mobile && yarn tauri ios build -- --no-default-features --features mobile",
"build:ios:device": "yarn build:icon && yarn copy:assets:mobile && yarn tauri ios build -- --no-default-features --features mobile --export-method debugging",
"dev:ios": "yarn copy:assets:mobile && RUSTC_WRAPPER= cross-env IS_IOS=true yarn tauri ios dev --features mobile",
"dev:android": "yarn copy:assets:mobile && cross-env IS_ANDROID=true yarn tauri android dev --features mobile",
"build:android": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_CLEAN=true yarn tauri android build -- --no-default-features --features mobile",
"build:ios": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile",
"build:ios:device": "yarn build:icon && yarn copy:assets:mobile && cross-env IS_IOS=true yarn tauri ios build -- --no-default-features --features mobile --export-method debugging",
"copy:assets:tauri": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"",
"copy:assets:mobile": "cpx \"pre-install/*.tgz\" \"src-tauri/resources/pre-install/\" && cpx \"LICENSE\" \"src-tauri/resources/\"",
"download:lib": "node ./scripts/download-lib.mjs",
"download:bin": "node ./scripts/download-bin.mjs",
"download:windows-installer": "node ./scripts/download-win-installer-deps.mjs",
"build:tauri:win32": "yarn download:bin && yarn download:windows-installer && yarn tauri build",
"build:tauri:win32": "yarn download:bin && yarn tauri build",
"build:tauri:linux": "yarn download:bin && NO_STRIP=1 ./src-tauri/build-utils/shim-linuxdeploy.sh yarn tauri build && ./src-tauri/build-utils/buildAppImage.sh",
"build:tauri:darwin": "yarn download:bin && yarn tauri build --target universal-apple-darwin",
"build:tauri": "yarn build:icon && yarn copy:assets:tauri && run-script-os",

View File

@ -1,83 +0,0 @@
console.log('Downloading Windows installer dependencies...')
// scripts/download-win-installer-deps.mjs
import https from 'https'
import fs, { mkdirSync } from 'fs'
import os from 'os'
import path from 'path'
import { copySync } from 'cpx'
function download(url, dest) {
return new Promise((resolve, reject) => {
console.log(`Downloading ${url} to ${dest}`)
const file = fs.createWriteStream(dest)
https
.get(url, (response) => {
console.log(`Response status code: ${response.statusCode}`)
if (
response.statusCode >= 300 &&
response.statusCode < 400 &&
response.headers.location
) {
// Handle redirect
const redirectURL = response.headers.location
console.log(`Redirecting to ${redirectURL}`)
download(redirectURL, dest).then(resolve, reject) // Recursive call
return
} else if (response.statusCode !== 200) {
reject(`Failed to get '${url}' (${response.statusCode})`)
return
}
response.pipe(file)
file.on('finish', () => {
file.close(resolve)
})
})
.on('error', (err) => {
fs.unlink(dest, () => reject(err.message))
})
})
}
async function main() {
console.log('Starting Windows installer dependencies download')
const platform = os.platform() // 'darwin', 'linux', 'win32'
const arch = os.arch() // 'x64', 'arm64', etc.
if (arch != 'x64') return
const libDir = 'src-tauri/resources/lib'
const tempDir = 'scripts/dist'
try {
mkdirSync('scripts/dist')
} catch (err) {
// Expect EEXIST error if the directory already exists
}
// Download VC++ Redistributable 17
if (platform == 'win32') {
const vcFilename = 'vc_redist.x64.exe'
const vcUrl = 'https://aka.ms/vs/17/release/vc_redist.x64.exe'
console.log(`Downloading VC++ Redistributable...`)
const vcSavePath = path.join(tempDir, vcFilename)
if (!fs.existsSync(vcSavePath)) {
await download(vcUrl, vcSavePath)
}
// copy to tauri resources
try {
copySync(vcSavePath, libDir)
} catch (err) {
// Expect EEXIST error
}
}
console.log('Windows installer dependencies downloads completed.')
}
main().catch((err) => {
console.error('Error:', err)
process.exit(1)
})

src-tauri/Cargo.lock (generated, 560 lines changed)
View File

@ -23,6 +23,7 @@ dependencies = [
"serde",
"serde_json",
"serde_yaml",
"sqlx",
"tar",
"tauri",
"tauri-build",
@ -118,6 +119,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "android-tzdata"
version = "0.1.1"
@ -355,6 +362,15 @@ dependencies = [
"system-deps",
]
[[package]]
name = "atoi"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
dependencies = [
"num-traits",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
@ -780,6 +796,12 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "const-random"
version = "0.1.18"
@ -911,6 +933,21 @@ dependencies = [
"libc",
]
[[package]]
name = "crc"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
dependencies = [
"crc-catalog",
]
[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crc32fast"
version = "1.5.0"
@ -1107,6 +1144,17 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a"
[[package]]
name = "der"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid",
"pem-rfc7468",
"zeroize",
]
[[package]]
name = "deranged"
version = "0.4.0"
@ -1148,6 +1196,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"const-oid",
"crypto-common",
"subtle",
]
@ -1252,6 +1301,12 @@ dependencies = [
"litrs",
]
[[package]]
name = "dotenvy"
version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "downcast-rs"
version = "1.2.1"
@ -1294,6 +1349,15 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
[[package]]
name = "either"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
dependencies = [
"serde",
]
[[package]]
name = "embed-resource"
version = "3.0.5"
@ -1402,6 +1466,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bb7ef65b3777a325d1eeefefab5b6d4959da54747e33bd6258e789640f307ad"
dependencies = [
"num-traits",
name = "etcetera"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
dependencies = [
"cfg-if",
"home",
"windows-sys 0.48.0",
]
[[package]]
@ -1503,12 +1575,29 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "flume"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
dependencies = [
"futures-core",
"futures-sink",
"spin",
]
[[package]]
name = "fnv"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "foreign-types"
version = "0.3.2"
@ -1618,6 +1707,17 @@ dependencies = [
"futures-util",
]
[[package]]
name = "futures-intrusive"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
dependencies = [
"futures-core",
"lock_api",
"parking_lot",
]
[[package]]
name = "futures-io"
version = "0.3.31"
@ -2070,6 +2170,20 @@ name = "hashbrown"
version = "0.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
dependencies = [
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
name = "hashlink"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
"hashbrown 0.15.4",
]
[[package]]
name = "hashlink"
@ -2104,6 +2218,15 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "hkdf"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
dependencies = [
"hmac",
]
[[package]]
name = "hmac"
version = "0.12.1"
@ -2281,6 +2404,20 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
dependencies = [
"futures-util",
"http 0.2.12",
"hyper 0.14.32",
"rustls 0.21.12",
"tokio",
"tokio-rustls 0.24.1",
]
[[package]]
name = "hyper-rustls"
version = "0.27.7"
@ -2290,12 +2427,12 @@ dependencies = [
"http 1.3.1",
"hyper 1.6.0",
"hyper-util",
"rustls",
"rustls 0.23.31",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tokio-rustls 0.26.2",
"tower-service",
"webpki-roots",
"webpki-roots 1.0.2",
]
[[package]]
@ -2717,6 +2854,9 @@ name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
"spin",
]
[[package]]
name = "libappindicator"
@ -2768,6 +2908,12 @@ dependencies = [
"windows-targets 0.53.3",
]
[[package]]
name = "libm"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
[[package]]
name = "libredox"
version = "0.1.9"
@ -3087,12 +3233,49 @@ dependencies = [
"winapi",
]
[[package]]
name = "num-bigint-dig"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151"
dependencies = [
"byteorder",
"lazy_static",
"libm",
"num-integer",
"num-iter",
"num-traits",
"rand 0.8.5",
"smallvec",
"zeroize",
]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -3100,6 +3283,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
"libm",
]
[[package]]
@ -3659,6 +3843,12 @@ dependencies = [
"postscript",
"type1-encoding-parser",
"unicode-normalization",
name = "pem-rfc7468"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
dependencies = [
"base64ct",
]
[[package]]
@ -3834,6 +4024,27 @@ dependencies = [
"futures-io",
]
[[package]]
name = "pkcs1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
dependencies = [
"der",
"pkcs8",
"spki",
]
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkg-config"
version = "0.3.32"
@ -4080,7 +4291,7 @@ dependencies = [
"quinn-proto",
"quinn-udp",
"rustc-hash",
"rustls",
"rustls 0.23.31",
"socket2 0.5.10",
"thiserror 2.0.12",
"tokio",
@ -4100,7 +4311,7 @@ dependencies = [
"rand 0.9.2",
"ring",
"rustc-hash",
"rustls",
"rustls 0.23.31",
"rustls-pki-types",
"slab",
"thiserror 2.0.12",
@ -4365,6 +4576,7 @@ dependencies = [
"http 0.2.12",
"http-body 0.4.6",
"hyper 0.14.32",
"hyper-rustls 0.24.2",
"hyper-tls",
"ipnet",
"js-sys",
@ -4374,6 +4586,7 @@ dependencies = [
"once_cell",
"percent-encoding",
"pin-project-lite",
"rustls 0.21.12",
"rustls-pemfile",
"serde",
"serde_json",
@ -4382,6 +4595,7 @@ dependencies = [
"system-configuration 0.5.1",
"tokio",
"tokio-native-tls",
"tokio-rustls 0.24.1",
"tokio-util",
"tower-service",
"url",
@ -4389,6 +4603,7 @@ dependencies = [
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
"webpki-roots 0.25.4",
"winreg 0.50.0",
]
@ -4410,7 +4625,7 @@ dependencies = [
"http-body 1.0.1",
"http-body-util",
"hyper 1.6.0",
"hyper-rustls",
"hyper-rustls 0.27.7",
"hyper-util",
"js-sys",
"log",
@ -4418,14 +4633,14 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"quinn",
"rustls",
"rustls 0.23.31",
"rustls-pki-types",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper 1.0.2",
"tokio",
"tokio-rustls",
"tokio-rustls 0.26.2",
"tokio-util",
"tower",
"tower-http",
@ -4435,7 +4650,7 @@ dependencies = [
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
"webpki-roots",
"webpki-roots 1.0.2",
]
[[package]]
@ -4558,6 +4773,23 @@ dependencies = [
"hashlink",
"libsqlite3-sys",
"smallvec",
name = "rsa"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b"
dependencies = [
"const-oid",
"digest",
"num-bigint-dig",
"num-integer",
"num-traits",
"pkcs1",
"pkcs8",
"rand_core 0.6.4",
"signature",
"spki",
"subtle",
"zeroize",
]
[[package]]
@ -4620,6 +4852,18 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "rustls"
version = "0.21.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
"ring",
"rustls-webpki 0.101.7",
"sct",
]
[[package]]
name = "rustls"
version = "0.23.31"
@ -4629,7 +4873,7 @@ dependencies = [
"once_cell",
"ring",
"rustls-pki-types",
"rustls-webpki",
"rustls-webpki 0.103.4",
"subtle",
"zeroize",
]
@ -4653,6 +4897,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rustls-webpki"
version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "rustls-webpki"
version = "0.103.4"
@ -4771,6 +5025,16 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "sct"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "seahash"
version = "4.1.0"
@ -5068,6 +5332,16 @@ dependencies = [
"libc",
]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"digest",
"rand_core 0.6.4",
]
[[package]]
name = "simd-adler32"
version = "0.3.7"
@ -5103,6 +5377,9 @@ name = "smallvec"
version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
dependencies = [
"serde",
]
[[package]]
name = "socket2"
@ -5172,6 +5449,213 @@ dependencies = [
"system-deps",
]
[[package]]
name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
dependencies = [
"lock_api",
]
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "sqlx"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
dependencies = [
"sqlx-core",
"sqlx-macros",
"sqlx-mysql",
"sqlx-postgres",
"sqlx-sqlite",
]
[[package]]
name = "sqlx-core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
dependencies = [
"base64 0.22.1",
"bytes",
"crc",
"crossbeam-queue",
"either",
"event-listener",
"futures-core",
"futures-intrusive",
"futures-io",
"futures-util",
"hashbrown 0.15.4",
"hashlink",
"indexmap 2.10.0",
"log",
"memchr",
"once_cell",
"percent-encoding",
"serde",
"serde_json",
"sha2",
"smallvec",
"thiserror 2.0.12",
"tokio",
"tokio-stream",
"tracing",
"url",
]
[[package]]
name = "sqlx-macros"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
dependencies = [
"proc-macro2",
"quote",
"sqlx-core",
"sqlx-macros-core",
"syn 2.0.104",
]
[[package]]
name = "sqlx-macros-core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
dependencies = [
"dotenvy",
"either",
"heck 0.5.0",
"hex",
"once_cell",
"proc-macro2",
"quote",
"serde",
"serde_json",
"sha2",
"sqlx-core",
"sqlx-mysql",
"sqlx-postgres",
"sqlx-sqlite",
"syn 2.0.104",
"tokio",
"url",
]
[[package]]
name = "sqlx-mysql"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
dependencies = [
"atoi",
"base64 0.22.1",
"bitflags 2.9.1",
"byteorder",
"bytes",
"crc",
"digest",
"dotenvy",
"either",
"futures-channel",
"futures-core",
"futures-io",
"futures-util",
"generic-array",
"hex",
"hkdf",
"hmac",
"itoa",
"log",
"md-5",
"memchr",
"once_cell",
"percent-encoding",
"rand 0.8.5",
"rsa",
"serde",
"sha1",
"sha2",
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.12",
"tracing",
"whoami",
]
[[package]]
name = "sqlx-postgres"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
dependencies = [
"atoi",
"base64 0.22.1",
"bitflags 2.9.1",
"byteorder",
"crc",
"dotenvy",
"etcetera",
"futures-channel",
"futures-core",
"futures-util",
"hex",
"hkdf",
"hmac",
"home",
"itoa",
"log",
"md-5",
"memchr",
"once_cell",
"rand 0.8.5",
"serde",
"serde_json",
"sha2",
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.12",
"tracing",
"whoami",
]
[[package]]
name = "sqlx-sqlite"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
dependencies = [
"atoi",
"flume",
"futures-channel",
"futures-core",
"futures-executor",
"futures-intrusive",
"futures-util",
"libsqlite3-sys",
"log",
"percent-encoding",
"serde",
"serde_urlencoded",
"sqlx-core",
"thiserror 2.0.12",
"tracing",
"url",
]
[[package]]
name = "sse-stream"
version = "0.2.1"
@ -5222,6 +5706,17 @@ dependencies = [
"quote",
]
[[package]]
name = "stringprep"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
dependencies = [
"unicode-bidi",
"unicode-normalization",
"unicode-properties",
]
[[package]]
name = "strip-ansi-escapes"
version = "0.2.1"
@ -6176,13 +6671,23 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081"
dependencies = [
"rustls 0.21.12",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
dependencies = [
"rustls",
"rustls 0.23.31",
"tokio",
]
@ -6363,6 +6868,7 @@ version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
"log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
@ -6489,6 +6995,12 @@ dependencies = [
"unic-common",
]
[[package]]
name = "unicode-bidi"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
[[package]]
name = "unicode-ident"
version = "1.0.18"
@ -6504,6 +7016,12 @@ dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-properties"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
@ -6726,6 +7244,12 @@ dependencies = [
"wit-bindgen-rt",
]
[[package]]
name = "wasite"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
[[package]]
name = "wasm-bindgen"
version = "0.2.100"
@ -6934,6 +7458,12 @@ dependencies = [
"system-deps",
]
[[package]]
name = "webpki-roots"
version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"
[[package]]
name = "webpki-roots"
version = "1.0.2"
@ -6984,6 +7514,14 @@ name = "weezl"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3"
name = "whoami"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
dependencies = [
"libredox",
"wasite",
]
[[package]]
name = "winapi"

View File

@ -35,6 +35,7 @@ mobile = [
"tauri/protocol-asset",
"tauri/test",
"tauri/wry",
"dep:sqlx",
]
test-tauri = [
"tauri/wry",
@ -59,7 +60,6 @@ hyper = { version = "0.14", features = ["server"] }
jan-utils = { path = "./utils" }
libloading = "0.8.7"
log = "0.4"
reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls-vendored"] }
rmcp = { version = "0.6.0", features = [
"client",
"transport-sse-client",
@ -85,6 +85,7 @@ tauri-plugin-opener = "2.2.7"
tauri-plugin-os = "2.2.1"
tauri-plugin-shell = "2.2.0"
tauri-plugin-store = "2"
sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"], optional = true }
thiserror = "2.0.12"
tokio = { version = "1", features = ["full"] }
tokio-util = "0.7.14"
@ -107,11 +108,13 @@ libc = "0.2.172"
windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] }
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls-vendored"] }
tauri-plugin-updater = "2"
once_cell = "1.18"
tauri-plugin-single-instance = { version = "2", features = ["deep-link"] }
[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies]
reqwest = { version = "0.11", features = ["json", "blocking", "stream", "rustls-tls"], default-features = false }
tauri-plugin-dialog = { version = "2.2.1", default-features = false }
tauri-plugin-http = { version = "2", default-features = false }
tauri-plugin-log = { version = "2.0.0-rc", default-features = false }

View File

@ -12,6 +12,8 @@
"core:webview:allow-set-webview-zoom",
"core:window:allow-start-dragging",
"core:window:allow-set-theme",
"core:window:allow-get-all-windows",
"core:event:allow-listen",
"shell:allow-spawn",
"shell:allow-open",
"core:app:allow-set-app-theme",

View File

@ -1,14 +1,18 @@
{
"$schema": "../gen/schemas/desktop-schema.json",
"identifier": "logs-app-window",
"identifier": "log-app-window",
"description": "enables permissions for the logs app window",
"windows": ["logs-app-window"],
"platforms": ["linux", "macOS", "windows"],
"permissions": [
"core:default",
"core:window:allow-start-dragging",
"core:window:allow-set-theme",
"core:window:allow-get-all-windows",
"core:event:allow-listen",
"log:default",
"core:webview:allow-create-webview-window",
"core:webview:allow-get-all-webviews",
"core:window:allow-set-focus"
]
}

View File

@ -3,12 +3,16 @@
"identifier": "logs-window",
"description": "enables permissions for the logs window",
"windows": ["logs-window-local-api-server"],
"platforms": ["linux", "macOS", "windows"],
"permissions": [
"core:default",
"core:window:allow-start-dragging",
"core:window:allow-set-theme",
"core:window:allow-get-all-windows",
"core:event:allow-listen",
"log:default",
"core:webview:allow-create-webview-window",
"core:webview:allow-get-all-webviews",
"core:window:allow-set-focus"
]
}

View File

@ -8,13 +8,28 @@
"core:default",
"core:window:allow-start-dragging",
"core:window:allow-set-theme",
"core:window:allow-get-all-windows",
"core:event:allow-listen",
"log:default",
"core:webview:allow-create-webview-window",
"core:webview:allow-get-all-webviews",
"core:window:allow-set-focus",
"hardware:allow-get-system-info",
"hardware:allow-get-system-usage",
"llamacpp:allow-get-devices",
"llamacpp:allow-read-gguf-metadata",
"deep-link:allow-get-current"
"deep-link:allow-get-current",
{
"identifier": "http:default",
"allow": [
{
"url": "https://*:*"
},
{
"url": "http://*:*"
}
],
"deny": []
}
]
}

View File

@ -23,9 +23,14 @@ sysinfo = "0.34.2"
tauri = { version = "2.5.0", default-features = false, features = [] }
thiserror = "2.0.12"
tokio = { version = "1", features = ["full"] }
reqwest = { version = "0.11", features = ["json", "blocking", "stream"] }
tauri-plugin-hardware = { path = "../tauri-plugin-hardware" }
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
reqwest = { version = "0.11", features = ["json", "blocking", "stream", "native-tls"] }
[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies]
reqwest = { version = "0.11", features = ["json", "blocking", "stream", "rustls-tls"], default-features = false }
# Unix-specific dependencies
[target.'cfg(unix)'.dependencies]
nix = { version = "=0.30.1", features = ["signal", "process"] }

View File

@ -87,19 +87,25 @@ pub async fn is_model_supported(
);
const RESERVE_BYTES: u64 = 2288490189;
let total_system_memory = system_info.total_memory * 1024 * 1024;
let total_system_memory: u64 = match system_info.gpus.is_empty() {
// on macOS with unified memory, treat RAM = 0 for now
true => 0,
false => system_info.total_memory * 1024 * 1024,
};
// Calculate total VRAM from all GPUs
let total_vram: u64 = if system_info.gpus.is_empty() {
let total_vram: u64 = match system_info.gpus.is_empty() {
// On macOS with unified memory, GPU info may be empty
// Use total RAM as VRAM since memory is shared
true => {
log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM");
total_system_memory
} else {
system_info
system_info.total_memory * 1024 * 1024
}
false => system_info
.gpus
.iter()
.map(|g| g.total_memory * 1024 * 1024)
.sum::<u64>()
.sum::<u64>(),
};
log::info!("Total VRAM reported/calculated (in bytes): {}", &total_vram);
@ -113,7 +119,7 @@ pub async fn is_model_supported(
let usable_total_memory = if total_system_memory > RESERVE_BYTES {
(total_system_memory - RESERVE_BYTES) + usable_vram
} else {
0
usable_vram
};
log::info!("System RAM: {} bytes", &total_system_memory);
log::info!("Total VRAM: {} bytes", &total_vram);

View File

@ -80,25 +80,25 @@ pub async fn plan_model_load(
log::info!("Got GPUs:\n{:?}", &sys_info.gpus);
let total_ram: u64 = sys_info.total_memory * 1024 * 1024;
log::info!(
"Total system memory reported from tauri_plugin_hardware(in bytes): {}",
&total_ram
);
let total_ram: u64 = match sys_info.gpus.is_empty() {
// Consider RAM as 0 for unified memory
true => 0,
false => sys_info.total_memory * 1024 * 1024,
};
let total_vram: u64 = if sys_info.gpus.is_empty() {
// On macOS with unified memory, GPU info may be empty
// Use total RAM as VRAM since memory is shared
// Calculate total VRAM from all GPUs
let total_vram: u64 = match sys_info.gpus.is_empty() {
true => {
log::info!("No GPUs detected (likely unified memory system), using total RAM as VRAM");
total_ram
} else {
sys_info
sys_info.total_memory * 1024 * 1024
}
false => sys_info
.gpus
.iter()
.map(|g| g.total_memory * 1024 * 1024)
.sum::<u64>()
.sum::<u64>(),
};
log::info!("Total RAM reported/calculated (in bytes): {}", &total_ram);
log::info!("Total VRAM reported/calculated (in bytes): {}", &total_vram);
let usable_vram: u64 = if total_vram > RESERVE_BYTES {
(((total_vram - RESERVE_BYTES) as f64) * multiplier) as u64
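plan_model_load applies the same unified-memory convention as is_model_supported above: with no discrete GPUs reported, RAM is counted as 0 and the whole physical memory is treated as VRAM. A small sketch of that split, using a simplified stand-in for the system-info shape (the real structs come from tauri_plugin_hardware and differ).

// Illustrative only: simplified stand-in for the hardware plugin's system info.
struct SysInfo {
    total_memory: u64,        // MiB, as reported by the plugin
    gpu_memory_mib: Vec<u64>, // per-GPU memory in MiB
}

/// Returns (total_ram_bytes, total_vram_bytes) following the rule in this diff.
fn ram_vram_split(sys: &SysInfo) -> (u64, u64) {
    let bytes = sys.total_memory * 1024 * 1024;
    if sys.gpu_memory_mib.is_empty() {
        // Unified memory (e.g. Apple Silicon): count RAM as 0, treat it all as VRAM.
        (0, bytes)
    } else {
        (bytes, sys.gpu_memory_mib.iter().map(|m| m * 1024 * 1024).sum())
    }
}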

View File

@ -19,6 +19,15 @@ pub fn install_extensions<R: Runtime>(app: AppHandle<R>) {
#[tauri::command]
pub fn get_active_extensions<R: Runtime>(app: AppHandle<R>) -> Vec<serde_json::Value> {
// On mobile platforms, extensions are pre-bundled in the frontend
// Return empty array so frontend's MobileCoreService handles it
#[cfg(any(target_os = "android", target_os = "ios"))]
{
return vec![];
}
#[cfg(not(any(target_os = "android", target_os = "ios")))]
{
let mut path = get_jan_extensions_path(app);
path.push("extensions.json");
log::info!("get jan extensions, path: {path:?}");
@ -49,5 +58,6 @@ pub fn get_active_extensions<R: Runtime>(app: AppHandle<R>) -> Vec<serde_json::V
vec![]
}
};
contents
return contents;
}
}

View File

@ -7,7 +7,7 @@ use std::{
};
use tar::Archive;
use tauri::{
App, Emitter, Manager, Runtime, Wry
App, Emitter, Manager, Runtime, Wry, WindowEvent
};
#[cfg(desktop)]
@ -24,6 +24,13 @@ use super::{
};
pub fn install_extensions<R: Runtime>(app: tauri::AppHandle<R>, force: bool) -> Result<(), String> {
// Skip extension installation on mobile platforms
// Mobile uses pre-bundled extensions loaded via MobileCoreService in the frontend
#[cfg(any(target_os = "android", target_os = "ios"))]
{
return Ok(());
}
let extensions_path = get_jan_extensions_path(app.clone());
let pre_install_path = app
.path()
@ -263,3 +270,32 @@ pub fn setup_tray(app: &App) -> tauri::Result<TrayIcon> {
})
.build(app)
}
pub fn setup_theme_listener<R: Runtime>(app: &App<R>) -> tauri::Result<()> {
// Setup theme listener for main window
if let Some(window) = app.get_webview_window("main") {
setup_window_theme_listener(app.handle().clone(), window);
}
Ok(())
}
fn setup_window_theme_listener<R: Runtime>(
app_handle: tauri::AppHandle<R>,
window: tauri::WebviewWindow<R>,
) {
let window_label = window.label().to_string();
let app_handle_clone = app_handle.clone();
window.on_window_event(move |event| {
if let WindowEvent::ThemeChanged(theme) = event {
let theme_str = match theme {
tauri::Theme::Light => "light",
tauri::Theme::Dark => "dark",
_ => "auto",
};
log::info!("System theme changed to: {} for window: {}", theme_str, window_label);
let _ = app_handle_clone.emit("theme-changed", theme_str);
}
});
}
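Where setup_theme_listener is invoked is not visible in this hunk; the sketch below shows one plausible wiring point, the Tauri builder's setup hook, and is an assumption rather than part of the diff. The core:event:allow-listen permission added to the capability files earlier in this commit is presumably what lets the web views subscribe to the emitted "theme-changed" event.

// Assumed wiring only; the actual call site is outside this hunk.
fn run() -> tauri::Result<()> {
    tauri::Builder::default()
        .setup(|app| {
            // Registers the ThemeChanged -> "theme-changed" emitter for the main window.
            setup_theme_listener(app)?;
            Ok(())
        })
        .run(tauri::generate_context!())
}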

View File

@ -117,3 +117,4 @@ pub fn is_library_available(library: &str) -> bool {
}
}
}

View File

@ -3,8 +3,11 @@ use std::io::Write;
use tauri::Runtime;
use uuid::Uuid;
#[cfg(any(target_os = "android", target_os = "ios"))]
use super::db;
use super::helpers::{
get_lock_for_thread, read_messages_from_file, update_thread_metadata, write_messages_to_file,
get_lock_for_thread, read_messages_from_file, should_use_sqlite, update_thread_metadata,
write_messages_to_file,
};
use super::{
constants::THREADS_FILE,
@ -14,12 +17,19 @@ use super::{
},
};
/// Lists all threads by reading their metadata from the threads directory.
/// Lists all threads by reading their metadata from the threads directory or database.
/// Returns a vector of thread metadata as JSON values.
#[tauri::command]
pub async fn list_threads<R: Runtime>(
app_handle: tauri::AppHandle<R>,
) -> Result<Vec<serde_json::Value>, String> {
if should_use_sqlite() {
// Use SQLite on mobile platforms
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_list_threads(app_handle).await;
}
// Use file-based storage on desktop
ensure_data_dirs(app_handle.clone())?;
let data_dir = get_data_dir(app_handle.clone());
let mut threads = Vec::new();
@ -56,6 +66,12 @@ pub async fn create_thread<R: Runtime>(
app_handle: tauri::AppHandle<R>,
mut thread: serde_json::Value,
) -> Result<serde_json::Value, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_create_thread(app_handle, thread).await;
}
// Use file-based storage on desktop
ensure_data_dirs(app_handle.clone())?;
let uuid = Uuid::new_v4().to_string();
thread["id"] = serde_json::Value::String(uuid.clone());
@ -76,6 +92,12 @@ pub async fn modify_thread<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread: serde_json::Value,
) -> Result<(), String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_modify_thread(app_handle, thread).await;
}
// Use file-based storage on desktop
let thread_id = thread
.get("id")
.and_then(|id| id.as_str())
@ -96,6 +118,12 @@ pub async fn delete_thread<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
) -> Result<(), String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_delete_thread(app_handle, &thread_id).await;
}
// Use file-based storage on desktop
let thread_dir = get_thread_dir(app_handle.clone(), &thread_id);
if thread_dir.exists() {
let _ = fs::remove_dir_all(thread_dir);
@ -110,6 +138,12 @@ pub async fn list_messages<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
) -> Result<Vec<serde_json::Value>, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_list_messages(app_handle, &thread_id).await;
}
// Use file-based storage on desktop
read_messages_from_file(app_handle, &thread_id)
}
@ -120,6 +154,12 @@ pub async fn create_message<R: Runtime>(
app_handle: tauri::AppHandle<R>,
mut message: serde_json::Value,
) -> Result<serde_json::Value, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_create_message(app_handle, message).await;
}
// Use file-based storage on desktop
let thread_id = {
let id = message
.get("thread_id")
@ -166,6 +206,12 @@ pub async fn modify_message<R: Runtime>(
app_handle: tauri::AppHandle<R>,
message: serde_json::Value,
) -> Result<serde_json::Value, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_modify_message(app_handle, message).await;
}
// Use file-based storage on desktop
let thread_id = message
.get("thread_id")
.and_then(|v| v.as_str())
@ -204,6 +250,12 @@ pub async fn delete_message<R: Runtime>(
thread_id: String,
message_id: String,
) -> Result<(), String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_delete_message(app_handle, &thread_id, &message_id).await;
}
// Use file-based storage on desktop
// Acquire per-thread lock before modifying
{
let lock = get_lock_for_thread(&thread_id).await;
@ -227,6 +279,12 @@ pub async fn get_thread_assistant<R: Runtime>(
app_handle: tauri::AppHandle<R>,
thread_id: String,
) -> Result<serde_json::Value, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_get_thread_assistant(app_handle, &thread_id).await;
}
// Use file-based storage on desktop
let path = get_thread_metadata_path(app_handle, &thread_id);
if !path.exists() {
return Err("Thread not found".to_string());
@ -252,6 +310,12 @@ pub async fn create_thread_assistant<R: Runtime>(
thread_id: String,
assistant: serde_json::Value,
) -> Result<serde_json::Value, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_create_thread_assistant(app_handle, &thread_id, assistant).await;
}
// Use file-based storage on desktop
let path = get_thread_metadata_path(app_handle.clone(), &thread_id);
if !path.exists() {
return Err("Thread not found".to_string());
@ -277,6 +341,12 @@ pub async fn modify_thread_assistant<R: Runtime>(
thread_id: String,
assistant: serde_json::Value,
) -> Result<serde_json::Value, String> {
if should_use_sqlite() {
#[cfg(any(target_os = "android", target_os = "ios"))]
return db::db_modify_thread_assistant(app_handle, &thread_id, assistant).await;
}
// Use file-based storage on desktop
let path = get_thread_metadata_path(app_handle.clone(), &thread_id);
if !path.exists() {
return Err("Thread not found".to_string());

View File

@ -0,0 +1,397 @@
/*!
SQLite Database Module for Mobile Thread Storage
This module provides SQLite-based storage for threads and messages on mobile platforms.
It ensures data persistence and retrieval work correctly on Android and iOS devices.
Note: This module is only compiled and used on mobile platforms (Android/iOS).
On desktop, the file-based storage in helpers.rs is used instead.
*/
#![allow(dead_code)] // Functions only used on mobile platforms
use serde_json::Value;
use sqlx::sqlite::{SqliteConnectOptions, SqlitePool, SqlitePoolOptions};
use sqlx::Row;
use std::str::FromStr;
use std::sync::OnceLock;
use tauri::{AppHandle, Manager, Runtime};
use tokio::sync::Mutex;
const DB_NAME: &str = "jan.db";
/// Global database pool for mobile platforms
static DB_POOL: OnceLock<Mutex<Option<SqlitePool>>> = OnceLock::new();
/// Initialize database with connection pool and run migrations
pub async fn init_database<R: Runtime>(app: &AppHandle<R>) -> Result<(), String> {
// Get app data directory
let app_data_dir = app
.path()
.app_data_dir()
.map_err(|e| format!("Failed to get app data dir: {}", e))?;
// Ensure directory exists
std::fs::create_dir_all(&app_data_dir)
.map_err(|e| format!("Failed to create app data dir: {}", e))?;
// Create database path
let db_path = app_data_dir.join(DB_NAME);
let db_url = format!("sqlite:{}", db_path.display());
log::info!("Initializing SQLite database at: {}", db_url);
// Create connection options
let connect_options = SqliteConnectOptions::from_str(&db_url)
.map_err(|e| format!("Failed to parse connection options: {}", e))?
.create_if_missing(true);
// Create connection pool
let pool = SqlitePoolOptions::new()
.max_connections(5)
.connect_with(connect_options)
.await
.map_err(|e| format!("Failed to create connection pool: {}", e))?;
// Run migrations
sqlx::query(
r#"
CREATE TABLE IF NOT EXISTS threads (
id TEXT PRIMARY KEY,
data TEXT NOT NULL,
created_at INTEGER DEFAULT (strftime('%s', 'now')),
updated_at INTEGER DEFAULT (strftime('%s', 'now'))
);
"#,
)
.execute(&pool)
.await
.map_err(|e| format!("Failed to create threads table: {}", e))?;
sqlx::query(
r#"
CREATE TABLE IF NOT EXISTS messages (
id TEXT PRIMARY KEY,
thread_id TEXT NOT NULL,
data TEXT NOT NULL,
created_at INTEGER DEFAULT (strftime('%s', 'now')),
FOREIGN KEY (thread_id) REFERENCES threads(id) ON DELETE CASCADE
);
"#,
)
.execute(&pool)
.await
.map_err(|e| format!("Failed to create messages table: {}", e))?;
// Create indexes
sqlx::query(
"CREATE INDEX IF NOT EXISTS idx_messages_thread_id ON messages(thread_id);",
)
.execute(&pool)
.await
.map_err(|e| format!("Failed to create thread_id index: {}", e))?;
sqlx::query(
"CREATE INDEX IF NOT EXISTS idx_messages_created_at ON messages(created_at);",
)
.execute(&pool)
.await
.map_err(|e| format!("Failed to create created_at index: {}", e))?;
// Store pool globally
DB_POOL
.get_or_init(|| Mutex::new(None))
.lock()
.await
.replace(pool);
log::info!("SQLite database initialized successfully for mobile platform");
Ok(())
}
/// Get database pool
async fn get_pool() -> Result<SqlitePool, String> {
let pool_mutex = DB_POOL
.get()
.ok_or("Database not initialized")?;
let pool_guard = pool_mutex.lock().await;
pool_guard
.clone()
.ok_or("Database pool not available".to_string())
}
/// List all threads from database
pub async fn db_list_threads<R: Runtime>(
_app_handle: AppHandle<R>,
) -> Result<Vec<Value>, String> {
let pool = get_pool().await?;
let rows = sqlx::query("SELECT data FROM threads ORDER BY updated_at DESC")
.fetch_all(&pool)
.await
.map_err(|e| format!("Failed to list threads: {}", e))?;
let threads: Result<Vec<Value>, _> = rows
.iter()
.map(|row| {
let data: String = row.get("data");
serde_json::from_str(&data).map_err(|e| e.to_string())
})
.collect();
threads
}
/// Create a new thread in database
pub async fn db_create_thread<R: Runtime>(
_app_handle: AppHandle<R>,
thread: Value,
) -> Result<Value, String> {
let pool = get_pool().await?;
let thread_id = thread
.get("id")
.and_then(|v| v.as_str())
.ok_or("Missing thread id")?;
let data = serde_json::to_string(&thread).map_err(|e| e.to_string())?;
sqlx::query("INSERT INTO threads (id, data) VALUES (?1, ?2)")
.bind(thread_id)
.bind(&data)
.execute(&pool)
.await
.map_err(|e| format!("Failed to create thread: {}", e))?;
Ok(thread)
}
/// Modify an existing thread in database
pub async fn db_modify_thread<R: Runtime>(
_app_handle: AppHandle<R>,
thread: Value,
) -> Result<(), String> {
let pool = get_pool().await?;
let thread_id = thread
.get("id")
.and_then(|v| v.as_str())
.ok_or("Missing thread id")?;
let data = serde_json::to_string(&thread).map_err(|e| e.to_string())?;
sqlx::query("UPDATE threads SET data = ?1, updated_at = strftime('%s', 'now') WHERE id = ?2")
.bind(&data)
.bind(thread_id)
.execute(&pool)
.await
.map_err(|e| format!("Failed to modify thread: {}", e))?;
Ok(())
}
/// Delete a thread from database
pub async fn db_delete_thread<R: Runtime>(
_app_handle: AppHandle<R>,
thread_id: &str,
) -> Result<(), String> {
let pool = get_pool().await?;
// Messages will be auto-deleted via CASCADE
sqlx::query("DELETE FROM threads WHERE id = ?1")
.bind(thread_id)
.execute(&pool)
.await
.map_err(|e| format!("Failed to delete thread: {}", e))?;
Ok(())
}
/// List all messages for a thread from database
pub async fn db_list_messages<R: Runtime>(
_app_handle: AppHandle<R>,
thread_id: &str,
) -> Result<Vec<Value>, String> {
let pool = get_pool().await?;
let rows = sqlx::query(
"SELECT data FROM messages WHERE thread_id = ?1 ORDER BY created_at ASC",
)
.bind(thread_id)
.fetch_all(&pool)
.await
.map_err(|e| format!("Failed to list messages: {}", e))?;
let messages: Result<Vec<Value>, _> = rows
.iter()
.map(|row| {
let data: String = row.get("data");
serde_json::from_str(&data).map_err(|e| e.to_string())
})
.collect();
messages
}
/// Create a new message in database
pub async fn db_create_message<R: Runtime>(
_app_handle: AppHandle<R>,
message: Value,
) -> Result<Value, String> {
let pool = get_pool().await?;
let message_id = message
.get("id")
.and_then(|v| v.as_str())
.ok_or("Missing message id")?;
let thread_id = message
.get("thread_id")
.and_then(|v| v.as_str())
.ok_or("Missing thread_id")?;
let data = serde_json::to_string(&message).map_err(|e| e.to_string())?;
sqlx::query("INSERT INTO messages (id, thread_id, data) VALUES (?1, ?2, ?3)")
.bind(message_id)
.bind(thread_id)
.bind(&data)
.execute(&pool)
.await
.map_err(|e| format!("Failed to create message: {}", e))?;
Ok(message)
}
/// Modify an existing message in database
pub async fn db_modify_message<R: Runtime>(
_app_handle: AppHandle<R>,
message: Value,
) -> Result<Value, String> {
let pool = get_pool().await?;
let message_id = message
.get("id")
.and_then(|v| v.as_str())
.ok_or("Missing message id")?;
let data = serde_json::to_string(&message).map_err(|e| e.to_string())?;
sqlx::query("UPDATE messages SET data = ?1 WHERE id = ?2")
.bind(&data)
.bind(message_id)
.execute(&pool)
.await
.map_err(|e| format!("Failed to modify message: {}", e))?;
Ok(message)
}
/// Delete a message from database
pub async fn db_delete_message<R: Runtime>(
_app_handle: AppHandle<R>,
_thread_id: &str,
message_id: &str,
) -> Result<(), String> {
let pool = get_pool().await?;
sqlx::query("DELETE FROM messages WHERE id = ?1")
.bind(message_id)
.execute(&pool)
.await
.map_err(|e| format!("Failed to delete message: {}", e))?;
Ok(())
}
/// Get thread assistant information from thread metadata
pub async fn db_get_thread_assistant<R: Runtime>(
_app_handle: AppHandle<R>,
thread_id: &str,
) -> Result<Value, String> {
let pool = get_pool().await?;
let row = sqlx::query("SELECT data FROM threads WHERE id = ?1")
.bind(thread_id)
.fetch_optional(&pool)
.await
.map_err(|e| format!("Failed to get thread: {}", e))?
.ok_or("Thread not found")?;
let data: String = row.get("data");
let thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?;
if let Some(assistants) = thread.get("assistants").and_then(|a| a.as_array()) {
assistants
.first()
.cloned()
.ok_or("Assistant not found".to_string())
} else {
Err("Assistant not found".to_string())
}
}
/// Create thread assistant in database
pub async fn db_create_thread_assistant<R: Runtime>(
app_handle: AppHandle<R>,
thread_id: &str,
assistant: Value,
) -> Result<Value, String> {
let pool = get_pool().await?;
let row = sqlx::query("SELECT data FROM threads WHERE id = ?1")
.bind(thread_id)
.fetch_optional(&pool)
.await
.map_err(|e| format!("Failed to get thread: {}", e))?
.ok_or("Thread not found")?;
let data: String = row.get("data");
let mut thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?;
if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) {
assistants.push(assistant.clone());
} else {
thread["assistants"] = Value::Array(vec![assistant.clone()]);
}
db_modify_thread(app_handle, thread).await?;
Ok(assistant)
}
/// Modify thread assistant in database
pub async fn db_modify_thread_assistant<R: Runtime>(
app_handle: AppHandle<R>,
thread_id: &str,
assistant: Value,
) -> Result<Value, String> {
let pool = get_pool().await?;
let row = sqlx::query("SELECT data FROM threads WHERE id = ?1")
.bind(thread_id)
.fetch_optional(&pool)
.await
.map_err(|e| format!("Failed to get thread: {}", e))?
.ok_or("Thread not found")?;
let data: String = row.get("data");
let mut thread: Value = serde_json::from_str(&data).map_err(|e| e.to_string())?;
let assistant_id = assistant
.get("id")
.and_then(|v| v.as_str())
.ok_or("Missing assistant id")?;
if let Some(assistants) = thread.get_mut("assistants").and_then(|a| a.as_array_mut()) {
if let Some(index) = assistants
.iter()
.position(|a| a.get("id").and_then(|v| v.as_str()) == Some(assistant_id))
{
assistants[index] = assistant.clone();
db_modify_thread(app_handle, thread).await?;
}
}
Ok(assistant)
}
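The `init_database` routine called at startup is not part of this hunk, so the exact schema is not visible here. Below is a minimal sketch of tables that would be consistent with the queries above (JSON blobs in a `data` column, epoch timestamps, and messages cascading away with their thread); table and column names come from the queries, everything else is an assumption and may differ from the real implementation.

use sqlx::SqlitePool;

/// Hypothetical schema setup consistent with the queries above; the real
/// `init_database` may differ.
pub async fn create_tables(pool: &SqlitePool) -> Result<(), String> {
    sqlx::query(
        "CREATE TABLE IF NOT EXISTS threads (
            id TEXT PRIMARY KEY,
            data TEXT NOT NULL,
            created_at INTEGER DEFAULT (strftime('%s', 'now')),
            updated_at INTEGER DEFAULT (strftime('%s', 'now'))
        )",
    )
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;
    // ON DELETE CASCADE only takes effect when the connection has
    // PRAGMA foreign_keys = ON.
    sqlx::query(
        "CREATE TABLE IF NOT EXISTS messages (
            id TEXT PRIMARY KEY,
            thread_id TEXT NOT NULL REFERENCES threads(id) ON DELETE CASCADE,
            data TEXT NOT NULL,
            created_at INTEGER DEFAULT (strftime('%s', 'now'))
        )",
    )
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;
    Ok(())
}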

View File

@ -13,6 +13,11 @@ use super::utils::{get_messages_path, get_thread_metadata_path};
// Global per-thread locks for message file writes
pub static MESSAGE_LOCKS: OnceLock<Mutex<HashMap<String, Arc<Mutex<()>>>>> = OnceLock::new();
/// Check if the platform should use SQLite (mobile platforms)
pub fn should_use_sqlite() -> bool {
cfg!(any(target_os = "android", target_os = "ios"))
}
/// Get a lock for a specific thread to ensure thread-safe message file operations
pub async fn get_lock_for_thread(thread_id: &str) -> Arc<Mutex<()>> {
let locks = MESSAGE_LOCKS.get_or_init(|| Mutex::new(HashMap::new()));
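How the command layer picks between the two storage backends is not shown in this hunk. One plausible shape, given that the `db` module is only compiled on Android/iOS (see the cfg-gated `pub mod db` in the next file), is a compile-time split rather than a runtime `should_use_sqlite()` branch; `list_messages_from_files` below is a made-up name standing in for the existing file-based implementation.

use serde_json::Value;
use tauri::{AppHandle, Runtime};

// Sketch only: mobile builds go through the SQLite-backed helper.
#[cfg(any(target_os = "android", target_os = "ios"))]
pub async fn list_messages_dispatch<R: Runtime>(
    app_handle: AppHandle<R>,
    thread_id: &str,
) -> Result<Vec<Value>, String> {
    super::db::db_list_messages(app_handle, thread_id).await
}

// Sketch only: desktop builds keep the file-based path.
#[cfg(not(any(target_os = "android", target_os = "ios")))]
pub async fn list_messages_dispatch<R: Runtime>(
    app_handle: AppHandle<R>,
    thread_id: &str,
) -> Result<Vec<Value>, String> {
    // Placeholder for the existing file-based helper (not shown here).
    list_messages_from_files(app_handle, thread_id).await
}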

View File

@ -12,6 +12,8 @@
pub mod commands;
mod constants;
#[cfg(any(target_os = "android", target_os = "ios"))]
pub mod db;
pub mod helpers;
pub mod utils;

View File

@ -1,5 +1,7 @@
use super::commands::*;
use super::helpers::should_use_sqlite;
use futures_util::future;
use serde_json::json;
use std::fs;
use std::path::PathBuf;
@ -23,6 +25,32 @@ fn mock_app_with_temp_data_dir() -> (tauri::App<MockRuntime>, PathBuf) {
(app, data_dir)
}
// Helper to create a basic thread
fn create_test_thread(title: &str) -> serde_json::Value {
json!({
"object": "thread",
"title": title,
"assistants": [],
"created": 123,
"updated": 123,
"metadata": null
})
}
// Helper to create a basic message
fn create_test_message(thread_id: &str, content_text: &str) -> serde_json::Value {
json!({
"object": "message",
"thread_id": thread_id,
"role": "user",
"content": [{"type": "text", "text": content_text}],
"status": "sent",
"created_at": 123,
"completed_at": 123,
"metadata": null
})
}
#[tokio::test]
async fn test_create_and_list_threads() {
let (app, data_dir) = mock_app_with_temp_data_dir();
@ -137,3 +165,314 @@ async fn test_create_and_get_thread_assistant() {
// Clean up
let _ = fs::remove_dir_all(data_dir);
}
#[test]
fn test_should_use_sqlite_platform_detection() {
// Test that should_use_sqlite returns correct value based on platform
// On desktop platforms (macOS, Linux, Windows), it should return false
// On mobile platforms (Android, iOS), it should return true
#[cfg(any(target_os = "android", target_os = "ios"))]
{
assert!(should_use_sqlite(), "should_use_sqlite should return true on mobile platforms");
}
#[cfg(not(any(target_os = "android", target_os = "ios")))]
{
assert!(!should_use_sqlite(), "should_use_sqlite should return false on desktop platforms");
}
}
#[tokio::test]
async fn test_desktop_storage_backend() {
// This test verifies that on desktop platforms, the file-based storage is used
#[cfg(not(any(target_os = "android", target_os = "ios")))]
{
let (app, _data_dir) = mock_app_with_temp_data_dir();
// Create a thread
let thread = json!({
"object": "thread",
"title": "Desktop Test Thread",
"assistants": [],
"created": 1234567890,
"updated": 1234567890,
"metadata": null
});
let created = create_thread(app.handle().clone(), thread.clone())
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap().to_string();
// Verify we can retrieve the thread (which proves file storage works)
let threads = list_threads(app.handle().clone()).await.unwrap();
let found = threads.iter().any(|t| t["id"] == thread_id);
assert!(found, "Thread should be retrievable from file-based storage");
// Create a message
let message = json!({
"object": "message",
"thread_id": thread_id,
"role": "user",
"content": [],
"status": "sent",
"created_at": 123,
"completed_at": 123,
"metadata": null
});
let _created_msg = create_message(app.handle().clone(), message).await.unwrap();
// Verify we can retrieve the message (which proves file storage works)
let messages = list_messages(app.handle().clone(), thread_id.clone())
.await
.unwrap();
assert_eq!(messages.len(), 1, "Message should be retrievable from file-based storage");
// Clean up - get the actual data directory used by the app
use super::utils::get_data_dir;
let actual_data_dir = get_data_dir(app.handle().clone());
let _ = fs::remove_dir_all(actual_data_dir);
}
}
#[tokio::test]
async fn test_modify_and_delete_thread() {
let (app, data_dir) = mock_app_with_temp_data_dir();
// Create a thread
let thread = json!({
"object": "thread",
"title": "Original Title",
"assistants": [],
"created": 1234567890,
"updated": 1234567890,
"metadata": null
});
let created = create_thread(app.handle().clone(), thread.clone())
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap().to_string();
// Modify the thread
let mut modified_thread = created.clone();
modified_thread["title"] = json!("Modified Title");
modify_thread(app.handle().clone(), modified_thread.clone())
.await
.unwrap();
// Verify modification by listing threads
let threads = list_threads(app.handle().clone()).await.unwrap();
let found_thread = threads.iter().find(|t| t["id"] == thread_id);
assert!(found_thread.is_some(), "Modified thread should exist");
assert_eq!(found_thread.unwrap()["title"], "Modified Title");
// Delete the thread
delete_thread(app.handle().clone(), thread_id.clone())
.await
.unwrap();
// Verify deletion
#[cfg(not(any(target_os = "android", target_os = "ios")))]
{
let thread_dir = data_dir.join(&thread_id);
assert!(!thread_dir.exists(), "Thread directory should be deleted");
}
// Clean up
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_modify_and_delete_message() {
let (app, data_dir) = mock_app_with_temp_data_dir();
// Create a thread
let thread = json!({
"object": "thread",
"title": "Message Test Thread",
"assistants": [],
"created": 123,
"updated": 123,
"metadata": null
});
let created = create_thread(app.handle().clone(), thread.clone())
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap().to_string();
// Create a message
let message = json!({
"object": "message",
"thread_id": thread_id,
"role": "user",
"content": [{"type": "text", "text": "Original content"}],
"status": "sent",
"created_at": 123,
"completed_at": 123,
"metadata": null
});
let created_msg = create_message(app.handle().clone(), message).await.unwrap();
let message_id = created_msg["id"].as_str().unwrap().to_string();
// Modify the message
let mut modified_msg = created_msg.clone();
modified_msg["content"] = json!([{"type": "text", "text": "Modified content"}]);
modify_message(app.handle().clone(), modified_msg.clone())
.await
.unwrap();
// Verify modification
let messages = list_messages(app.handle().clone(), thread_id.clone())
.await
.unwrap();
assert_eq!(messages.len(), 1);
assert_eq!(messages[0]["content"][0]["text"], "Modified content");
// Delete the message
delete_message(app.handle().clone(), thread_id.clone(), message_id.clone())
.await
.unwrap();
// Verify deletion
let messages = list_messages(app.handle().clone(), thread_id.clone())
.await
.unwrap();
assert_eq!(messages.len(), 0, "Message should be deleted");
// Clean up
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_modify_thread_assistant() {
let (app, data_dir) = mock_app_with_temp_data_dir();
let app_handle = app.handle().clone();
let created = create_thread(app_handle.clone(), create_test_thread("Assistant Mod Thread"))
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap();
let assistant = json!({
"id": "assistant-1",
"assistant_name": "Original Assistant",
"model": {"id": "model-1", "name": "Test Model"}
});
create_thread_assistant(app_handle.clone(), thread_id.to_string(), assistant.clone())
.await
.unwrap();
let mut modified_assistant = assistant;
modified_assistant["assistant_name"] = json!("Modified Assistant");
modify_thread_assistant(app_handle.clone(), thread_id.to_string(), modified_assistant)
.await
.unwrap();
let retrieved = get_thread_assistant(app_handle, thread_id.to_string())
.await
.unwrap();
assert_eq!(retrieved["assistant_name"], "Modified Assistant");
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_thread_not_found_errors() {
let (app, data_dir) = mock_app_with_temp_data_dir();
let app_handle = app.handle().clone();
let fake_thread_id = "non-existent-thread-id".to_string();
let assistant = json!({"id": "assistant-1", "assistant_name": "Test Assistant"});
assert!(get_thread_assistant(app_handle.clone(), fake_thread_id.clone()).await.is_err());
assert!(create_thread_assistant(app_handle.clone(), fake_thread_id.clone(), assistant.clone()).await.is_err());
assert!(modify_thread_assistant(app_handle, fake_thread_id, assistant).await.is_err());
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_message_without_id_gets_generated() {
let (app, data_dir) = mock_app_with_temp_data_dir();
let app_handle = app.handle().clone();
let created = create_thread(app_handle.clone(), create_test_thread("Message ID Test"))
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap();
let message = json!({"object": "message", "thread_id": thread_id, "role": "user", "content": [], "status": "sent"});
let created_msg = create_message(app_handle, message).await.unwrap();
assert!(created_msg["id"].as_str().is_some_and(|id| !id.is_empty()));
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_concurrent_message_operations() {
let (app, data_dir) = mock_app_with_temp_data_dir();
let app_handle = app.handle().clone();
let created = create_thread(app_handle.clone(), create_test_thread("Concurrent Test"))
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap().to_string();
let handles: Vec<_> = (0..5)
.map(|i| {
let app_h = app_handle.clone();
let tid = thread_id.clone();
tokio::spawn(async move {
create_message(app_h, create_test_message(&tid, &format!("Message {}", i))).await
})
})
.collect();
let results = future::join_all(handles).await;
assert!(results.iter().all(|r| r.is_ok() && r.as_ref().unwrap().is_ok()));
let messages = list_messages(app_handle, thread_id).await.unwrap();
assert_eq!(messages.len(), 5);
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_empty_thread_list() {
let (app, data_dir) = mock_app_with_temp_data_dir();
// Clean up any leftover test data
let test_data_threads = std::env::current_dir()
.unwrap_or_else(|_| PathBuf::from("."))
.join("test-data")
.join("threads");
let _ = fs::remove_dir_all(&test_data_threads);
let threads = list_threads(app.handle().clone()).await.unwrap();
assert_eq!(threads.len(), 0);
let _ = fs::remove_dir_all(data_dir);
}
#[tokio::test]
async fn test_empty_message_list() {
let (app, data_dir) = mock_app_with_temp_data_dir();
let app_handle = app.handle().clone();
let created = create_thread(app_handle.clone(), create_test_thread("Empty Messages Test"))
.await
.unwrap();
let thread_id = created["id"].as_str().unwrap();
let messages = list_messages(app_handle, thread_id.to_string()).await.unwrap();
assert_eq!(messages.len(), 0);
let _ = fs::remove_dir_all(data_dir);
}

View File

@ -182,7 +182,20 @@ pub fn run() {
use tauri_plugin_deep_link::DeepLinkExt;
app.deep_link().register_all()?;
}
// Initialize SQLite database for mobile platforms
#[cfg(any(target_os = "android", target_os = "ios"))]
{
let app_handle = app.handle().clone();
tauri::async_runtime::spawn(async move {
if let Err(e) = crate::core::threads::db::init_database(&app_handle).await {
log::error!("Failed to initialize mobile database: {}", e);
}
});
}
setup_mcp(app);
setup::setup_theme_listener(app)?;
Ok(())
})
.build(tauri::generate_context!())

View File

@ -2,7 +2,9 @@
"identifier": "jan.ai.app",
"build": {
"devUrl": null,
"frontendDist": "../web-app/dist"
"frontendDist": "../web-app/dist",
"beforeDevCommand": "cross-env IS_DEV=true IS_ANDROID=true yarn build:web",
"beforeBuildCommand": "cross-env IS_ANDROID=true yarn build:web"
},
"app": {
"security": {
@ -11,7 +13,11 @@
},
"plugins": {},
"bundle": {
"resources": ["resources/LICENSE"],
"active": true,
"resources": [
"resources/pre-install/**/*",
"resources/LICENSE"
],
"externalBin": [],
"android": {
"minSdkVersion": 24

File diff suppressed because it is too large

View File

@ -40,7 +40,7 @@
}
],
"security": {
"capabilities": ["default"],
"capabilities": ["default", "logs-app-window", "logs-window", "system-monitor-window"],
"csp": {
"default-src": "'self' customprotocol: asset: http://localhost:* http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:*",
"connect-src": "ipc: http://ipc.localhost http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:* https: http:",

View File

@ -1,9 +1,11 @@
{
"identifier": "jan.ai.app.ios",
"build": {
"devUrl": null,
"frontendDist": "../web-app/dist"
"frontendDist": "../web-app/dist",
"beforeDevCommand": "cross-env IS_DEV=true IS_IOS=true yarn build:web",
"beforeBuildCommand": "cross-env IS_IOS=true yarn build:web"
},
"identifier": "jan.ai.app",
"app": {
"security": {
"capabilities": ["mobile"]
@ -15,7 +17,10 @@
"iOS": {
"developmentTeam": "<DEVELOPMENT_TEAM_ID>"
},
"resources": ["resources/LICENSE"],
"resources": [
"resources/pre-install/**/*",
"resources/LICENSE"
],
"externalBin": []
}
}

View File

@ -1,7 +1,12 @@
{
"app": {
"security": {
"capabilities": ["desktop", "system-monitor-window"]
"capabilities": [
"desktop",
"system-monitor-window",
"log-app-window",
"logs-window"
]
}
},
"bundle": {

View File

@ -1,7 +1,12 @@
{
"app": {
"security": {
"capabilities": ["desktop", "system-monitor-window"]
"capabilities": [
"desktop",
"system-monitor-window",
"log-app-window",
"logs-window"
]
}
},
"bundle": {

View File

@ -1,23 +1,23 @@
{
"app": {
"security": {
"capabilities": ["desktop"]
"capabilities": [
"desktop",
"system-monitor-window",
"log-app-window",
"logs-window"
]
}
},
"bundle": {
"targets": ["nsis"],
"targets": ["nsis", "msi"],
"resources": [
"resources/pre-install/**/*",
"resources/lib/vc_redist.x64.exe",
"resources/LICENSE"
],
"externalBin": ["resources/bin/bun", "resources/bin/uv"],
"windows": {
"nsis": {
"installerHooks": "./windows/hooks.nsh",
"installerIcon": "icons/icon.ico"
},
"webviewInstallMode": {
"silent": true,
"type": "downloadBootstrapper"

View File

@ -8,7 +8,6 @@ base64 = "0.22"
hmac = "0.12"
log = { version = "0.4", optional = true }
rand = "0.8"
reqwest = { version = "0.11", features = ["json"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10"
@ -16,6 +15,12 @@ tokio = { version = "1", features = ["process", "fs", "macros", "rt"] }
tokio-util = "0.7.14"
url = "2.5"
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
reqwest = { version = "0.11", features = ["json", "native-tls"] }
[target.'cfg(any(target_os = "android", target_os = "ios"))'.dependencies]
reqwest = { version = "0.11", features = ["json", "rustls-tls"], default-features = false }
[target.'cfg(windows)'.dependencies]
windows-sys = { version = "0.60.2", features = ["Win32_Storage_FileSystem"] }
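Splitting reqwest per target like this keeps the `json` feature everywhere but swaps the TLS stack (native-tls on desktop, rustls on Android/iOS, presumably to avoid building OpenSSL for mobile) without touching any call sites. A minimal sketch of a request that compiles the same way under either feature set; the function name and URL are illustrative, not taken from the codebase:

use serde_json::Value;

// Same reqwest API on every platform; only the TLS backend differs.
pub async fn fetch_json(url: &str) -> Result<Value, String> {
    reqwest::Client::new()
        .get(url)
        .send()
        .await
        .map_err(|e| e.to_string())?
        .json::<Value>()
        .await
        .map_err(|e| e.to_string())
}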

View File

@ -1,65 +0,0 @@
!macro NSIS_HOOK_POSTINSTALL
; Check if Visual C++ Redistributable is already installed
ReadRegStr $0 HKLM "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version"
${If} $0 == ""
; Try alternative registry location
ReadRegStr $0 HKLM "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" "Version"
${EndIf}
${If} $0 == ""
; VC++ Redistributable not found, need to install
DetailPrint "Visual C++ Redistributable not found, installing from bundled file..."
; Install from bundled EXE if not installed
${If} ${FileExists} "$INSTDIR\resources\lib\vc_redist.x64.exe"
DetailPrint "Installing Visual C++ Redistributable..."
; Copy to TEMP folder and then execute installer
CopyFiles "$INSTDIR\resources\lib\vc_redist.x64.exe" "$TEMP\vc_redist.x64.exe"
ExecWait '"$TEMP\vc_redist.x64.exe" /quiet /norestart' $1
; Check whether installation process exited successfully (code 0) or not
${If} $1 == 0
DetailPrint "Visual C++ Redistributable installed successfully"
${ElseIf} $1 == 1638
DetailPrint "Visual C++ Redistributable already installed (newer version)"
${ElseIf} $1 == 3010
DetailPrint "Visual C++ Redistributable installed successfully (restart required)"
${Else}
DetailPrint "Visual C++ installation failed with exit code: $1"
${EndIf}
; Clean up setup files from TEMP and your installed app
Delete "$TEMP\vc_redist.x64.exe"
Delete "$INSTDIR\resources\lib\vc_redist.x64.exe"
${Else}
DetailPrint "Visual C++ Redistributable not found at expected location: $INSTDIR\resources\lib\vc_redist.x64.exe"
${EndIf}
${Else}
DetailPrint "Visual C++ Redistributable already installed (version: $0)"
${EndIf}
; ---- Copy LICENSE to install root ----
${If} ${FileExists} "$INSTDIR\resources\LICENSE"
CopyFiles /SILENT "$INSTDIR\resources\LICENSE" "$INSTDIR\LICENSE"
DetailPrint "Copied LICENSE to install root"
; Optional cleanup - remove from resources folder
Delete "$INSTDIR\resources\LICENSE"
${Else}
DetailPrint "LICENSE not found at expected location: $INSTDIR\resources\LICENSE"
${EndIf}
; ---- Copy vulkan-1.dll to install root ----
${If} ${FileExists} "$INSTDIR\resources\lib\vulkan-1.dll"
CopyFiles /SILENT "$INSTDIR\resources\lib\vulkan-1.dll" "$INSTDIR\vulkan-1.dll"
DetailPrint "Copied vulkan-1.dll to install root"
; Optional cleanup - remove from resources folder
Delete "$INSTDIR\resources\lib\vulkan-1.dll"
; Only remove the lib directory if it's empty after removing both files
RMDir "$INSTDIR\resources\lib"
${Else}
DetailPrint "vulkan-1.dll not found at expected location: $INSTDIR\resources\lib\vulkan-1.dll"
${EndIf}
!macroend

View File

@ -16,7 +16,7 @@ Before testing, set-up the following in the old version to make sure that we can
- [ ] Change the `App Data` to some other folder
- [ ] Create a Custom Provider
- [ ] Disable some model providers
- [NEW] Change llama.cpp setting of 2 models
- [ ] Change llama.cpp setting of 2 models
#### Validate that the update does not corrupt existing user data or settings (before and after update show the same information):
- [ ] Threads
- [ ] Previously used model and assistants are shown correctly
@ -73,35 +73,44 @@ Before testing, set-up the following in the old version to make sure that we can
- [ ] Ensure that when this value is changed, there is no broken UI caused by it
- [ ] Code Block
- [ ] Show Line Numbers
- [ENG] Ensure that when clicking on `Reset` in the `Appearance` section, it resets back to the default values
- [ENG] Ensure that when clicking on `Reset` in the `Code Block` section, it resets back to the default values
- [ ] [0.7.0] Compact Token Counter shows the token counter inside the chat input when toggled; otherwise a small token counter is shown below the chat input
- [ ] [ENG] Ensure that when clicking on `Reset` in the `Appearance` section, it resets back to the default values
- [ ] [ENG] Ensure that when clicking on `Reset` in the `Code Block` section, it resets back to the default values
#### In `Model Providers`:
In `Llama.cpp`:
- [ ] After downloading a model from hub, the model is listed with the correct name under `Models`
- [ ] Can import `gguf` model with no error
- [ ] [0.7.0] While importing, an import indicator should appear under `Models`
- [ ] Imported model will be listed with the correct name under `Models`
- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works.
- [ ] [0.6.10] Can import vlm models and chat with images
- [ ] [0.6.10] Importing a file that is not an `mmproj` in the `mmproj` field should show a validation error
- [ ] [0.6.10] Importing an `mmproj` from a different model should error
- [ ] [0.7.0] Users can customize model display names according to their own preferences.
- [ ] Check that when click `delete` the model will be removed from the list
- [ ] Deleted model doesn't appear in the selectable models section in chat input (even in old threads that used the model previously)
- [ ] Ensure that user can re-import deleted imported models
- [ ] [0.6.8] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users.
- [ ] [0.6.10] Change to an older version of llama.cpp backend. Click on `Check for Llamacpp Updates` it should alert that there is a new version.
- [ ] [0.7.0] Users can cancel a backend download while it is in progress.
- [ ] [0.6.10] Try `Install backend from file` for a backend and it should show as an option for backend
- [ ] [0.7.0] User can install a backend from file in both .tar.gz and .zip formats, and the backend appears in the backend selection menu
- [ ] [0.7.0] A manually installed backend is automatically selected after import, and the backend menu updates to show it as the latest imported backend.
- [ ] Enable `Auto-Unload Old Models`, and ensure that only one model can run / start at a time. If there are two models running at the time it is enabled, both of them will be stopped.
- [ ] Disable `Auto-Unload Old Models`, and ensure that multiple models can run at the same time.
- [ ] Enable `Context Shift` and ensure that the context can run for a long time without encountering a memory error. Use the `banana test` by turning on the fetch MCP => ask a local model to fetch and summarize the history of the banana (the banana has a very long history on wiki, it turns out). It should run out of context memory sufficiently fast if `Context Shift` is not enabled.
In `Model Settings`:
- [ ] [0.6.8] Ensure that user can change the Jinja chat template of individual model and it doesn't affect the template of other model
- [ ] [0.6.8] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users.
- [ ] [0.6.8] Ensure we can override Tensor Buffer Type in the model settings to offload layers between GPU and CPU => Download any MoE model (e.g., gpt-oss-20b) => Set tensor buffer type as `blk\\.([0-30]*[02468])\\.ffn_.*_exps\\.=CPU` => check that those tensors are on CPU and run inference (you can check whether app.log contains `--override-tensor", "blk\\\\.([0-30]*[02468])\\\\.ffn_.*_exps\\\\.=CPU`)
- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works.
- [ ] [0.6.10] Can import vlm models and chat with images
- [ ] [0.6.10] Import model on mmproj field should show validation error
- [ ] [0.6.10] Import mmproj from different models should not be able to chat with the models
- [ ] [0.6.10] Change to an older version of llama.cpp backend. Click on `Check for Llamacpp Updates` it should alert that there is a new version.
- [ ] [0.6.10] Try `Install backend from file` for a backend and it should show as an option for backend
In Remote Model Providers:
- [ ] Check that the following providers are present:
- [ ] OpenAI
- [ ] Anthropic
- [ ] [0.7.0] Azure
- [ ] Cohere
- [ ] OpenRouter
- [ ] Mistral
@ -113,12 +122,15 @@ In Remote Model Providers:
- [ ] Delete a model and ensure that it doesn't show up in the `Models` list view or in the selectable dropdown in chat input.
- [ ] Ensure that a deleted model is also not selectable and does not appear in old threads that used it.
- [ ] Manually adding a new model works and the user can chat with the newly added model without error (you can add back the model you just deleted for testing)
- [ ] [0.6.9] Make sure that Ollama set up as a custom provider works with Jan
- [ ] [0.7.0] Vision capabilities are now automatically detected for vision models
- [ ] [0.7.0] New default models are available for adding to remote providers through a drop down (OpenAI, Mistral, Groq)
In Custom Providers:
- [ ] Ensure that the user can create a new custom provider with the right baseURL and API key.
- [ ] Clicking `Refresh` should retrieve a list of available models from the Custom Provider.
- [ ] User can chat with the custom providers
- [ ] Ensure that Custom Providers can be deleted and won't reappear in a new session
- [ ] [0.6.9] Make sure that Ollama set up as a custom provider works with Jan
In general:
- [ ] Disabled Model Provider should not show up as selectable in chat input of new thread and old thread alike (old threads' chat input should show `Select Model` instead of disabled model)
@ -162,9 +174,10 @@ Ensure that the following section information show up for hardware
- [ ] When the user clicks `Always Allow` on the pop up, the tool will retain permission and won't ask for confirmation again. (this applies at an individual tool level, not at the MCP server level)
- [ ] If `Allow All MCP Tool Permissions` is enabled, in every new thread, there should not be any confirmation dialog pop up when a tool is called.
- [ ] When the pop-up appears, make sure that the `Tool Parameters` are also shown in detail in the pop-up
- [ ] [0.6.9] Go to Enter JSON configuration when created a new MCp => paste the JSON config inside => click `Save` => server works
- [ ] [0.6.9] Go to Enter JSON configuration when created a new MCP => paste the JSON config inside => click `Save` => server works
- [ ] [0.6.9] If an individual JSON config is malformed, the MCP server should not be activated
- [ ] [0.6.9] Make sure that MCP server can be used with streamable-http transport => connect to Smithery and test MCP server
- [ ] [0.7.0] When deleting an MCP Server, a toast notification is shown
#### In `Local API Server`:
- [ ] User can `Start Server` and chat with the default endpoint
@ -175,7 +188,8 @@ Ensure that the following section information show up for hardware
- [ ] [0.6.9] When using the startup configuration, the last used model is also automatically started (users do not have to manually start a model before starting the server)
- [ ] [0.6.9] Make sure that you can send an image to a Local API Server and it also works (can set up Local API Server as a Custom Provider in Jan to test)
- [ ] [0.6.10] Make sure you are still able to see the API key while the local server status is running
- [ ] [0.7.0] Users can see the Jan API Server Swagger UI by opening the following path in their browser `http://<ip>:<port>`
- [ ] [0.7.0] Users can set the trusted host to * in the server configuration to accept requests from any host or requests without a host
#### In `HTTPS Proxy`:
- [ ] Model download request goes through proxy endpoint
@ -188,6 +202,7 @@ Ensure that the following section information show up for hardware
- [ ] Clicking download work inside the Model card HTML
- [ ] [0.6.9] Check that the model recommendation based on user hardware works as expected in the Model Hub
- [ ] [0.6.10] Check that model of the same name but different author can be found in the Hub catalog (test with [https://huggingface.co/unsloth/Qwen3-4B-Thinking-2507-GGUF](https://huggingface.co/unsloth/Qwen3-4B-Thinking-2507-GGUF))
- [ ] [0.7.0] Support downloading models with the same name from different authors, models not listed on the hub will be prefixed with the author name
## D. Threads
@ -214,19 +229,30 @@ Ensure that the following section information show up for hardware
- [ ] User can send message with different type of text content (e.g text, emoji, ...)
- [ ] When request model to generate a markdown table, the table is correctly formatted as returned from the model.
- [ ] When the model generates code, ensure that the code snippets are properly formatted according to the `Appearance -> Code Block` setting.
- [ ] [0.7.0] LaTeX formulas now render correctly in chat. Both inline \(...\) and block \[...\] formats are supported. Code blocks and HTML tags are not affected
- [ ] Users can edit their old messages and regenerate the answer based on the new message
- [ ] User can click `Copy` to copy the model response
- [ ] [0.6.10] When click on copy code block from model generation, it will only copy one code-block at a time instead of multiple code block at once
- [ ] User can click `Delete` to delete either the user message or the model response.
- [ ] The token speed appears while a response from the model is being generated and the final value is shown under the response.
- [ ] Make sure that when a user uses an IME keyboard to type Chinese or Japanese characters and presses `Enter`, the `Send` button doesn't trigger automatically after each word.
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a remote model
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a local model
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a Remote model & Local model
- [ ] [0.6.9] Check that you can paste an image to text box from your system clipboard (Copy - Paste)
- [ ] [0.6.9] Make sure that user can favourite a model in the llama.cpp list and see the favourite model selection in chat input
- [ ] [0.6.10] User can Paste (e.g Ctrl + v) text into chat input when it is a vision model
- [ ] [0.6.9] Make sure that user can favourite a model in the Model list and see the favourite model selection in chat input
- [ ] [0.6.10] User can click the model's settings on chat, enable Auto-Optimize Settings, and continue chatting with the model without interruption.
- [ ] Verify this works with at least two models of different sizes (e.g., 1B and 7B).
- [ ] [0.6.10] User can Paste (e.g Ctrl + v) text into chat input when it is a vision model
- [ ] [0.6.10] When click on copy code block from model generation, it will only copy one code-block at a time instead of multiple code block at once
- [ ] [0.7.0] When chatting with a model, the UI displays a token usage counter showing the percentage of context consumed.
- [ ] [0.7.0] When chatting with a model, the scroll no longer follows the model's streaming response; it only auto-scrolls when the user sends a new message
#### In Project
- [ ] [0.7.0] User can create new project
- [ ] [0.7.0] User can add existing threads to a project
- [ ] [0.7.0] When the user attempts to delete a project, a confirmation dialog must appear warning that this action will permanently delete the project and all its associated threads.
- [ ] [0.7.0] The user can successfully delete a project, and all threads contained within that project are also permanently deleted.
- [ ] [0.7.0] A thread that already belongs to a project cannot be re-added to the same project.
- [ ] [0.7.0] Favorited threads retain their "favorite" status even after being added to a project
## E. Assistants
- [ ] There is always at least one default Assistant which is Jan
- [ ] The default Jan assistant has `stream = True` by default
@ -238,6 +264,7 @@ Ensure that the following section information show up for hardware
In `Settings -> General`:
- [ ] Change the location of the `App Data` to some other path that is not the default path
- [ ] [0.7.0] Users cannot set the data location to root directories (e.g., C:\, D:\ on Windows), but can select subfolders within those drives (e.g., C:\data, D:\data)
- [ ] Click on `Reset` button in `Other` to factory reset the app:
- [ ] All threads deleted
- [ ] All Assistants deleted except for the default Jan Assistant

View File

@ -17,7 +17,7 @@
<link rel="apple-touch-icon" href="/images/jan-logo.png" />
<meta
name="viewport"
content="width=device-width, initial-scale=1.0, user-scalable=no, maximum-scale=1.0, minimum-scale=1.0, viewport-fit=cover, interactive-widget=resizes-visual"
content="width=device-width, initial-scale=1.0, user-scalable=no, maximum-scale=1.0, minimum-scale=1.0, viewport-fit=cover"
/>
<title>Jan</title>
<!-- INJECT_GOOGLE_ANALYTICS -->

View File

@ -82,7 +82,7 @@
"remark-math": "6.0.0",
"sonner": "2.0.5",
"tailwindcss": "4.1.4",
"token.js": "npm:token.js-fork@0.7.27",
"token.js": "npm:token.js-fork@0.7.29",
"tw-animate-css": "1.2.8",
"ulidx": "2.4.1",
"unified": "11.0.5",

View File

@ -41,7 +41,7 @@ function DropdownMenuContent({
data-slot="dropdown-menu-content"
sideOffset={sideOffset}
className={cn(
'bg-main-view select-none text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-[51] max-h-(--radix-dropdown-menu-content-available-height) min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md',
'bg-main-view select-none text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 max-h-(--radix-dropdown-menu-content-available-height) min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-x-hidden overflow-y-auto rounded-md border p-1 shadow-md z-[90]',
className
)}
{...props}
@ -229,7 +229,7 @@ function DropdownMenuSubContent({
<DropdownMenuPrimitive.SubContent
data-slot="dropdown-menu-sub-content"
className={cn(
'bg-main-view text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-[51] min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-hidden rounded-md border p-1 shadow-lg',
'bg-main-view text-main-view-fg border-main-view-fg/5 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-[51] min-w-[8rem] origin-(--radix-dropdown-menu-content-transform-origin) overflow-y-auto max-h-[var(--radix-dropdown-menu-content-available-height)] rounded-md border p-1 shadow-lg',
className
)}
{...props}

View File

@ -96,7 +96,7 @@ export const predefinedProviders = [
{
active: true,
api_key: '',
base_url: 'https://api.anthropic.com',
base_url: 'https://api.anthropic.com/v1',
provider: 'anthropic',
explore_models_url:
'https://docs.anthropic.com/en/docs/about-claude/models',
@ -127,11 +127,21 @@ export const predefinedProviders = [
},
],
models: [],
custom_header: [
{
header: 'anthropic-version',
value: '2023-06-01'
},
{
header: 'anthropic-dangerous-direct-browser-access',
value: 'true'
}
]
},
{
active: true,
api_key: '',
base_url: 'https://api.cohere.ai/compatibility/v1',
base_url: 'https://api.cohere.ai/v1',
explore_models_url: 'https://docs.cohere.com/v2/docs/models',
provider: 'cohere',
settings: [

View File

@ -47,6 +47,8 @@ import { open } from '@tauri-apps/plugin-dialog'
import { toast } from 'sonner'
import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
import { useAnalytic } from '@/hooks/useAnalytic'
import posthog from 'posthog-js'
type ChatInputProps = {
className?: string
@ -95,6 +97,7 @@ const ChatInput = ({
const selectedModel = useModelProvider((state) => state.selectedModel)
const selectedProvider = useModelProvider((state) => state.selectedProvider)
const sendMessage = useChat()
const { productAnalytic } = useAnalytic()
const [message, setMessage] = useState('')
const [dropdownToolsAvailable, setDropdownToolsAvailable] = useState(false)
const [tooltipToolsAvailable, setTooltipToolsAvailable] = useState(false)
@ -153,7 +156,10 @@ const ChatInput = ({
const activeModels = await serviceHub
.models()
.getActiveModels('llamacpp')
setHasActiveModels(activeModels.length > 0)
const hasMatchingActiveModel = activeModels.some(
(model) => String(model) === selectedModel?.id
)
setHasActiveModels(activeModels.length > 0 && hasMatchingActiveModel)
} catch (error) {
console.error('Failed to get active models:', error)
setHasActiveModels(false)
@ -166,7 +172,7 @@ const ChatInput = ({
const intervalId = setInterval(checkActiveModels, 3000)
return () => clearInterval(intervalId)
}, [serviceHub])
}, [serviceHub, selectedModel?.id])
// Check for mmproj existence or vision capability when model changes
useEffect(() => {
@ -197,8 +203,7 @@ const ChatInput = ({
const mcpExtension = extensionManager.get<MCPExtension>(ExtensionTypeEnum.MCP)
const MCPToolComponent = mcpExtension?.getToolComponent?.()
const handleSendMesage = async (prompt: string) => {
const handleSendMessage = async (prompt: string) => {
if (!selectedModel) {
setMessage('Please select a model to start chatting.')
return
@ -234,6 +239,19 @@ const ChatInput = ({
setIngestingDocs(false)
}
setMessage('')
// Track message send event with PostHog (only if product analytics is enabled)
if (productAnalytic && selectedModel && selectedProvider) {
try {
posthog.capture('message_sent', {
model_provider: selectedProvider,
model_id: selectedModel.id,
})
} catch (error) {
console.debug('Failed to track message send event:', error)
}
}
sendMessage(
prompt,
true,
@ -720,7 +738,7 @@ const ChatInput = ({
) {
e.preventDefault()
// Submit the message when Enter is pressed without Shift
handleSendMesage(prompt)
handleSendMessage(prompt)
// When Shift+Enter is pressed, a new line is added (default behavior)
}
}}
@ -989,7 +1007,7 @@ const ChatInput = ({
ingestingDocs
}
data-test-id="send-message-button"
onClick={() => handleSendMesage(prompt)}
onClick={() => handleSendMessage(prompt)}
>
{streamingContent || ingestingDocs ? (
<span className="animate-spin h-4 w-4 border-2 border-current border-t-transparent rounded-full" />

View File

@ -1,4 +1,4 @@
import { useAppearance, isDefaultColor } from '@/hooks/useAppearance'
import { useAppearance, useBlurSupport } from '@/hooks/useAppearance'
import { cn } from '@/lib/utils'
import { RgbaColor, RgbaColorPicker } from 'react-colorful'
import { IconColorPicker } from '@tabler/icons-react'
@ -14,6 +14,12 @@ export function ColorPickerAppBgColor() {
const { appBgColor, setAppBgColor } = useAppearance()
const { isDark } = useTheme()
const { t } = useTranslation()
const showAlphaSlider = useBlurSupport()
// Helper to get alpha value based on blur support
const getAlpha = (defaultAlpha: number) => {
return showAlphaSlider ? defaultAlpha : 1
}
const predefineAppBgColor: RgbaColor[] = [
isDark
@ -21,41 +27,50 @@ export function ColorPickerAppBgColor() {
r: 25,
g: 25,
b: 25,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.4,
a: getAlpha(0.4),
}
: {
r: 255,
g: 255,
b: 255,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.4,
a: getAlpha(0.4),
},
{
r: 70,
g: 79,
b: 229,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5,
a: getAlpha(0.5),
},
{
r: 238,
g: 130,
b: 238,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5,
a: getAlpha(0.5),
},
{
r: 255,
g: 99,
b: 71,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5,
a: getAlpha(0.5),
},
{
r: 255,
g: 165,
b: 0,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.5,
a: getAlpha(0.5),
},
]
// Check if a color is the default color (considering both dark and light themes)
const isColorDefault = (color: RgbaColor): boolean => {
const isDarkDefault = color.r === 25 && color.g === 25 && color.b === 25
const isLightDefault = color.r === 255 && color.g === 255 && color.b === 255
// Accept both 0.4 and 1 as valid default alpha values (handles blur detection timing)
const hasDefaultAlpha = Math.abs(color.a - 0.4) < 0.01 || Math.abs(color.a - 1) < 0.01
return (isDarkDefault || isLightDefault) && hasDefaultAlpha
}
return (
<div className="flex items-center gap-1.5">
{predefineAppBgColor.map((item, i) => {
@ -63,13 +78,13 @@ export function ColorPickerAppBgColor() {
(item.r === appBgColor.r &&
item.g === appBgColor.g &&
item.b === appBgColor.b &&
item.a === appBgColor.a) ||
(isDefaultColor(appBgColor) && isDefaultColor(item))
Math.abs(item.a - appBgColor.a) < 0.01) ||
(isColorDefault(appBgColor) && isColorDefault(item))
return (
<div
key={i}
className={cn(
'size-4 rounded-full border border-main-view-fg/20',
'size-4 rounded-full border border-main-view-fg/20 cursor-pointer',
isSelected && 'ring-2 ring-accent border-none'
)}
onClick={() => {

View File

@ -33,7 +33,7 @@ const DropdownAssistant = () => {
return (
<>
<DropdownMenu open={dropdownOpen} onOpenChange={setDropdownOpen}>
<div className="flex items-center justify-between gap-2 bg-main-view-fg/5 py-1 hover:bg-main-view-fg/8 px-2 rounded-sm">
<div className="inline-flex items-center justify-between gap-2 bg-main-view-fg/5 py-1 hover:bg-main-view-fg/8 px-2 rounded-sm">
<DropdownMenuTrigger asChild>
<button className="font-medium cursor-pointer flex items-center gap-1.5 relative z-20 max-w-40">
<div className="text-main-view-fg/80 flex items-center gap-1">

View File

@ -24,6 +24,7 @@ import { predefinedProviders } from '@/consts/providers'
import { useServiceHub } from '@/hooks/useServiceHub'
import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
import { getLastUsedModel } from '@/utils/getModelToStart'
type DropdownModelProviderProps = {
model?: ThreadModel
@ -39,16 +40,6 @@ interface SearchableModel {
}
// Helper functions for localStorage
const getLastUsedModel = (): { provider: string; model: string } | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
const setLastUsedModel = (provider: string, model: string) => {
try {
localStorage.setItem(
@ -199,8 +190,19 @@ const DropdownModelProvider = ({
return
}
}
// Fallback: auto-select first llamacpp model if available
const llamacppProvider = providers.find(
(p) => p.provider === 'llamacpp' && p.active && p.models.length > 0
)
if (llamacppProvider && llamacppProvider.models.length > 0) {
const firstModel = llamacppProvider.models[0]
selectModelProvider('llamacpp', firstModel.id)
setLastUsedModel('llamacpp', firstModel.id)
} else {
selectModelProvider('', '')
}
}
} else {
// Get current state for web auto-selection check
const currentState = { selectedModel, selectedProvider }
@ -228,12 +230,11 @@ const DropdownModelProvider = ({
selectModelProvider,
updateCurrentThreadModel,
providers,
useLastUsedModel,
checkModelExists,
updateProvider,
getProviderByName,
checkAndUpdateModelVisionCapability,
serviceHub,
// selectedModel and selectedProvider intentionally excluded to prevent race conditions
])
@ -326,7 +327,8 @@ const DropdownModelProvider = ({
// Create Fzf instance for fuzzy search
const fzfInstance = useMemo(() => {
return new Fzf(searchableItems, {
selector: (item) => `${getModelDisplayName(item.model)} ${item.model.id}`.toLowerCase(),
selector: (item) =>
`${getModelDisplayName(item.model)} ${item.model.id}`.toLowerCase(),
})
}, [searchableItems])
@ -404,12 +406,10 @@ const DropdownModelProvider = ({
})
// Store the selected model as last used
if (useLastUsedModel) {
setLastUsedModel(
searchableModel.provider.provider,
searchableModel.model.id
)
}
// Check mmproj existence for llamacpp models (async, don't block UI)
if (searchableModel.provider.provider === 'llamacpp') {
@ -443,7 +443,6 @@ const DropdownModelProvider = ({
[
selectModelProvider,
updateCurrentThreadModel,
useLastUsedModel,
updateProvider,
getProviderByName,
checkAndUpdateModelVisionCapability,
@ -461,7 +460,7 @@ const DropdownModelProvider = ({
return (
<Popover open={open} onOpenChange={onOpenChange}>
<div className="flex items-center gap-1.5 mr-2">
<div className="bg-main-view-fg/5 hover:bg-main-view-fg/8 px-2 py-1 flex items-center gap-1.5 rounded-sm mr-2">
<PopoverTrigger asChild>
<button
type="button"

View File

@ -56,7 +56,7 @@ const mainMenus = [
title: 'common:projects.title',
icon: IconFolderPlus,
route: route.project,
isEnabled: true,
isEnabled: !(IS_IOS || IS_ANDROID),
},
]
@ -154,7 +154,6 @@ const LeftPanel = () => {
}
}, [setLeftPanel, open])
const currentPath = useRouterState({
select: (state) => state.location.pathname,
})
@ -403,7 +402,7 @@ const LeftPanel = () => {
})}
</div>
{filteredProjects.length > 0 && (
{filteredProjects.length > 0 && !(IS_IOS || IS_ANDROID) && (
<div className="space-y-1 py-1">
<div className="flex items-center justify-between mb-2">
<span className="block text-xs text-left-panel-fg/50 px-1 font-semibold">
@ -581,6 +580,10 @@ const LeftPanel = () => {
{filteredThreads.length === 0 && searchTerm.length > 0 && (
<div className="px-1 mt-2">
<span className="block text-xs text-left-panel-fg/50 px-1 font-semibold mb-2">
{t('common:recents')}
</span>
<div className="flex items-center gap-1 text-left-panel-fg/80">
<IconSearch size={18} />
<h6 className="font-medium text-base">
@ -640,7 +643,7 @@ const LeftPanel = () => {
data-test-id={`menu-${menu.title}`}
activeOptions={{ exact: true }}
className={cn(
'flex items-center gap-1.5 cursor-pointer hover:bg-left-panel-fg/10 py-1 px-1 rounded',
'flex items-center gap-1.5 cursor-pointer hover:bg-left-panel-fg/10 py-1 my-0.5 px-1 rounded',
isActive && 'bg-left-panel-fg/10'
)}
>

View File

@ -171,7 +171,9 @@ export function ModelSetting({
key === 'ngl' ||
key === 'chat_template' ||
key === 'offload_mmproj' ||
key === 'batch_size'
key === 'batch_size' ||
key === 'cpu_moe' ||
key === 'n_cpu_moe'
)
if (requiresRestart) {
@ -231,7 +233,9 @@ export function ModelSetting({
key === 'ngl' ||
key === 'chat_template' ||
key === 'offload_mmproj' ||
key === 'batch_size'
key === 'batch_size' ||
key === 'cpu_moe' ||
key === 'n_cpu_moe'
) {
// Check if model is running before stopping it
serviceHub
@ -261,7 +265,9 @@ export function ModelSetting({
<SheetContent className="h-[calc(100%-8px)] top-1 right-1 rounded-e-md overflow-y-auto">
<SheetHeader>
<SheetTitle>
{t('common:modelSettings.title', { modelId: getModelDisplayName(model) })}
{t('common:modelSettings.title', {
modelId: getModelDisplayName(model),
})}
</SheetTitle>
<SheetDescription>
{t('common:modelSettings.description')}

View File

@ -205,7 +205,7 @@ const SortableItem = memo(
}}
/>
</DropdownMenuTrigger>
<DropdownMenuContent side="bottom" align="end" className="w-44">
<DropdownMenuContent side="bottom" align="end" className="min-w-44">
{thread.isFavorite ? (
<DropdownMenuItem
onClick={(e) => {
@ -237,13 +237,13 @@ const SortableItem = memo(
<DropdownMenuSub>
<DropdownMenuSubTrigger className="gap-2">
<IconFolder size={16} />
<span>Add to project</span>
<span>{t('common:projects.addToProject')}</span>
</DropdownMenuSubTrigger>
<DropdownMenuSubContent>
<DropdownMenuSubContent className="max-h-60 min-w-44 overflow-y-auto">
{availableProjects.length === 0 ? (
<DropdownMenuItem disabled>
<span className="text-left-panel-fg/50">
No projects available
{t('common:projects.noProjectsAvailable')}
</span>
</DropdownMenuItem>
) : (
@ -262,6 +262,8 @@ const SortableItem = memo(
</DropdownMenuItem>
))
)}
</DropdownMenuSubContent>
</DropdownMenuSub>
{thread.metadata?.project && (
<>
<DropdownMenuSeparator />
@ -286,8 +288,6 @@ const SortableItem = memo(
</DropdownMenuItem>
</>
)}
</DropdownMenuSubContent>
</DropdownMenuSub>
<DropdownMenuSeparator />
<DeleteThreadDialog
thread={thread}
@ -311,7 +311,11 @@ type ThreadListProps = {
currentProjectId?: string
}
function ThreadList({ threads, variant = 'default', currentProjectId }: ThreadListProps) {
function ThreadList({
threads,
variant = 'default',
currentProjectId,
}: ThreadListProps) {
const sortedThreads = useMemo(() => {
return threads.sort((a, b) => {
return (b.updated || 0) - (a.updated || 0)
@ -335,7 +339,12 @@ function ThreadList({ threads, variant = 'default', currentProjectId }: ThreadLi
strategy={verticalListSortingStrategy}
>
{sortedThreads.map((thread, index) => (
<SortableItem key={index} thread={thread} variant={variant} currentProjectId={currentProjectId} />
<SortableItem
key={index}
thread={thread}
variant={variant}
currentProjectId={currentProjectId}
/>
))}
</SortableContext>
</DndContext>

View File

@ -243,11 +243,7 @@ export default function AddEditAssistant({
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent
onInteractOutside={(e) => {
e.preventDefault()
}}
>
<DialogContent>
<DialogHeader>
<DialogTitle>
{editingKey

View File

@ -421,13 +421,12 @@ export default function AddEditMCPServer({
}}
onPaste={() => setError(null)}
style={{
fontFamily: 'ui-monospace',
backgroundColor: 'transparent',
wordBreak: 'break-all',
overflowWrap: 'anywhere',
whiteSpace: 'pre-wrap',
}}
className="w-full !text-sm min-h-[300px]"
className="w-full !text-sm min-h-[300px] !font-mono"
/>
</div>
{error && <div className="text-destructive text-sm">{error}</div>}

View File

@ -93,13 +93,12 @@ export default function EditJsonMCPserver({
onChange={(e) => setJsonContent(e.target.value)}
onPaste={handlePaste}
style={{
fontFamily: 'ui-monospace',
backgroundColor: 'transparent',
wordBreak: 'break-all',
overflowWrap: 'anywhere',
whiteSpace: 'pre-wrap',
}}
className="w-full !text-sm overflow-hidden break-all"
className="w-full !text-sm overflow-hidden !break-all !font-mono"
/>
</div>
{error && <div className="text-destructive text-sm">{error}</div>}

View File

@ -31,6 +31,8 @@ vi.mock('zustand/middleware', () => ({
// Mock global constants
Object.defineProperty(global, 'IS_WINDOWS', { value: false, writable: true })
Object.defineProperty(global, 'IS_LINUX', { value: false, writable: true })
Object.defineProperty(global, 'IS_MACOS', { value: false, writable: true })
Object.defineProperty(global, 'IS_TAURI', { value: false, writable: true })
Object.defineProperty(global, 'IS_WEB_APP', { value: false, writable: true })
describe('useAppearance', () => {
@ -217,7 +219,8 @@ describe('useAppearance', () => {
result.current.setAppBgColor(testColor)
})
expect(result.current.appBgColor).toEqual(testColor)
// In web environment (IS_TAURI=false), alpha is forced to 1
expect(result.current.appBgColor).toEqual({ ...testColor, a: 1 })
})
it('should handle transparent colors', () => {
@ -230,6 +233,36 @@ describe('useAppearance', () => {
expect(result.current.appAccentBgColor).toEqual(transparentColor)
})
it('should preserve alpha when blur is supported (macOS)', () => {
// Mock macOS environment
Object.defineProperty(global, 'IS_MACOS', { value: true, writable: true })
Object.defineProperty(global, 'IS_TAURI', { value: true, writable: true })
Object.defineProperty(global, 'IS_WINDOWS', { value: false, writable: true })
Object.defineProperty(global, 'IS_LINUX', { value: false, writable: true })
const setPropertySpy = vi.fn()
Object.defineProperty(document.documentElement, 'style', {
value: {
setProperty: setPropertySpy,
},
writable: true,
})
const { result } = renderHook(() => useAppearance())
const testColor = { r: 128, g: 64, b: 192, a: 0.5 }
act(() => {
result.current.setAppBgColor(testColor)
})
// On macOS with Tauri, alpha should be preserved
expect(result.current.appBgColor).toEqual(testColor)
// Reset for other tests
Object.defineProperty(global, 'IS_MACOS', { value: false, writable: true })
Object.defineProperty(global, 'IS_TAURI', { value: false, writable: true })
})
})
describe('Edge cases', () => {

View File

@ -4,6 +4,9 @@ import { localStorageKey } from '@/constants/localStorage'
import { RgbaColor } from 'react-colorful'
import { rgb, oklch, formatCss } from 'culori'
import { useTheme } from './useTheme'
import { useEffect, useState } from 'react'
import { getServiceHub } from '@/hooks/useServiceHub'
import { supportsBlurEffects } from '@/utils/blurSupport'
export type FontSize = '14px' | '15px' | '16px' | '18px'
export type ChatWidth = 'full' | 'compact'
@ -41,19 +44,37 @@ export const fontSizeOptions = [
{ label: 'Extra Large', value: '18px' as FontSize },
]
// Helper to determine if blur effects are supported
// This will be dynamically checked on Windows and Linux
let blurEffectsSupported = true
if ((IS_WINDOWS || IS_LINUX) && IS_TAURI) {
// Default to false for Windows/Linux, will be checked async
blurEffectsSupported = false
}
// Helper to get the appropriate alpha value
const getAlphaValue = () => {
// Web always uses alpha = 1
if (!IS_TAURI) return 1
// Windows/Linux use 1 if blur not supported, 0.4 if supported
if ((IS_WINDOWS || IS_LINUX) && !blurEffectsSupported) return 1
// macOS and Windows/Linux with blur support use 0.4
return 0.4
}
// Default appearance settings
const defaultFontSize: FontSize = '15px'
const defaultAppBgColor: RgbaColor = {
r: 25,
g: 25,
b: 25,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.4,
a: getAlphaValue(),
}
const defaultLightAppBgColor: RgbaColor = {
r: 255,
g: 255,
b: 255,
a: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : 0.4,
a: getAlphaValue(),
}
const defaultAppMainViewBgColor: RgbaColor = { r: 25, g: 25, b: 25, a: 1 }
const defaultLightAppMainViewBgColor: RgbaColor = {
@ -89,10 +110,15 @@ const isColorEqual = (color1: RgbaColor, color2: RgbaColor): boolean => {
// Helper function to check if color is default (not customized)
export const isDefaultColor = (color: RgbaColor): boolean => {
return (
isColorEqual(color, defaultAppBgColor) ||
isColorEqual(color, defaultLightAppBgColor)
)
// Check if RGB matches default (ignore alpha since it changes based on blur support)
const isDarkDefault = color.r === 25 && color.g === 25 && color.b === 25
const isLightDefault = color.r === 255 && color.g === 255 && color.b === 255
// Consider it default if RGB matches and alpha is either 0.4 or 1 (common values)
const hasDefaultAlpha =
Math.abs(color.a - 0.4) < 0.01 || Math.abs(color.a - 1) < 0.01
return (isDarkDefault || isLightDefault) && hasDefaultAlpha
}
export const isDefaultColorMainView = (color: RgbaColor): boolean => {
@ -128,6 +154,59 @@ export const getDefaultTextColor = (isDark: boolean): string => {
return isDark ? defaultDarkLeftPanelTextColor : defaultLightLeftPanelTextColor
}
// Hook to check if alpha slider should be shown
export const useBlurSupport = () => {
const [supportsBlur, setSupportsBlur] = useState(
IS_MACOS && IS_TAURI // Default to true only for macOS
)
useEffect(() => {
const checkBlurSupport = async () => {
if ((IS_WINDOWS || IS_LINUX) && IS_TAURI) {
try {
// Get hardware info to check OS version
const hardwareInfo = await getServiceHub()
.hardware()
.getHardwareInfo()
const supported = supportsBlurEffects(hardwareInfo)
blurEffectsSupported = supported
setSupportsBlur(supported)
const platform = IS_WINDOWS ? 'Windows' : 'Linux'
if (supported) {
console.log(
`${platform} blur effects: SUPPORTED - Alpha slider will be shown`
)
} else {
console.log(
`${platform} blur effects: NOT SUPPORTED - Alpha slider will be hidden, alpha set to 1`
)
}
} catch (error) {
console.error(
`❌ Failed to check ${IS_WINDOWS ? 'Windows' : 'Linux'} blur support:`,
error
)
setSupportsBlur(false)
}
} else if (IS_MACOS && IS_TAURI) {
console.log(
'🍎 macOS platform: Blur effects supported, alpha slider shown'
)
} else if (!IS_TAURI) {
console.log('🌐 Web platform: Alpha slider hidden, alpha set to 1')
}
}
checkBlurSupport()
}, [])
// Return true if alpha slider should be shown
// Show on macOS (always), and conditionally on Windows/Linux based on detection
return IS_TAURI && (IS_MACOS || supportsBlur)
}
export const useAppearance = create<AppearanceState>()(
persist(
(set) => {
@ -154,8 +233,11 @@ export const useAppearance = create<AppearanceState>()(
defaultFontSize
)
// Reset app background color
const defaultBg = isDark ? defaultAppBgColor : defaultLightAppBgColor
// Reset app background color with correct alpha based on blur support
const currentAlpha = blurEffectsSupported && IS_TAURI ? 0.4 : 1
const defaultBg = isDark
? { r: 25, g: 25, b: 25, a: currentAlpha }
: { r: 255, g: 255, b: 255, a: currentAlpha }
const culoriRgbBg = rgb({
mode: 'rgb',
r: defaultBg.r / 255,
@ -295,6 +377,11 @@ export const useAppearance = create<AppearanceState>()(
finalColor = isDark ? defaultAppBgColor : defaultLightAppBgColor
}
// Force alpha to 1 if blur effects are not supported
if (!blurEffectsSupported && (IS_WINDOWS || IS_LINUX || !IS_TAURI)) {
finalColor = { ...finalColor, a: 1 }
}
// Convert RGBA to a format culori can work with
const culoriRgb = rgb({
mode: 'rgb',
@ -565,11 +652,9 @@ export const useAppearance = create<AppearanceState>()(
// Get the current theme state
const { isDark } = useTheme.getState()
// If stored color is default, use theme-appropriate default
let finalColor = state.appBgColor
if (isDefaultColor(state.appBgColor)) {
finalColor = isDark ? defaultAppBgColor : defaultLightAppBgColor
}
// Just use the stored color as-is during rehydration
// The AppearanceProvider will handle alpha normalization after blur detection
const finalColor = state.appBgColor
let finalColorMainView = state.appMainViewBgColor
if (isDefaultColorMainView(state.appMainViewBgColor)) {

View File

@ -149,7 +149,7 @@ export const useChat = () => {
})
}
return currentThread
}, [createThread, retrieveThread, router, setMessages])
}, [createThread, retrieveThread, router, setMessages, serviceHub])
const restartModel = useCallback(
async (provider: ProviderObject, modelId: string) => {
@ -639,6 +639,7 @@ export const useChat = () => {
toggleOnContextShifting,
setModelLoadError,
serviceHub,
setTokenSpeed,
]
)

View File

@ -320,9 +320,112 @@ export const useModelProvider = create<ModelProviderState>()(
})
}
if (version <= 3 && state?.providers) {
state.providers.forEach((provider) => {
// Migrate Anthropic provider base URL and add custom headers
if (provider.provider === 'anthropic') {
if (provider.base_url === 'https://api.anthropic.com') {
provider.base_url = 'https://api.anthropic.com/v1'
}
// Update base-url in settings
if (provider.settings) {
const baseUrlSetting = provider.settings.find(
(s) => s.key === 'base-url'
)
if (
baseUrlSetting?.controller_props?.value ===
'https://api.anthropic.com'
) {
baseUrlSetting.controller_props.value =
'https://api.anthropic.com/v1'
}
if (
baseUrlSetting?.controller_props?.placeholder ===
'https://api.anthropic.com'
) {
baseUrlSetting.controller_props.placeholder =
'https://api.anthropic.com/v1'
}
}
if (!provider.custom_header) {
provider.custom_header = [
{
header: 'anthropic-version',
value: '2023-06-01',
},
{
header: 'anthropic-dangerous-direct-browser-access',
value: 'true',
},
]
}
}
if (provider.provider === 'cohere') {
if (
provider.base_url === 'https://api.cohere.ai/compatibility/v1'
) {
provider.base_url = 'https://api.cohere.ai/v1'
}
// Update base-url in settings
if (provider.settings) {
const baseUrlSetting = provider.settings.find(
(s) => s.key === 'base-url'
)
if (
baseUrlSetting?.controller_props?.value ===
'https://api.cohere.ai/compatibility/v1'
) {
baseUrlSetting.controller_props.value =
'https://api.cohere.ai/v1'
}
if (
baseUrlSetting?.controller_props?.placeholder ===
'https://api.cohere.ai/compatibility/v1'
) {
baseUrlSetting.controller_props.placeholder =
'https://api.cohere.ai/v1'
}
}
}
})
}
if (version <= 4 && state?.providers) {
state.providers.forEach((provider) => {
// Migrate model settings
if (provider.models && provider.provider === 'llamacpp') {
provider.models.forEach((model) => {
if (!model.settings) model.settings = {}
if (!model.settings.cpu_moe) {
model.settings.cpu_moe = {
...modelSettings.cpu_moe,
controller_props: {
...modelSettings.cpu_moe.controller_props,
},
}
}
if (!model.settings.n_cpu_moe) {
model.settings.n_cpu_moe = {
...modelSettings.n_cpu_moe,
controller_props: {
...modelSettings.n_cpu_moe.controller_props,
},
}
}
})
}
})
}
return state
},
version: 3,
version: 5,
}
)
)

View File

@ -32,7 +32,9 @@ export const useTheme = create<ThemeState>()(
await getServiceHub().theme().setTheme(null)
set(() => ({ activeTheme, isDark: isDarkMode }))
} else {
await getServiceHub().theme().setTheme(activeTheme as ThemeMode)
await getServiceHub()
.theme()
.setTheme(activeTheme as ThemeMode)
set(() => ({ activeTheme, isDark: activeTheme === 'dark' }))
}
},

View File

@ -3,6 +3,7 @@ import { ThreadMessage, ContentType } from '@janhq/core'
import { useServiceHub } from './useServiceHub'
import { useModelProvider } from './useModelProvider'
import { usePrompt } from './usePrompt'
import { removeReasoningContent } from '@/utils/reasoning'
export interface TokenCountData {
tokenCount: number
@ -69,7 +70,19 @@ export const useTokensCount = (
} as ThreadMessage)
}
}
return result
return result.map((e) => ({
...e,
content: e.content.map((c) => ({
...c,
text:
c.type === 'text'
? {
value: removeReasoningContent(c.text?.value ?? '.'),
annotations: [],
}
: c.text,
})),
}))
}, [messages, prompt, uploadedFiles])
// Debounced calculation that includes current prompt

View File

@ -56,7 +56,6 @@
@layer base {
body {
@apply overflow-hidden;
background-color: white;
min-height: 100vh;
min-height: -webkit-fill-available;
padding-top: env(safe-area-inset-top, 0px);

View File

@ -2,6 +2,7 @@
import { ChatCompletionMessageParam } from 'token.js'
import { ChatCompletionMessageToolCall } from 'openai/resources'
import { ThreadMessage } from '@janhq/core'
import { removeReasoningContent } from '@/utils/reasoning'
/**
* @fileoverview Helper functions for creating chat completion request.
@ -24,7 +25,7 @@ export class CompletionMessagesBuilder {
if (msg.role === 'assistant') {
return {
role: msg.role,
content: this.normalizeContent(
content: removeReasoningContent(
msg.content[0]?.text?.value || '.'
),
} as ChatCompletionMessageParam
@ -135,7 +136,7 @@ export class CompletionMessagesBuilder {
) {
this.messages.push({
role: 'assistant',
content: this.normalizeContent(content),
content: removeReasoningContent(content),
refusal: refusal,
tool_calls: calls,
})
@ -202,30 +203,4 @@ export class CompletionMessagesBuilder {
return result
}
/**
* Normalize the content of a message by removing reasoning content.
* This is useful to ensure that reasoning content does not get sent to the model.
* @param content
* @returns
*/
private normalizeContent = (content: string): string => {
// Reasoning content should not be sent to the model
if (content.includes('<think>')) {
const match = content.match(/<think>([\s\S]*?)<\/think>/)
if (match?.index !== undefined) {
const splitIndex = match.index + match[0].length
content = content.slice(splitIndex).trim()
}
}
if (content.includes('<|channel|>analysis<|message|>')) {
const match = content.match(
/<\|channel\|>analysis<\|message\|>([\s\S]*?)<\|start\|>assistant<\|channel\|>final<\|message\|>/
)
if (match?.index !== undefined) {
const splitIndex = match.index + match[0].length
content = content.slice(splitIndex).trim()
}
}
return content
}
}

View File

@ -57,7 +57,7 @@ export const PlatformFeatures: Record<PlatformFeature, boolean> = {
// Extensions settings page - disabled for web
[PlatformFeature.EXTENSIONS_SETTINGS]:
isPlatformTauri() && !isPlatformIOS() && !isPlatformAndroid(),
isPlatformTauri(),
// Assistant functionality - disabled for web
[PlatformFeature.ASSISTANTS]: isPlatformTauri(),
@ -75,8 +75,8 @@ export const PlatformFeatures: Record<PlatformFeature, boolean> = {
// Shortcut
[PlatformFeature.SHORTCUT]: !isPlatformIOS() && !isPlatformAndroid(),
// First message persisted thread - enabled for web only
[PlatformFeature.FIRST_MESSAGE_PERSISTED_THREAD]: !isPlatformTauri(),
// First message persisted thread - enabled for web and mobile platforms
[PlatformFeature.FIRST_MESSAGE_PERSISTED_THREAD]: !isPlatformTauri() || isPlatformIOS() || isPlatformAndroid(),
// Temporary chat mode - enabled for web only
[PlatformFeature.TEMPORARY_CHAT]: !isPlatformTauri(),

View File

@ -133,6 +133,28 @@ export const modelSettings = {
textAlign: 'right',
},
},
cpu_moe: {
key: 'cpu_moe',
title: 'Keep all Experts in CPU',
description:
'Keep all Mixture of Experts (MoE) weights in the CPU (if GPU is used).',
controller_type: 'checkbox',
controller_props: {
value: false,
},
},
n_cpu_moe: {
key: 'n_cpu_moe',
title: 'Number of MoE weights in the CPU',
description:
'Keep the Mixture of Experts (MoE) weights of the first N layers in the CPU (if GPU is used).',
controller_type: 'input',
controller_props: {
value: '',
placeholder: '24',
type: 'number',
},
},
override_tensor_buffer_t: {
key: 'override_tensor_buffer_t',
title: 'Override Tensor Buffer Type',

View File

@ -8,7 +8,6 @@ export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs))
}
export function basenameNoExt(filePath: string): string {
const base = path.basename(filePath);
const VALID_EXTENSIONS = [".tar.gz", ".zip"];
@ -24,7 +23,6 @@ export function basenameNoExt(filePath: string): string {
return base.slice(0, -path.extname(base).length);
}
/**
* Get the display name for a model, falling back to the model ID if no display name is set
*/

View File

@ -272,9 +272,12 @@
"thread": "Thread",
"threads": "Threads",
"updated": "Aktualisiert:",
"collapseThreads": "Threads einklappen",
"expandThreads": "Threads ausklappen",
"update": "Aktualisieren"
"collapseProject": "Projekt einklappen",
"expandProject": "Projekt ausklappen",
"update": "Aktualisieren",
"searchProjects": "Projekte durchsuchen...",
"noProjectsFound": "Keine Projekte gefunden",
"tryDifferentSearch": "Versuchen Sie einen anderen Suchbegriff"
},
"toast": {
"allThreadsUnfavorited": {
@ -356,80 +359,6 @@
"downloadAndVerificationComplete": {
"title": "Download abgeschlossen",
"description": "Modell \"{{item}}\" erfolgreich heruntergeladen und verifiziert"
},
"projectCreated": {
"title": "Projekt erstellt",
"description": "Projekt \"{{projectName}}\" erfolgreich erstellt"
},
"projectRenamed": {
"title": "Projekt umbenannt",
"description": "Projekt von \"{{oldName}}\" zu \"{{newName}}\" umbenannt"
},
"projectDeleted": {
"title": "Projekt gelöscht",
"description": "Projekt \"{{projectName}}\" erfolgreich gelöscht"
},
"projectAlreadyExists": {
"title": "Projekt existiert bereits",
"description": "Projekt \"{{projectName}}\" existiert bereits"
},
"projectDeleteFailed": {
"title": "Löschen fehlgeschlagen",
"description": "Projekt konnte nicht gelöscht werden. Bitte versuchen Sie es erneut."
},
"threadAssignedToProject": {
"title": "Thread zugewiesen",
"description": "Thread erfolgreich zu \"{{projectName}}\" hinzugefügt"
},
"threadRemovedFromProject": {
"title": "Thread entfernt",
"description": "Thread erfolgreich von \"{{projectName}}\" entfernt"
}
},
"projects": {
"title": "Projekte",
"addProject": "Projekt hinzufügen",
"addToProject": "Zu Projekt hinzufügen",
"removeFromProject": "Von Projekt entfernen",
"createNewProject": "Neues Projekt erstellen",
"editProject": "Projekt bearbeiten",
"deleteProject": "Projekt löschen",
"projectName": "Projektname",
"enterProjectName": "Projektname eingeben...",
"noProjectsAvailable": "Keine Projekte verfügbar",
"noProjectsYet": "Noch keine Projekte",
"noProjectsYetDesc": "Starten Sie ein neues Projekt, indem Sie auf die Schaltfläche Projekt hinzufügen klicken.",
"projectNotFound": "Projekt nicht gefunden",
"projectNotFoundDesc": "Das gesuchte Projekt existiert nicht oder wurde gelöscht.",
"deleteProjectDialog": {
"title": "Projekt löschen",
"description": "Sind Sie sicher, dass Sie dieses Projekt löschen möchten? Diese Aktion kann nicht rückgängig gemacht werden.",
"deleteButton": "Löschen",
"successWithName": "Projekt \"{{projectName}}\" erfolgreich gelöscht",
"successWithoutName": "Projekt erfolgreich gelöscht",
"error": "Projekt konnte nicht gelöscht werden. Bitte versuchen Sie es erneut.",
"ariaLabel": "{{projectName}} löschen"
},
"addProjectDialog": {
"createTitle": "Neues Projekt erstellen",
"editTitle": "Projekt bearbeiten",
"nameLabel": "Projektname",
"namePlaceholder": "Projektname eingeben...",
"createButton": "Erstellen",
"updateButton": "Aktualisieren",
"alreadyExists": "Projekt \"{{projectName}}\" existiert bereits",
"createSuccess": "Projekt \"{{projectName}}\" erfolgreich erstellt",
"renameSuccess": "Projekt von \"{{oldName}}\" zu \"{{newName}}\" umbenannt"
},
"noConversationsIn": "Keine Gespräche in {{projectName}}",
"startNewConversation": "Starten Sie ein neues Gespräch mit {{projectName}} unten",
"conversationsIn": "Gespräche in {{projectName}}",
"conversationsDescription": "Klicken Sie auf ein Gespräch, um weiterzuchatten, oder starten Sie unten ein neues.",
"thread": "Thread",
"threads": "Threads",
"updated": "Aktualisiert:",
"collapseThreads": "Threads einklappen",
"expandThreads": "Threads ausklappen",
"update": "Aktualisieren"
}
}
}

View File

@ -169,6 +169,12 @@
"serverLogs": "Server Logs",
"serverLogsDesc": "Zeige detaillierte Logs des lokalen API-Servers an.",
"openLogs": "Logs öffnen",
"swaggerDocs": "API-Dokumentation",
"swaggerDocsDesc": "Zeige interaktive API-Dokumentation (Swagger UI) an.",
"openDocs": "Dokumentation öffnen",
"startupConfiguration": "Startkonfiguration",
"runOnStartup": "Auto start",
"runOnStartupDesc": "Starte den lokalen API-Server automatisch beim Anwendungsstart. Verwendet das zuletzt verwendete Modell oder wählt das erste verfügbare Modell, falls nicht verfügbar.",
"serverConfiguration": "Server Konfiguration",
"serverHost": "Server Host",
"serverHostDesc": "Netzwerkadresse für den Server.",

View File

@ -283,9 +283,12 @@
"thread": "thread",
"threads": "threads",
"updated": "Updated:",
"collapseThreads": "Collapse threads",
"expandThreads": "Expand threads",
"update": "Update"
"collapseProject": "Collapse project",
"expandProject": "Expand project",
"update": "Update",
"searchProjects": "Search projects...",
"noProjectsFound": "No projects found",
"tryDifferentSearch": "Try a different search term"
},
"toast": {
"allThreadsUnfavorited": {

View File

@ -169,9 +169,12 @@
"serverLogs": "Server Logs",
"serverLogsDesc": "View detailed logs of the local API server.",
"openLogs": "Open Logs",
"swaggerDocs": "API Documentation",
"swaggerDocsDesc": "View interactive API documentation (Swagger UI).",
"openDocs": "Open Docs",
"startupConfiguration": "Startup Configuration",
"runOnStartup": "Enable by default on startup",
"runOnStartupDesc": "Automatically start the Local API Server when the application launches.",
"runOnStartup": "Auto start",
"runOnStartupDesc": "Automatically start the Local API Server when the application launches. Uses last used model, or picks the first available model if unavailable.",
"serverConfiguration": "Server Configuration",
"serverHost": "Server Host",
"serverHostDesc": "Network address for the server.",

View File

@ -354,8 +354,11 @@
"thread": "utas",
"threads": "utas",
"updated": "Diperbarui:",
"collapseThreads": "Tutup utas",
"expandThreads": "Buka utas",
"update": "Perbarui"
"collapseProject": "Tutup proyek",
"expandProject": "Buka proyek",
"update": "Perbarui",
"searchProjects": "Cari proyek...",
"noProjectsFound": "Tidak ada proyek ditemukan",
"tryDifferentSearch": "Coba kata kunci pencarian lain"
}
}

View File

@ -167,6 +167,12 @@
"serverLogs": "Log Server",
"serverLogsDesc": "Lihat log terperinci dari server API lokal.",
"openLogs": "Buka Log",
"swaggerDocs": "Dokumentasi API",
"swaggerDocsDesc": "Lihat dokumentasi API interaktif (Swagger UI).",
"openDocs": "Buka Dokumentasi",
"startupConfiguration": "Konfigurasi Startup",
"runOnStartup": "Auto start",
"runOnStartupDesc": "Mulai Server API Lokal secara otomatis saat aplikasi diluncurkan. Menggunakan model terakhir yang digunakan, atau memilih model pertama yang tersedia jika tidak tersedia.",
"serverConfiguration": "Konfigurasi Server",
"serverHost": "Host Server",
"serverHostDesc": "Alamat jaringan untuk server.",

View File

@ -272,9 +272,12 @@
"thread": "wątek",
"threads": "wątki",
"updated": "Zaktualizowano:",
"collapseThreads": "Zwiń wątki",
"expandThreads": "Rozwiń wątki",
"update": "Aktualizuj"
"collapseProject": "Zwiń projekt",
"expandProject": "Rozwiń projekt",
"update": "Aktualizuj",
"searchProjects": "Szukaj projektów...",
"noProjectsFound": "Nie znaleziono projektów",
"tryDifferentSearch": "Spróbuj innego wyszukiwania"
},
"toast": {
"allThreadsUnfavorited": {

View File

@ -167,9 +167,12 @@
"serverLogs": "Dzienniki Serwera",
"serverLogsDesc": "Wyświetl szczegółowe dzienniki lokalnego serwera API.",
"openLogs": "Otwórz Dzienniki",
"swaggerDocs": "Dokumentacja API",
"swaggerDocsDesc": "Wyświetl interaktywną dokumentację API (Swagger UI).",
"openDocs": "Otwórz Dokumentację",
"startupConfiguration": "Konfiguracja Startowa",
"runOnStartup": "Domyślnie włączaj przy starcie",
"runOnStartupDesc": "Automatycznie uruchamiaj lokalny serwer API podczas uruchamiania aplikacji.",
"runOnStartup": "Auto start",
"runOnStartupDesc": "Automatycznie uruchamiaj lokalny serwer API podczas uruchamiania aplikacji. Używa ostatnio używanego modelu lub wybiera pierwszy dostępny model, jeśli nie jest dostępny.",
"serverConfiguration": "Konfiguracja Serwera",
"serverHost": "Host",
"serverHostDesc": "Adres sieciowy serwera.",

View File

@ -199,6 +199,35 @@
"title": "Cài đặt mô hình - {{modelId}}",
"description": "Định cấu hình cài đặt mô hình để tối ưu hóa hiệu suất và hành vi."
},
"projects": {
"title": "Dự án",
"addProject": "Thêm dự án",
"editProject": "Chỉnh sửa dự án",
"deleteProject": "Xóa dự án",
"projectName": "Tên dự án",
"enterProjectName": "Nhập tên dự án",
"noProjectsYet": "Chưa có dự án nào",
"noProjectsYetDesc": "Tạo dự án đầu tiên của bạn để tổ chức các cuộc trò chuyện.",
"projectNotFound": "Không tìm thấy dự án",
"projectNotFoundDesc": "Dự án mà bạn đang tìm kiếm không tồn tại.",
"deleteProjectConfirm": "Bạn có chắc chắn muốn xóa dự án này không? Hành động này không thể hoàn tác.",
"addToProject": "Thêm vào dự án",
"removeFromProject": "Xóa khỏi dự án",
"noConversationsIn": "Chưa có cuộc trò chuyện nào trong {{projectName}}",
"startNewConversation": "Bắt đầu một cuộc trò chuyện mới với {{projectName}} bên dưới",
"conversationsIn": "Cuộc trò chuyện trong {{projectName}}",
"conversationsDescription": "Nhấp vào bất kỳ cuộc trò chuyện nào để tiếp tục trò chuyện hoặc bắt đầu một cuộc trò chuyện mới bên dưới.",
"thread": "chủ đề",
"threads": "chủ đề",
"updated": "Đã cập nhật:",
"collapseProject": "Thu gọn dự án",
"expandProject": "Mở rộng dự án",
"update": "Cập nhật",
"noProjectsAvailable": "Không có dự án nào",
"searchProjects": "Tìm kiếm dự án...",
"noProjectsFound": "Không tìm thấy dự án nào",
"tryDifferentSearch": "Thử từ khóa tìm kiếm khác"
},
"dialogs": {
"changeDataFolder": {
"title": "Thay đổi vị trí thư mục dữ liệu",

View File

@ -169,6 +169,12 @@
"serverLogs": "Nhật ký máy chủ",
"serverLogsDesc": "Xem nhật ký chi tiết của máy chủ API cục bộ.",
"openLogs": "Mở nhật ký",
"swaggerDocs": "Tài liệu API",
"swaggerDocsDesc": "Xem tài liệu API tương tác (Swagger UI).",
"openDocs": "Mở tài liệu",
"startupConfiguration": "Cấu hình khởi động",
"runOnStartup": "Auto start",
"runOnStartupDesc": "Tự động khởi động Máy chủ API Cục bộ khi ứng dụng khởi chạy. Sử dụng mô hình đã dùng gần nhất hoặc chọn mô hình đầu tiên có sẵn nếu không khả dụng.",
"serverConfiguration": "Cấu hình máy chủ",
"serverHost": "Máy chủ lưu trữ",
"serverHostDesc": "Địa chỉ mạng cho máy chủ.",

View File

@ -199,6 +199,35 @@
"title": "模型设置 - {{modelId}}",
"description": "配置模型设置以优化性能和行为。"
},
"projects": {
"title": "项目",
"addProject": "添加项目",
"editProject": "编辑项目",
"deleteProject": "删除项目",
"projectName": "项目名称",
"enterProjectName": "输入项目名称",
"noProjectsYet": "还没有项目",
"noProjectsYetDesc": "创建您的第一个项目来组织对话。",
"projectNotFound": "未找到项目",
"projectNotFoundDesc": "您正在查找的项目不存在。",
"deleteProjectConfirm": "您确定要删除此项目吗?此操作无法撤销。",
"addToProject": "添加到项目",
"removeFromProject": "从项目中删除",
"noConversationsIn": "{{projectName}} 中还没有对话",
"startNewConversation": "在下方开始与 {{projectName}} 的新对话",
"conversationsIn": "{{projectName}} 中的对话",
"conversationsDescription": "点击任何对话以继续聊天,或在下方开始新的对话。",
"thread": "线程",
"threads": "线程",
"updated": "已更新:",
"collapseProject": "收起项目",
"expandProject": "展开项目",
"update": "更新",
"noProjectsAvailable": "没有可用的项目",
"searchProjects": "搜索项目...",
"noProjectsFound": "未找到项目",
"tryDifferentSearch": "尝试不同的搜索词"
},
"dialogs": {
"changeDataFolder": {
"title": "更改数据文件夹位置",

View File

@ -169,6 +169,12 @@
"serverLogs": "服务器日志",
"serverLogsDesc": "查看本地 API 服务器的详细日志。",
"openLogs": "打开日志",
"swaggerDocs": "API 文档",
"swaggerDocsDesc": "查看交互式 API 文档Swagger UI。",
"openDocs": "打开文档",
"startupConfiguration": "启动配置",
"runOnStartup": "Auto start",
"runOnStartupDesc": "应用程序启动时自动启动本地 API 服务器。使用上次使用的模型,如果不可用则选择第一个可用模型。",
"serverConfiguration": "服务器配置",
"serverHost": "服务器主机",
"serverHostDesc": "服务器的网络地址。",

View File

@ -199,6 +199,35 @@
"title": "模型設定 - {{modelId}}",
"description": "設定模型設定以最佳化效能和行為。"
},
"projects": {
"title": "專案",
"addProject": "新增專案",
"editProject": "編輯專案",
"deleteProject": "刪除專案",
"projectName": "專案名稱",
"enterProjectName": "輸入專案名稱",
"noProjectsYet": "尚無專案",
"noProjectsYetDesc": "建立您的第一個專案來組織對話。",
"projectNotFound": "找不到專案",
"projectNotFoundDesc": "您正在尋找的專案不存在。",
"deleteProjectConfirm": "您確定要刪除此專案嗎?此操作無法復原。",
"addToProject": "加入專案",
"removeFromProject": "從專案中移除",
"noConversationsIn": "{{projectName}} 中尚無對話",
"startNewConversation": "在下方開始與 {{projectName}} 的新對話",
"conversationsIn": "{{projectName}} 中的對話",
"conversationsDescription": "點擊任何對話以繼續聊天,或在下方開始新的對話。",
"thread": "執行緒",
"threads": "執行緒",
"updated": "已更新:",
"collapseProject": "收合專案",
"expandProject": "展開專案",
"update": "更新",
"noProjectsAvailable": "沒有可用的專案",
"searchProjects": "搜尋專案...",
"noProjectsFound": "找不到專案",
"tryDifferentSearch": "嘗試不同的搜尋詞"
},
"dialogs": {
"changeDataFolder": {
"title": "變更資料夾位置",

View File

@ -167,6 +167,12 @@
"serverLogs": "伺服器日誌",
"serverLogsDesc": "檢視本機 API 伺服器的詳細日誌。",
"openLogs": "開啟日誌",
"swaggerDocs": "API 文件",
"swaggerDocsDesc": "查看互動式 API 文件Swagger UI。",
"openDocs": "開啟文件",
"startupConfiguration": "啟動設定",
"runOnStartup": "Auto start",
"runOnStartupDesc": "應用程式啟動時自動啟動本機 API 伺服器。使用上次使用的模型,如果不可用則選擇第一個可用模型。",
"serverConfiguration": "伺服器設定",
"serverHost": "伺服器主機",
"serverHostDesc": "伺服器的網路位址。",

View File

@ -1,5 +1,5 @@
import { useEffect } from 'react'
import { useAppearance } from '@/hooks/useAppearance'
import { useAppearance, useBlurSupport } from '@/hooks/useAppearance'
import { useTheme } from '@/hooks/useTheme'
import {
isDefaultColor,
@ -29,14 +29,37 @@ export function AppearanceProvider() {
appDestructiveTextColor,
} = useAppearance()
const { isDark } = useTheme()
const showAlphaSlider = useBlurSupport()
// Force re-apply appearance on mount to fix theme desync issues on Windows
// This ensures that when navigating to routes (like logs), the theme is properly applied
useEffect(() => {
const {
setAppBgColor,
setAppMainViewBgColor,
appBgColor,
appMainViewBgColor,
} = useAppearance.getState()
// Re-trigger setters to ensure CSS variables are applied with correct theme
setAppBgColor(appBgColor)
setAppMainViewBgColor(appMainViewBgColor)
}, []) // Run once on mount
// Update colors when blur support changes (important for Windows/Linux)
useEffect(() => {
const { setAppBgColor, appBgColor } = useAppearance.getState()
// Re-apply color to update alpha based on blur support
setAppBgColor(appBgColor)
}, [showAlphaSlider])
// Apply appearance settings on mount and when they change
useEffect(() => {
// Apply font size
document.documentElement.style.setProperty('--font-size-base', fontSize)
// Hide alpha slider when IS_LINUX || !IS_TAURI
const shouldHideAlpha = IS_LINUX || !IS_TAURI
// Hide alpha slider when blur is not supported
const shouldHideAlpha = !showAlphaSlider
let alphaStyleElement = document.getElementById('alpha-slider-style')
if (shouldHideAlpha) {
@ -55,12 +78,13 @@ export function AppearanceProvider() {
// Import culori functions dynamically to avoid SSR issues
import('culori').then(({ rgb, oklch, formatCss }) => {
// Convert RGBA to a format culori can work with
// Use alpha = 1 when blur is not supported
const culoriRgb = rgb({
mode: 'rgb',
r: appBgColor.r / 255,
g: appBgColor.g / 255,
b: appBgColor.b / 255,
alpha: IS_WINDOWS || IS_LINUX || !IS_TAURI ? 1 : appBgColor.a,
alpha: showAlphaSlider ? appBgColor.a : 1,
})
const culoriRgbMainView = rgb({
@ -176,6 +200,7 @@ export function AppearanceProvider() {
appAccentTextColor,
appDestructiveBgColor,
appDestructiveTextColor,
showAlphaSlider,
])
// Update appearance when theme changes
@ -194,6 +219,10 @@ export function AppearanceProvider() {
setAppDestructiveBgColor,
} = useAppearance.getState()
// Force re-apply all colors when theme changes to ensure correct dark/light defaults
// This is especially important on Windows where the theme might not be properly
// synchronized when navigating to different routes (e.g., logs page)
// If using default background color, update it when theme changes
if (isDefaultColor(appBgColor)) {
// This will trigger the appropriate updates for both background and text color

View File

@ -11,8 +11,8 @@ import { useThreads } from '@/hooks/useThreads'
import { useLocalApiServer } from '@/hooks/useLocalApiServer'
import { useAppState } from '@/hooks/useAppState'
import { AppEvent, events } from '@janhq/core'
import { localStorageKey } from '@/constants/localStorage'
import { SystemEvent } from '@/types/events'
import { getModelToStart } from '@/utils/getModelToStart'
export function DataProvider() {
const { setProviders, selectedModel, selectedProvider, getProviderByName } =
@ -65,10 +65,13 @@ export function DataProvider() {
// Listen for deep link events
let unsubscribe = () => {}
serviceHub.events().listen(SystemEvent.DEEP_LINK, (event) => {
serviceHub
.events()
.listen(SystemEvent.DEEP_LINK, (event) => {
const deep_link = event.payload as string
handleDeepLink([deep_link])
}).then((unsub) => {
})
.then((unsub) => {
unsubscribe = unsub
})
return () => {
@ -102,54 +105,6 @@ export function DataProvider() {
})
}, [serviceHub, setProviders])
const getLastUsedModel = (): { provider: string; model: string } | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
// Helper function to determine which model to start
const getModelToStart = () => {
// Use last used model if available
const lastUsedModel = getLastUsedModel()
if (lastUsedModel) {
const provider = getProviderByName(lastUsedModel.provider)
if (
provider &&
provider.models.some((m) => m.id === lastUsedModel.model)
) {
return { model: lastUsedModel.model, provider }
}
}
// Use selected model if available
if (selectedModel && selectedProvider) {
const provider = getProviderByName(selectedProvider)
if (provider) {
return { model: selectedModel.id, provider }
}
}
// Use first model from llamacpp provider
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
return null
}
// Auto-start Local API Server on app startup if enabled
useEffect(() => {
if (enableOnStartup) {
@ -159,7 +114,11 @@ export function DataProvider() {
return
}
const modelToStart = getModelToStart()
const modelToStart = getModelToStart({
selectedModel,
selectedProvider,
getProviderByName,
})
// Only start server if we have a model to load
if (!modelToStart) {

View File

@ -1,5 +1,6 @@
import { useEffect } from 'react'
import { useTheme, checkOSDarkMode } from '@/hooks/useTheme'
import { isPlatformTauri } from '@/lib/platform/utils'
/**
* ThemeProvider ensures theme settings are applied on every page load
@ -11,12 +12,21 @@ export function ThemeProvider() {
// Detect OS theme on mount and apply it
useEffect(() => {
// If theme is set to auto, detect OS preference
// Force refresh theme on mount to handle Linux startup timing issues
const refreshTheme = () => {
if (activeTheme === 'auto') {
const isDarkMode = checkOSDarkMode()
setIsDark(isDarkMode)
setTheme('auto')
}
}
// Initial refresh
refreshTheme()
// On Linux, desktop environment may not be ready immediately
// Add a delayed refresh to catch the correct OS theme
const timeoutId = setTimeout(refreshTheme, 100)
// Listen for changes in OS theme preference
const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)')
@ -26,16 +36,41 @@ export function ThemeProvider() {
if (activeTheme === 'auto') {
setIsDark(e.matches)
} else {
setTheme(e.matches ? 'dark' : 'light')
setTheme(activeTheme)
}
}
// Add event listener
// Add event listener for browser/web
mediaQuery.addEventListener('change', handleThemeChange)
// Listen to Tauri native theme events (uses XDG Desktop Portal on Linux)
let unlistenTauri: (() => void) | undefined
if (isPlatformTauri()) {
import('@tauri-apps/api/event')
.then(({ listen }) => {
return listen<string>('theme-changed', (event) => {
if (activeTheme === 'auto') {
const isDark = event.payload === 'dark'
setIsDark(isDark)
}
})
})
.then((unlisten) => {
unlistenTauri = unlisten
})
.catch((err) => {
console.error('Failed to setup Tauri theme listener:', err)
})
}
// Clean up
return () => {
clearTimeout(timeoutId)
mediaQuery.removeEventListener('change', handleThemeChange)
if (unlistenTauri) {
unlistenTauri()
}
}
}, [activeTheme, setIsDark, setTheme])

View File

@ -11,7 +11,7 @@ import SetupScreen from '@/containers/SetupScreen'
import { route } from '@/constants/routes'
type SearchParams = {
model?: {
'model'?: {
id: string
provider: string
}
@ -33,7 +33,10 @@ export const Route = createFileRoute(route.home as any)({
}
// Only include temporary-chat if it's explicitly true
if (search[TEMPORARY_CHAT_QUERY_ID] === 'true' || search[TEMPORARY_CHAT_QUERY_ID] === true) {
if (
search[TEMPORARY_CHAT_QUERY_ID] === 'true' ||
search[TEMPORARY_CHAT_QUERY_ID] === true
) {
result['temporary-chat'] = true
}
@ -77,7 +80,7 @@ function Index() {
</HeaderPage>
<div
className={cn(
'h-full overflow-y-auto flex flex-col gap-2 justify-center px-3 sm:px-4 md:px-8 py-4 md:py-0'
'h-full overflow-y-auto inline-flex flex-col gap-2 justify-center px-3 sm:px-4 md:px-8 py-4 md:py-0'
)}
>
<div
@ -110,7 +113,9 @@ function Index() {
isMobile ? 'text-base' : 'text-lg'
)}
>
{isTemporaryChat ? t('chat:temporaryChatDescription') : t('chat:description')}
{isTemporaryChat
? t('chat:temporaryChatDescription')
: t('chat:description')}
</p>
</div>
<div className="flex-1 shrink-0">

View File

@ -31,7 +31,10 @@ function LogsViewer() {
useEffect(() => {
let lastLogsLength = 0
function updateLogs() {
serviceHub.app().readLogs().then((logData) => {
serviceHub
.app()
.readLogs()
.then((logData) => {
let needScroll = false
const filteredLogs = logData.filter(Boolean) as LogEntry[]
if (filteredLogs.length > lastLogsLength) needScroll = true

View File

@ -14,6 +14,8 @@ import {
IconFolder,
IconChevronDown,
IconChevronRight,
IconSearch,
IconX,
} from '@tabler/icons-react'
import AddProjectDialog from '@/containers/dialogs/AddProjectDialog'
import { DeleteProjectDialog } from '@/containers/dialogs/DeleteProjectDialog'
@ -42,6 +44,7 @@ function ProjectContent() {
const [expandedProjects, setExpandedProjects] = useState<Set<string>>(
new Set()
)
const [searchQuery, setSearchQuery] = useState('')
const handleDelete = (id: string) => {
setDeletingId(id)
@ -93,6 +96,16 @@ function ProjectContent() {
})
}
// Filter projects based on search query
const filteredProjects = useMemo(() => {
if (!searchQuery.trim()) {
return folders
}
return folders.filter((folder) =>
folder.name.toLowerCase().includes(searchQuery.toLowerCase())
)
}, [folders, searchQuery])
return (
<div className="flex h-full flex-col justify-center">
<HeaderPage>
@ -113,6 +126,33 @@ function ProjectContent() {
</HeaderPage>
<div className="h-full overflow-y-auto flex flex-col">
<div className="p-4 w-full md:w-3/4 mx-auto mt-2">
{/* Search Bar */}
{folders.length > 0 && (
<div className="mb-4">
<div className="relative">
<IconSearch
size={18}
className="absolute left-3 top-1/2 transform -translate-y-1/2 text-main-view-fg/50"
/>
<input
type="text"
placeholder={t('projects.searchProjects')}
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="w-full pl-10 pr-4 py-2.5 bg-main-view-fg/5 border border-main-view-fg/10 rounded-lg text-main-view-fg placeholder:text-main-view-fg/50 focus:outline-none focus:ring-2 focus:ring-main-view-fg/20 focus:border-main-view-fg/20 transition-all"
/>
{searchQuery && (
<button
onClick={() => setSearchQuery('')}
className="absolute right-3 top-1/2 transform -translate-y-1/2 text-main-view-fg/50 hover:text-main-view-fg transition-colors"
>
<IconX size={18} />
</button>
)}
</div>
</div>
)}
{folders.length === 0 ? (
<div className="flex flex-col items-center justify-center py-12 text-center">
<IconFolder size={48} className="text-main-view-fg/30 mb-4" />
@ -123,9 +163,19 @@ function ProjectContent() {
{t('projects.noProjectsYetDesc')}
</p>
</div>
) : filteredProjects.length === 0 ? (
<div className="flex flex-col items-center justify-center py-12 text-center">
<IconSearch size={48} className="text-main-view-fg/30 mb-4" />
<h3 className="text-lg font-medium text-main-view-fg/60 mb-2">
{t('projects.noProjectsFound')}
</h3>
<p className="text-main-view-fg/50 text-sm">
{t('projects.tryDifferentSearch')}
</p>
</div>
) : (
<div className="space-y-3">
{folders
{filteredProjects
.slice()
.sort((a, b) => b.updated_at - a.updated_at)
.map((folder) => {
@ -172,8 +222,8 @@ function ProjectContent() {
className="size-8 cursor-pointer flex items-center justify-center rounded-md hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out mr-1"
title={
isExpanded
? t('projects.collapseThreads')
: t('projects.expandThreads')
? t('projects.collapseProject')
: t('projects.expandProject')
}
onClick={() => toggleProjectExpansion(folder.id)}
>
@ -218,7 +268,9 @@ function ProjectContent() {
{/* Thread List */}
{isExpanded && projectThreads.length > 0 && (
<div className="mt-3 pl-2">
<div
className="mt-3 pl-2 pr-2 max-h-[190px] overflow-y-auto overflow-x-hidden [&::-webkit-scrollbar]:w-1.5 [&::-webkit-scrollbar-track]:bg-transparent [&::-webkit-scrollbar-thumb]:bg-main-view-fg/20 [&::-webkit-scrollbar-thumb]:rounded-full hover:[&::-webkit-scrollbar-thumb]:bg-main-view-fg/30"
>
<ThreadList
threads={projectThreads}
variant="project"

View File

@ -15,7 +15,6 @@ import { useLocalApiServer } from '@/hooks/useLocalApiServer'
import { useAppState } from '@/hooks/useAppState'
import { useModelProvider } from '@/hooks/useModelProvider'
import { useServiceHub } from '@/hooks/useServiceHub'
import { localStorageKey } from '@/constants/localStorage'
import { IconLogs } from '@tabler/icons-react'
import { cn } from '@/lib/utils'
import { ApiKeyInput } from '@/containers/ApiKeyInput'
@ -23,6 +22,7 @@ import { useEffect, useState } from 'react'
import { PlatformGuard } from '@/lib/platform/PlatformGuard'
import { PlatformFeature } from '@/lib/platform'
import { toast } from 'sonner'
import { getModelToStart } from '@/utils/getModelToStart'
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const Route = createFileRoute(route.settings.local_api_server as any)({
@ -82,54 +82,6 @@ function LocalAPIServerContent() {
setIsApiKeyEmpty(!isValid)
}
const getLastUsedModel = (): { provider: string; model: string } | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
// Helper function to determine which model to start
const getModelToStart = () => {
// Use last used model if available
const lastUsedModel = getLastUsedModel()
if (lastUsedModel) {
const provider = getProviderByName(lastUsedModel.provider)
if (
provider &&
provider.models.some((m) => m.id === lastUsedModel.model)
) {
return { model: lastUsedModel.model, provider }
}
}
// Use selected model if available
if (selectedModel && selectedProvider) {
const provider = getProviderByName(selectedProvider)
if (provider) {
return { model: selectedModel.id, provider }
}
}
// Use first model from llamacpp provider
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
return null
}
const [isModelLoading, setIsModelLoading] = useState(false)
const toggleAPIServer = async () => {
@ -137,7 +89,7 @@ function LocalAPIServerContent() {
if (serverStatus === 'stopped') {
console.log('Starting server with port:', serverPort)
toast.info('Starting server...', {
description: `Attempting to start server on port ${serverPort}`
description: `Attempting to start server on port ${serverPort}`,
})
if (!apiKey || apiKey.toString().trim().length === 0) {
@ -146,20 +98,37 @@ function LocalAPIServerContent() {
}
setShowApiKeyError(false)
const modelToStart = getModelToStart()
setServerStatus('pending')
// Check if there's already a loaded model
serviceHub
.models()
.getActiveModels()
.then((loadedModels) => {
if (loadedModels && loadedModels.length > 0) {
console.log(`Using already loaded model: ${loadedModels[0]}`)
// Model already loaded, just start the server
return Promise.resolve()
} else {
// No loaded model, start one first
const modelToStart = getModelToStart({
selectedModel,
selectedProvider,
getProviderByName,
})
// Only start server if we have a model to load
if (!modelToStart) {
console.warn(
'Cannot start Local API Server: No model available to load'
)
return
throw new Error('No model available to load')
}
setServerStatus('pending')
setIsModelLoading(true) // Start loading state
// Start the model first
serviceHub
return serviceHub
.models()
.startModel(modelToStart.provider, modelToStart.model)
.then(() => {
@ -169,6 +138,8 @@ function LocalAPIServerContent() {
// Add a small delay for the backend to update state
return new Promise((resolve) => setTimeout(resolve, 500))
})
}
})
.then(() => {
// Then start the server
return window.core?.api?.startServer({
@ -196,31 +167,31 @@ function LocalAPIServerContent() {
toast.dismiss()
// Extract error message from various error formats
const errorMsg = error && typeof error === 'object' && 'message' in error
const errorMsg =
error && typeof error === 'object' && 'message' in error
? String(error.message)
: String(error)
// Port-related errors (highest priority)
if (errorMsg.includes('Address already in use')) {
toast.error('Port has been occupied', {
description: `Port ${serverPort} is already in use. Please try a different port.`
description: `Port ${serverPort} is already in use. Please try a different port.`,
})
}
// Model-related errors
else if (errorMsg.includes('Invalid or inaccessible model path')) {
toast.error('Invalid or inaccessible model path', {
description: errorMsg
description: errorMsg,
})
}
else if (errorMsg.includes('model')) {
} else if (errorMsg.includes('model')) {
toast.error('Failed to start model', {
description: errorMsg
description: errorMsg,
})
}
// Generic server errors
else {
toast.error('Failed to start server', {
description: errorMsg
description: errorMsg,
})
}
})
@ -294,6 +265,22 @@ function LocalAPIServerContent() {
</div>
}
>
<CardItem
title={t('settings:localApiServer.runOnStartup')}
description={t('settings:localApiServer.runOnStartupDesc')}
actions={
<Switch
checked={enableOnStartup}
onCheckedChange={(checked) => {
if (!apiKey || apiKey.toString().trim().length === 0) {
setShowApiKeyError(true)
return
}
setEnableOnStartup(checked)
}}
/>
}
/>
<CardItem
title={t('settings:localApiServer.serverLogs')}
description={t('settings:localApiServer.serverLogsDesc')}
@ -312,24 +299,34 @@ function LocalAPIServerContent() {
</Button>
}
/>
</Card>
{/* Startup Configuration */}
<Card title={t('settings:localApiServer.startupConfiguration')}>
<CardItem
title={t('settings:localApiServer.runOnStartup')}
description={t('settings:localApiServer.runOnStartupDesc')}
title={t('settings:localApiServer.swaggerDocs')}
description={t('settings:localApiServer.swaggerDocsDesc')}
actions={
<Switch
checked={enableOnStartup}
onCheckedChange={(checked) => {
if (!apiKey || apiKey.toString().trim().length === 0) {
setShowApiKeyError(true)
return
}
setEnableOnStartup(checked)
}}
/>
<a
href={`http://${serverHost}:${serverPort}`}
target="_blank"
rel="noopener noreferrer"
>
<Button
asChild
variant="link"
size="sm"
className="p-0 text-main-view-fg/80"
disabled={!isServerRunning}
title={t('settings:localApiServer.swaggerDocs')}
>
<div
className={cn(
'cursor-pointer flex items-center justify-center rounded-sm hover:bg-main-view-fg/15 bg-main-view-fg/10 transition-all duration-200 ease-in-out px-2 py-1 gap-1',
!isServerRunning && 'opacity-50 cursor-not-allowed'
)}
>
<span>{t('settings:localApiServer.openDocs')}</span>
</div>
</Button>
</a>
}
/>
</Card>

View File

@ -39,7 +39,7 @@ function ModelProviders() {
toast.error(t('providerAlreadyExists', { name }))
return
}
const newProvider = {
const newProvider: ProviderObject = {
provider: name,
active: true,
models: [],

View File

@ -4,7 +4,11 @@ import { isPlatformTauri } from '@/lib/platform/utils'
// Mock platform detection
vi.mock('@/lib/platform/utils', () => ({
isPlatformTauri: vi.fn().mockReturnValue(false)
isPlatformTauri: vi.fn().mockReturnValue(false),
isPlatformIOS: vi.fn().mockReturnValue(false),
isPlatformAndroid: vi.fn().mockReturnValue(false),
isIOS: vi.fn().mockReturnValue(false),
isAndroid: vi.fn().mockReturnValue(false)
}))
// Mock @jan/extensions-web to return empty extensions for testing

View File

@ -0,0 +1,69 @@
/**
* Mobile Core Service - Android/iOS implementation
*
* This service extends TauriCoreService but provides mobile-specific
* extension loading. Instead of reading extensions from the filesystem,
* it returns pre-bundled web extensions.
*/
import { TauriCoreService } from './tauri'
import type { ExtensionManifest } from '@/lib/extension'
import JanConversationalExtension from '@janhq/conversational-extension'
export class MobileCoreService extends TauriCoreService {
/**
* Override: Return pre-bundled extensions instead of reading from filesystem
*/
override async getActiveExtensions(): Promise<ExtensionManifest[]> {
return this.getBundledExtensions()
}
/**
* Override: No-op on mobile - extensions are pre-bundled in the app
*/
override async installExtensions(): Promise<void> {
console.log('[Mobile] Extensions are pre-bundled, skipping installation')
}
/**
* Override: No-op on mobile - cannot install additional extensions
*/
override async installExtension(): Promise<ExtensionManifest[]> {
console.log('[Mobile] Cannot install extensions on mobile; they are pre-bundled')
return this.getBundledExtensions()
}
/**
* Override: No-op on mobile - cannot uninstall bundled extensions
*/
override async uninstallExtension(): Promise<boolean> {
console.log('[Mobile] Cannot uninstall pre-bundled extensions on mobile')
return false
}
/**
* Private method to return pre-bundled mobile extensions
*/
private getBundledExtensions(): ExtensionManifest[] {
const conversationalExt = new JanConversationalExtension(
'built-in',
'@janhq/conversational-extension',
'Conversational Extension',
true,
'Manages conversation threads and messages',
'1.0.0'
)
return [
{
name: '@janhq/conversational-extension',
productName: 'Conversational Extension',
url: 'built-in',
active: true,
description: 'Manages conversation threads and messages',
version: '1.0.0',
extensionInstance: conversationalExt,
},
]
}
}

View File

@ -5,7 +5,7 @@
* then provides synchronous access to service instances throughout the app.
*/
import { isPlatformTauri } from '@/lib/platform/utils'
import { isPlatformTauri, isPlatformIOS, isPlatformAndroid } from '@/lib/platform/utils'
// Import default services
import { DefaultThemeService } from './theme/default'
@ -106,11 +106,14 @@ class PlatformServiceHub implements ServiceHub {
console.log(
'Initializing service hub for platform:',
isPlatformTauri() ? 'Tauri' : 'Web'
isPlatformTauri() && !isPlatformIOS() && !isPlatformAndroid() ? 'Tauri' :
isPlatformIOS() ? 'iOS' :
isPlatformAndroid() ? 'Android' : 'Web'
)
try {
if (isPlatformTauri()) {
if (isPlatformTauri() && !isPlatformIOS() && !isPlatformAndroid()) {
// Desktop Tauri
const [
themeModule,
windowModule,
@ -154,6 +157,44 @@ class PlatformServiceHub implements ServiceHub {
this.pathService = new pathModule.TauriPathService()
this.coreService = new coreModule.TauriCoreService()
this.deepLinkService = new deepLinkModule.TauriDeepLinkService()
} else if (isPlatformIOS() || isPlatformAndroid()) {
const [
themeModule,
windowModule,
eventsModule,
appModule,
mcpModule,
providersModule,
dialogModule,
openerModule,
pathModule,
coreModule,
deepLinkModule,
] = await Promise.all([
import('./theme/tauri'),
import('./window/tauri'),
import('./events/tauri'),
import('./app/tauri'),
import('./mcp/tauri'),
import('./providers/tauri'),
import('./dialog/tauri'),
import('./opener/tauri'),
import('./path/tauri'),
import('./core/mobile'), // Use mobile-specific core service
import('./deeplink/tauri'),
])
this.themeService = new themeModule.TauriThemeService()
this.windowService = new windowModule.TauriWindowService()
this.eventsService = new eventsModule.TauriEventsService()
this.appService = new appModule.TauriAppService()
this.mcpService = new mcpModule.TauriMCPService()
this.providersService = new providersModule.TauriProvidersService()
this.dialogService = new dialogModule.TauriDialogService()
this.openerService = new openerModule.TauriOpenerService()
this.pathService = new pathModule.TauriPathService()
this.coreService = new coreModule.MobileCoreService() // Mobile service with pre-loaded extensions
this.deepLinkService = new deepLinkModule.TauriDeepLinkService()
} else {
const [
themeModule,

View File

@ -578,6 +578,9 @@ export class DefaultModelsService implements ModelsService {
}
}>
}>
chat_template_kwargs?: {
enable_thinking: boolean
}
}) => Promise<number>
}
@ -654,6 +657,9 @@ export class DefaultModelsService implements ModelsService {
return await engine.getTokensCount({
model: modelId,
messages: transformedMessages,
chat_template_kwargs: {
enable_thinking: false,
},
})
}

View File

@ -151,6 +151,12 @@ export class TauriProvidersService extends DefaultProvidersService {
headers['Authorization'] = `Bearer ${provider.api_key}`
}
if (provider.custom_header) {
provider.custom_header.forEach((header) => {
headers[header.header] = header.value
})
}
// Always use Tauri's fetch to avoid CORS issues
const response = await fetchTauri(`${provider.base_url}/models`, {
method: 'GET',

View File

@ -2,7 +2,8 @@
* Tauri Theme Service - Desktop implementation
*/
import { getCurrentWindow, Theme } from '@tauri-apps/api/window'
import { Theme } from '@tauri-apps/api/window'
import { getAllWebviewWindows, type WebviewWindow } from '@tauri-apps/api/webviewWindow'
import type { ThemeMode } from './types'
import { DefaultThemeService } from './default'
@ -10,7 +11,27 @@ export class TauriThemeService extends DefaultThemeService {
async setTheme(theme: ThemeMode): Promise<void> {
try {
const tauriTheme = theme as Theme | null
await getCurrentWindow().setTheme(tauriTheme)
// Update all open windows, not just the current one
const allWindows = await getAllWebviewWindows()
// Convert to array if it's not already
const windowsArray: WebviewWindow[] = Array.isArray(allWindows)
? allWindows
: Object.values(allWindows)
await Promise.all(
windowsArray.map(async (window) => {
try {
await window.setTheme(tauriTheme)
} catch (error) {
console.error(
`Failed to set theme for window ${window.label}:`,
error
)
}
})
)
} catch (error) {
console.error('Error setting theme in Tauri:', error)
throw error
@ -21,7 +42,7 @@ export class TauriThemeService extends DefaultThemeService {
return {
setTheme: (theme: ThemeMode): Promise<void> => {
return this.setTheme(theme)
}
},
}
}
}

View File

@ -7,8 +7,39 @@ import type { WindowConfig, WebviewWindowInstance } from './types'
import { DefaultWindowService } from './default'
export class TauriWindowService extends DefaultWindowService {
async createWebviewWindow(config: WindowConfig): Promise<WebviewWindowInstance> {
async createWebviewWindow(
config: WindowConfig
): Promise<WebviewWindowInstance> {
try {
// Get current theme from localStorage
const storedTheme = localStorage.getItem('jan-theme')
let theme: 'light' | 'dark' | undefined = undefined
if (storedTheme) {
try {
const themeData = JSON.parse(storedTheme)
const activeTheme = themeData?.state?.activeTheme
const isDark = themeData?.state?.isDark
// Set theme based on stored preference
if (activeTheme === 'auto') {
theme = undefined // Let OS decide
} else if (
activeTheme === 'dark' ||
(activeTheme === 'auto' && isDark)
) {
theme = 'dark'
} else if (
activeTheme === 'light' ||
(activeTheme === 'auto' && !isDark)
) {
theme = 'light'
}
} catch (e) {
console.warn('Failed to parse theme from localStorage:', e)
}
}
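// Illustrative value (assumption: zustand persist layout used by useTheme; the exact
// key set may differ). The parse above expects something like:
//   localStorage.getItem('jan-theme')
//   // => '{"state":{"activeTheme":"dark","isDark":true},"version":0}'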
const webviewWindow = new WebviewWindow(config.label, {
url: config.url,
title: config.title,
@ -20,8 +51,12 @@ export class TauriWindowService extends DefaultWindowService {
maximizable: config.maximizable,
closable: config.closable,
fullscreen: config.fullscreen,
theme: theme,
})
// Setup theme listener for this window
this.setupThemeListenerForWindow(webviewWindow)
return {
label: config.label,
async close() {
@ -38,7 +73,7 @@ export class TauriWindowService extends DefaultWindowService {
},
async setTitle(title: string) {
await webviewWindow.setTitle(title)
}
},
}
} catch (error) {
console.error('Error creating Tauri window:', error)
@ -46,7 +81,9 @@ export class TauriWindowService extends DefaultWindowService {
}
}
async getWebviewWindowByLabel(label: string): Promise<WebviewWindowInstance | null> {
async getWebviewWindowByLabel(
label: string
): Promise<WebviewWindowInstance | null> {
try {
const existingWindow = await WebviewWindow.getByLabel(label)
@ -67,7 +104,7 @@ export class TauriWindowService extends DefaultWindowService {
},
async setTitle(title: string) {
await existingWindow.setTitle(title)
}
},
}
}
@ -135,8 +172,35 @@ export class TauriWindowService extends DefaultWindowService {
center: true,
})
} catch (error) {
console.error('Error opening local API server logs window in Tauri:', error)
console.error(
'Error opening local API server logs window in Tauri:',
error
)
throw error
}
}
private setupThemeListenerForWindow(window: WebviewWindow): void {
// Listen to theme change events from Tauri backend
import('@tauri-apps/api/event')
.then(({ listen }) => {
return listen<string>('theme-changed', async (event) => {
const theme = event.payload
try {
if (theme === 'dark') {
await window.setTheme('dark')
} else if (theme === 'light') {
await window.setTheme('light')
} else {
await window.setTheme(null)
}
} catch (err) {
console.error('Failed to update window theme:', err)
}
})
})
.catch((err) => {
console.error('Failed to setup theme listener for window:', err)
})
}
}

View File

@ -48,6 +48,7 @@ type ProviderObject = {
settings: ProviderSetting[]
models: Model[]
persist?: boolean
custom_header?: ProviderCustomHeader[] | null
}
/**
@ -71,3 +72,8 @@ type ProxyOptions = {
verifyHostSSL: boolean
noProxy: string
}
type ProviderCustomHeader = {
header: string
value: string
}
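// Illustrative example (not part of this diff): the Anthropic migration above stores
// its extra request headers in exactly this shape, and TauriProvidersService copies
// each entry into the outgoing headers object.
// const anthropicHeaders: ProviderCustomHeader[] = [
//   { header: 'anthropic-version', value: '2023-06-01' },
//   { header: 'anthropic-dangerous-direct-browser-access', value: 'true' },
// ]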

View File

@ -0,0 +1,65 @@
/**
* Utility to check if the system supports blur/acrylic effects
* based on OS information from hardware data
*/
import type { HardwareData } from '@/hooks/useHardware'
/**
* Check if Windows supports blur effects based on build number
* Windows 10 build 17134 (version 1803) and later support acrylic effects
*/
function checkWindowsBlurSupport(osName: string): boolean {
// os_name format: "Windows 10 Pro (build 22631)" or similar
const buildMatch = osName.match(/build\s+(\d+)/i)
if (buildMatch && buildMatch[1]) {
const build = parseInt(buildMatch[1], 10)
return build >= 17134
}
// If we can't detect build number, assume modern Windows supports blur
return true
}
/**
* Check if Linux supports blur effects based on desktop environment
*/
function checkLinuxBlurSupport(): boolean {
// Check environment variables (only available in Tauri)
if (typeof window === 'undefined') return false
// These checks would need to be done on the backend
// For now, we'll assume Linux with common DEs supports blur
return true
}
/**
* Check if the system supports blur/acrylic effects
*
* @param hardwareData - Hardware data from the hardware plugin
* @returns true if blur effects are supported
*/
export function supportsBlurEffects(hardwareData: HardwareData | null): boolean {
if (!hardwareData) return false
const { os_type, os_name } = hardwareData
// macOS always supports blur/vibrancy effects
if (os_type === 'macos') {
return true
}
// Windows: Check build number
if (os_type === 'windows') {
return checkWindowsBlurSupport(os_name)
}
// Linux: Check desktop environment (simplified for now)
if (os_type === 'linux') {
return checkLinuxBlurSupport()
}
// Unknown platforms: assume no blur support
return false
}
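// Illustrative usage (assumption: os_name strings follow the "… (build NNNNN)" format
// parsed above; the objects are cast because HardwareData carries more fields):
// supportsBlurEffects({ os_type: 'windows', os_name: 'Windows 11 Pro (build 22631)' } as HardwareData) // -> true
// supportsBlurEffects({ os_type: 'windows', os_name: 'Windows 10 Pro (build 16299)' } as HardwareData) // -> false (< 17134)
// supportsBlurEffects({ os_type: 'macos', os_name: 'macOS 15.0' } as HardwareData)                     // -> true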

View File

@ -0,0 +1,69 @@
import { localStorageKey } from '@/constants/localStorage'
import type { ModelInfo } from '@janhq/core'
export const getLastUsedModel = (): {
provider: string
model: string
} | null => {
try {
const stored = localStorage.getItem(localStorageKey.lastUsedModel)
return stored ? JSON.parse(stored) : null
} catch (error) {
console.debug('Failed to get last used model from localStorage:', error)
return null
}
}
// Helper function to determine which model to start
export const getModelToStart = (params: {
selectedModel?: ModelInfo | null
selectedProvider?: string | null
getProviderByName: (name: string) => ModelProvider | undefined
}): { model: string; provider: ModelProvider } | null => {
const { selectedModel, selectedProvider, getProviderByName } = params
// Use last used model if available
const lastUsedModel = getLastUsedModel()
if (lastUsedModel) {
const provider = getProviderByName(lastUsedModel.provider)
if (provider && provider.models.some((m) => m.id === lastUsedModel.model)) {
return { model: lastUsedModel.model, provider }
} else {
// Last used model not found under its provider; fall back to the first llamacpp model
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
}
}
// Use selected model if available
if (selectedModel && selectedProvider) {
const provider = getProviderByName(selectedProvider)
if (provider) {
return { model: selectedModel.id, provider }
}
}
// Use first model from llamacpp provider
const llamacppProvider = getProviderByName('llamacpp')
if (
llamacppProvider &&
llamacppProvider.models &&
llamacppProvider.models.length > 0
) {
return {
model: llamacppProvider.models[0].id,
provider: llamacppProvider,
}
}
return null
}

View File

@ -6,10 +6,42 @@ import {
} from '@janhq/core'
// Helper function to get reasoning content from an object
function getReasoning(obj: { reasoning_content?: string | null; reasoning?: string | null } | null | undefined): string | null {
function getReasoning(
obj:
| { reasoning_content?: string | null; reasoning?: string | null }
| null
| undefined
): string | null {
return obj?.reasoning_content ?? obj?.reasoning ?? null
}
/**
* Normalize the content of a message by removing reasoning content.
* This is useful to ensure that reasoning content does not get sent to the model.
* @param content
* @returns
*/
export function removeReasoningContent(content: string): string {
// Reasoning content should not be sent to the model
if (content.includes('<think>')) {
const match = content.match(/<think>([\s\S]*?)<\/think>/)
if (match?.index !== undefined) {
const splitIndex = match.index + match[0].length
content = content.slice(splitIndex).trim()
}
}
if (content.includes('<|channel|>analysis<|message|>')) {
const match = content.match(
/<\|channel\|>analysis<\|message\|>([\s\S]*?)<\|start\|>assistant<\|channel\|>final<\|message\|>/
)
if (match?.index !== undefined) {
const splitIndex = match.index + match[0].length
content = content.slice(splitIndex).trim()
}
}
return content
}
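// Illustrative example: reasoning wrapped in <think> tags is stripped before the text
// is sent to the model or counted for tokens, matching the call sites above.
// removeReasoningContent('<think>plan the reply step by step</think>Paris is the capital of France.')
// // -> 'Paris is the capital of France.'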
// Extract reasoning from a message (for completed responses)
export function extractReasoningFromMessage(
message: chatCompletionRequestMessage | ChatCompletionMessage

View File

@ -25,7 +25,8 @@
/* Url */
"baseUrl": ".",
"paths": {
"@/*": ["./src/*"]
"@/*": ["./src/*"],
"@janhq/conversational-extension": ["../extensions/conversational-extension/src/index.ts"]
}
},
"include": ["src"],

View File

@ -7,7 +7,8 @@
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["./src/*"]
"@/*": ["./src/*"],
"@janhq/conversational-extension": ["../extensions/conversational-extension/src/index.ts"]
}
}
}

View File

@ -64,6 +64,7 @@ export default defineConfig(({ mode }) => {
resolve: {
alias: {
'@': path.resolve(__dirname, './src'),
'@janhq/conversational-extension': path.resolve(__dirname, '../extensions/conversational-extension/src/index.ts'),
},
},
optimizeDeps: {

View File

@ -79,6 +79,7 @@ export default defineConfig({
resolve: {
alias: {
'@': path.resolve(__dirname, './src'),
'@janhq/conversational-extension': path.resolve(__dirname, '../extensions-web/src/conversational-web/index.ts'),
},
},
define: {