Merge pull request #6674 from menloresearch/feat/sync-release=to-dev

This commit is contained in:
Dinh Long Nguyen 2025-09-30 23:18:22 +07:00 committed by GitHub
commit 46d55030f0
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
29 changed files with 470 additions and 111 deletions

View File

@ -54,6 +54,8 @@ on:
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
MSI_FILE_NAME:
value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }}
jobs:
build-windows-x64:
@ -61,6 +63,7 @@ jobs:
outputs:
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
permissions:
contents: write
steps:
@ -189,9 +192,15 @@ jobs:
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-${{ inputs.new_version }}
name: jan-windows-exe-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/nsis/*.exe
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: jan-windows-msi-${{ inputs.new_version }}
path: |
./src-tauri/target/release/bundle/msi/*.msi
## Set output filename for windows
- name: Set output filename for windows
@ -201,13 +210,18 @@ jobs:
if [ "${{ inputs.channel }}" != "stable" ]; then
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi"
else
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi"
fi
echo "::set-output name=WIN_SIG::$WIN_SIG"
echo "::set-output name=FILE_NAME::$FILE_NAME"
echo "::set-output name=MSI_FILE_NAME::$MSI_FILE"
id: metadata
## Upload to s3 for nightly and beta
@ -220,6 +234,8 @@ jobs:
# Upload for tauri updater
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
aws s3 cp ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.MSI_FILE_NAME }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
@ -236,3 +252,13 @@ jobs:
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
asset_content_type: application/octet-stream
- name: Upload release assert if public provider is github
if: inputs.public_provider == 'github'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: actions/upload-release-asset@v1.0.1
with:
upload_url: ${{ inputs.upload_url }}
asset_path: ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }}
asset_name: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
asset_content_type: application/octet-stream

View File

@ -62,6 +62,7 @@ pub async fn estimate_kv_cache_internal(
ctx_size: Option<u64>,
) -> Result<KVCacheEstimate, KVCacheError> {
log::info!("Received ctx_size parameter: {:?}", ctx_size);
log::info!("Received model metadata:\n{:?}", &meta);
let arch = meta
.get("general.architecture")
.ok_or(KVCacheError::ArchitectureNotFound)?;
@ -94,15 +95,43 @@ pub async fn estimate_kv_cache_internal(
let key_len_key = format!("{}.attention.key_length", arch);
let val_len_key = format!("{}.attention.value_length", arch);
let key_len = meta
let mut key_len = meta
.get(&key_len_key)
.and_then(|s| s.parse::<u64>().ok())
.unwrap_or(0);
let val_len = meta
let mut val_len = meta
.get(&val_len_key)
.and_then(|s| s.parse::<u64>().ok())
.unwrap_or(0);
// Fallback: calculate from embedding_length if key/val lengths not found
if key_len == 0 || val_len == 0 {
let emb_len_key = format!("{}.embedding_length", arch);
let emb_len = meta
.get(&emb_len_key)
.and_then(|s| s.parse::<u64>().ok())
.unwrap_or(0);
if emb_len > 0 && n_head > 0 {
// For most transformers: head_dim = embedding_length / total_heads
let total_heads = meta
.get(&n_head_key)
.and_then(|s| s.parse::<u64>().ok())
.unwrap_or(n_head);
let head_dim = emb_len / total_heads;
key_len = head_dim;
val_len = head_dim;
log::info!(
"Calculated key_len and val_len from embedding_length: {} / {} heads = {} per head",
emb_len,
total_heads,
head_dim
);
}
}
if key_len == 0 || val_len == 0 {
return Err(KVCacheError::EmbeddingLengthInvalid);
}

View File

@ -946,3 +946,47 @@ pub async fn should_restart_server(
}
}
}
/// Add a new server configuration to the MCP config file.
///
/// Thin convenience wrapper around [`add_server_config_with_path`] that
/// targets the default config file ("mcp_config.json", resolved inside the
/// Jan data folder by the delegate).
///
/// * `server_key`   - key to insert under the `mcpServers` object.
/// * `server_value` - JSON value describing the server (command, args, env, ...).
///
/// Returns `Err(String)` if the config file cannot be read, parsed,
/// updated, or written back.
pub fn add_server_config<R: Runtime>(
app_handle: tauri::AppHandle<R>,
server_key: String,
server_value: Value,
) -> Result<(), String> {
add_server_config_with_path(app_handle, server_key, server_value, None)
}
/// Add a new server configuration to the MCP config file, optionally using a
/// custom file name (defaults to "mcp_config.json" when `config_filename` is
/// `None`). The file must already exist; a missing file is reported as a
/// read error rather than being created.
pub fn add_server_config_with_path<R: Runtime>(
    app_handle: tauri::AppHandle<R>,
    server_key: String,
    server_value: Value,
    config_filename: Option<&str>,
) -> Result<(), String> {
    let file_name = config_filename.unwrap_or("mcp_config.json");
    let config_path = get_jan_data_folder_path(app_handle).join(file_name);

    // Load and parse the existing configuration from disk.
    let raw = std::fs::read_to_string(&config_path)
        .map_err(|e| format!("Failed to read config file: {e}"))?;
    let mut config: Value =
        serde_json::from_str(&raw).map_err(|e| format!("Failed to parse config: {e}"))?;

    // Insert the server under "mcpServers", creating the section if absent.
    let root = config.as_object_mut().ok_or("Config root is not an object")?;
    let servers = root
        .entry("mcpServers")
        .or_insert_with(|| Value::Object(serde_json::Map::new()));
    servers
        .as_object_mut()
        .ok_or("mcpServers is not an object")?
        .insert(server_key, server_value);

    // Persist the updated configuration back to disk, pretty-printed.
    let serialized = serde_json::to_string_pretty(&config)
        .map_err(|e| format!("Failed to serialize config: {e}"))?;
    std::fs::write(&config_path, serialized)
        .map_err(|e| format!("Failed to write config file: {e}"))?;

    Ok(())
}

View File

@ -1,4 +1,4 @@
use super::helpers::run_mcp_commands;
use super::helpers::{add_server_config, add_server_config_with_path, run_mcp_commands};
use crate::core::app::commands::get_jan_data_folder_path;
use crate::core::state::SharedMcpServers;
use std::collections::HashMap;
@ -38,6 +38,150 @@ async fn test_run_mcp_commands() {
std::fs::remove_file(&config_path).expect("Failed to remove config file");
}
#[test]
fn test_add_server_config_new_file() {
    let app = mock_app();
    let data_dir = get_jan_data_folder_path(app.handle().clone());
    let config_path = data_dir.join("mcp_config_test_new.json");

    // Make sure the data directory exists before writing the fixture.
    if let Some(parent) = config_path.parent() {
        std::fs::create_dir_all(parent).expect("Failed to create parent directory");
    }

    // Seed the config file with an empty mcpServers section.
    std::fs::write(&config_path, b"{\"mcpServers\":{}}")
        .expect("Failed to write to config file");

    // Insert a brand-new server entry.
    let server_value = serde_json::json!({
        "command": "npx",
        "args": ["-y", "test-server"],
        "env": { "TEST_API_KEY": "test_key" },
        "active": false
    });
    let result = add_server_config_with_path(
        app.handle().clone(),
        "test_server".to_string(),
        server_value.clone(),
        Some("mcp_config_test_new.json"),
    );
    assert!(result.is_ok(), "Failed to add server config: {:?}", result);

    // Reload the file and confirm the entry landed where expected.
    let written = std::fs::read_to_string(&config_path).expect("Failed to read config file");
    let config: serde_json::Value =
        serde_json::from_str(&written).expect("Failed to parse config");
    let entry = &config["mcpServers"]["test_server"];
    assert!(entry.is_object());
    assert_eq!(entry["command"], "npx");
    assert_eq!(entry["args"][0], "-y");
    assert_eq!(entry["args"][1], "test-server");

    // Clean up
    std::fs::remove_file(&config_path).expect("Failed to remove config file");
}
#[test]
fn test_add_server_config_existing_servers() {
    let app = mock_app();
    let data_dir = get_jan_data_folder_path(app.handle().clone());
    let config_path = data_dir.join("mcp_config_test_existing.json");

    // Make sure the data directory exists before writing the fixture.
    if let Some(parent) = config_path.parent() {
        std::fs::create_dir_all(parent).expect("Failed to create parent directory");
    }

    // Seed the file with one pre-existing server entry.
    let initial_config = serde_json::json!({
        "mcpServers": {
            "existing_server": {
                "command": "existing_command",
                "args": ["arg1"],
                "active": true
            }
        }
    });
    std::fs::write(
        &config_path,
        serde_json::to_string_pretty(&initial_config).unwrap(),
    )
    .expect("Failed to write to config file");

    // Add a second server alongside the existing one.
    let new_server_value = serde_json::json!({
        "command": "new_command",
        "args": ["new_arg"],
        "active": false
    });
    let result = add_server_config_with_path(
        app.handle().clone(),
        "new_server".to_string(),
        new_server_value,
        Some("mcp_config_test_existing.json"),
    );
    assert!(result.is_ok(), "Failed to add server config: {:?}", result);

    // Both the original and the new entries must now be present.
    let written = std::fs::read_to_string(&config_path).expect("Failed to read config file");
    let config: serde_json::Value =
        serde_json::from_str(&written).expect("Failed to parse config");
    assert!(config["mcpServers"]["existing_server"].is_object());
    assert_eq!(
        config["mcpServers"]["existing_server"]["command"],
        "existing_command"
    );
    assert!(config["mcpServers"]["new_server"].is_object());
    assert_eq!(config["mcpServers"]["new_server"]["command"], "new_command");

    // Clean up
    std::fs::remove_file(&config_path).expect("Failed to remove config file");
}
#[test]
fn test_add_server_config_missing_config_file() {
    let app = mock_app();
    let data_dir = get_jan_data_folder_path(app.handle().clone());

    // Make sure the data directory itself exists...
    if let Some(parent) = data_dir.parent() {
        std::fs::create_dir_all(parent).ok();
    }
    std::fs::create_dir_all(&data_dir).ok();

    // ...but the default config file does not.
    let config_path = data_dir.join("mcp_config.json");
    if config_path.exists() {
        std::fs::remove_file(&config_path).ok();
    }

    let server_value = serde_json::json!({
        "command": "test",
        "args": [],
        "active": false
    });
    let result = add_server_config(app.handle().clone(), "test".to_string(), server_value);

    // With no config file on disk, the call must fail with a read error.
    assert!(result.is_err(), "Expected error when config file doesn't exist");
    assert!(result.unwrap_err().contains("Failed to read config file"));
}
#[cfg(not(target_os = "windows"))]
#[test]
fn test_bin_path_construction_with_join() {

View File

@ -3,39 +3,23 @@ use std::{
fs::{self, File},
io::Read,
path::PathBuf,
sync::Arc,
};
use tar::Archive;
use tauri::{
menu::{Menu, MenuItem, PredefinedMenuItem},
tray::{MouseButton, MouseButtonState, TrayIcon, TrayIconBuilder, TrayIconEvent},
App, Emitter, Manager,
App, Emitter, Manager, Wry,
};
use tauri_plugin_store::StoreExt;
// use tokio::sync::Mutex;
// use tokio::time::{sleep, Duration}; // Using tokio::sync::Mutex
// // MCP
use tauri_plugin_store::Store;
use crate::core::mcp::helpers::add_server_config;
// MCP
use super::{
app::commands::get_jan_data_folder_path, extensions::commands::get_jan_extensions_path,
mcp::helpers::run_mcp_commands, state::AppState,
extensions::commands::get_jan_extensions_path, mcp::helpers::run_mcp_commands, state::AppState,
};
pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> {
let mut store_path = get_jan_data_folder_path(app.clone());
store_path.push("store.json");
let store = app.store(store_path).expect("Store not initialized");
let stored_version = store
.get("version")
.and_then(|v| v.as_str().map(String::from))
.unwrap_or_default();
let app_version = app
.config()
.version
.clone()
.unwrap_or_else(|| "".to_string());
let extensions_path = get_jan_extensions_path(app.clone());
let pre_install_path = app
.path()
@ -50,13 +34,8 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri
if std::env::var("IS_CLEAN").is_ok() {
clean_up = true;
}
log::info!(
"Installing extensions. Clean up: {}, Stored version: {}, App version: {}",
clean_up,
stored_version,
app_version
);
if !clean_up && stored_version == app_version && extensions_path.exists() {
log::info!("Installing extensions. Clean up: {}", clean_up);
if !clean_up && extensions_path.exists() {
return Ok(());
}
@ -160,10 +139,36 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri
)
.map_err(|e| e.to_string())?;
// Store the new app version
store.set("version", serde_json::json!(app_version));
store.save().expect("Failed to save store");
Ok(())
}
/// Migrate the MCP servers configuration to the latest schema version.
///
/// The schema version is tracked in the store under `mcp_version`; each
/// migration step runs at most once. A failure to add a default server is
/// logged but does not abort the migration — the version marker is still
/// bumped so a broken step is not retried on every launch.
///
/// # Errors
/// Returns `Err` if the updated version marker cannot be persisted to disk.
pub fn migrate_mcp_servers(
    app_handle: tauri::AppHandle,
    store: Arc<Store<Wry>>,
) -> Result<(), String> {
    // A missing or non-integer marker is treated as "never migrated".
    let mcp_version = store
        .get("mcp_version")
        .and_then(|v| v.as_i64())
        .unwrap_or(0);

    if mcp_version < 1 {
        log::info!("Migrating MCP schema version 1");
        // Seed the default "exa" server entry; best-effort only.
        let result = add_server_config(
            app_handle,
            "exa".to_string(),
            serde_json::json!({
                "command": "npx",
                "args": ["-y", "exa-mcp-server"],
                "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" },
                "active": false
            }),
        );
        if let Err(e) = result {
            log::error!("Failed to add server config: {}", e);
        }
    }

    // Record that all known migrations have been applied. Propagate a save
    // failure instead of panicking, since this function returns Result.
    store.set("mcp_version", 1);
    store
        .save()
        .map_err(|e| format!("Failed to save store: {e}"))?;
    Ok(())
}

View File

@ -148,6 +148,9 @@ pub async fn create_message<R: Runtime>(
let data = serde_json::to_string(&message).map_err(|e| e.to_string())?;
writeln!(file, "{}", data).map_err(|e| e.to_string())?;
// Explicitly flush to ensure data is written before returning
file.flush().map_err(|e| e.to_string())?;
}
Ok(message)

View File

@ -82,7 +82,7 @@ async fn test_create_and_list_messages() {
let messages = list_messages(app.handle().clone(), thread_id.clone())
.await
.unwrap();
assert!(messages.len() > 0);
assert!(messages.len() > 0, "Expected at least one message, but got none. Thread ID: {}", thread_id);
assert_eq!(messages[0]["role"], "user");
// Clean up

View File

@ -10,6 +10,7 @@ use jan_utils::generate_app_token;
use std::{collections::HashMap, sync::Arc};
use tauri::{Emitter, Manager, RunEvent};
use tauri_plugin_llamacpp::cleanup_llama_processes;
use tauri_plugin_store::StoreExt;
use tokio::sync::Mutex;
use crate::core::setup::setup_tray;
@ -151,11 +152,40 @@ pub fn run() {
)?;
app.handle()
.plugin(tauri_plugin_updater::Builder::new().build())?;
// Install extensions
if let Err(e) = setup::install_extensions(app.handle().clone(), false) {
// Start migration
let mut store_path = get_jan_data_folder_path(app.handle().clone());
store_path.push("store.json");
let store = app
.handle()
.store(store_path)
.expect("Store not initialized");
let stored_version = store
.get("version")
.and_then(|v| v.as_str().map(String::from))
.unwrap_or_default();
let app_version = app
.config()
.version
.clone()
.unwrap_or_else(|| "".to_string());
// Migrate extensions
if let Err(e) =
setup::install_extensions(app.handle().clone(), stored_version != app_version)
{
log::error!("Failed to install extensions: {}", e);
}
// Migrate MCP servers
if let Err(e) = setup::migrate_mcp_servers(app.handle().clone(), store.clone()) {
log::error!("Failed to migrate MCP servers: {}", e);
}
// Store the new app version
store.set("version", serde_json::json!(app_version));
store.save().expect("Failed to save store");
// Migration completed
if option_env!("ENABLE_SYSTEM_TRAY_ICON").unwrap_or("false") == "true" {
log::info!("Enabling system tray icon");
let _ = setup_tray(app);

View File

@ -76,6 +76,7 @@
"deep-link": { "schemes": ["jan"] }
},
"bundle": {
"publisher": "Menlo Research Pte. Ltd.",
"active": true,
"createUpdaterArtifacts": false,
"icon": [

View File

@ -1,4 +1,4 @@
import { Link, useRouterState } from '@tanstack/react-router'
import { Link, useRouterState, useNavigate } from '@tanstack/react-router'
import { useLeftPanel } from '@/hooks/useLeftPanel'
import { cn } from '@/lib/utils'
import {
@ -58,6 +58,9 @@ const mainMenus = [
route: route.project,
isEnabled: true,
},
]
const secondaryMenus = [
{
title: 'common:assistants',
icon: IconClipboardSmile,
@ -82,6 +85,7 @@ const LeftPanel = () => {
const open = useLeftPanel((state) => state.open)
const setLeftPanel = useLeftPanel((state) => state.setLeftPanel)
const { t } = useTranslation()
const navigate = useNavigate()
const [searchTerm, setSearchTerm] = useState('')
const { isAuthenticated } = useAuth()
@ -212,7 +216,12 @@ const LeftPanel = () => {
if (editingProjectKey) {
updateFolder(editingProjectKey, name)
} else {
addFolder(name)
const newProject = addFolder(name)
// Navigate to the newly created project
navigate({
to: '/project/$projectId',
params: { projectId: newProject.id },
})
}
setProjectDialogOpen(false)
setEditingProjectKey(null)
@ -487,7 +496,7 @@ const LeftPanel = () => {
)}
<div className="flex flex-col h-full overflow-y-scroll w-[calc(100%+6px)]">
<div className="flex flex-col w-full h-full overflow-y-auto overflow-x-hidden">
<div className="flex flex-col w-full h-full overflow-y-auto overflow-x-hidden mb-3">
<div className="h-full w-full overflow-y-auto">
{favoritedThreads.length > 0 && (
<>
@ -607,6 +616,44 @@ const LeftPanel = () => {
</div>
</div>
</div>
{secondaryMenus.map((menu) => {
if (!menu.isEnabled) {
return null
}
// Regular menu items must have route and icon
if (!menu.route || !menu.icon) return null
const isActive = (() => {
// Settings routes
if (menu.route.includes(route.settings.index)) {
return currentPath.includes(route.settings.index)
}
// Default exact match for other routes
return currentPath === menu.route
})()
return (
<Link
key={menu.title}
to={menu.route}
onClick={() => isSmallScreen && setLeftPanel(false)}
data-test-id={`menu-${menu.title}`}
activeOptions={{ exact: true }}
className={cn(
'flex items-center gap-1.5 cursor-pointer hover:bg-left-panel-fg/10 py-1 px-1 rounded',
isActive && 'bg-left-panel-fg/10'
)}
>
<menu.icon size={18} className="text-left-panel-fg/70" />
<span className="font-medium text-left-panel-fg/90">
{t(menu.title)}
</span>
</Link>
)
})}
{PlatformFeatures[PlatformFeature.AUTHENTICATION] && (
<div className="space-y-1 shrink-0 py-1">
<div>

View File

@ -1,4 +1,3 @@
/* eslint-disable react-hooks/exhaustive-deps */
import ReactMarkdown, { Components } from 'react-markdown'
import remarkGfm from 'remark-gfm'
import remarkEmoji from 'remark-emoji'

View File

@ -3,6 +3,7 @@ import { create } from 'zustand'
import { RenderMarkdown } from './RenderMarkdown'
import { useAppState } from '@/hooks/useAppState'
import { useTranslation } from '@/i18n/react-i18next-compat'
import { extractThinkingContent } from '@/lib/utils'
interface Props {
text: string
@ -43,19 +44,6 @@ const ThinkingBlock = ({ id, text }: Props) => {
setThinkingState(id, newExpandedState)
}
// Extract thinking content from either format
const extractThinkingContent = (text: string) => {
return text
.replace(/<\/?think>/g, '')
.replace(/<\|channel\|>analysis<\|message\|>/g, '')
.replace(/<\|start\|>assistant<\|channel\|>final<\|message\|>/g, '')
.replace(/assistant<\|channel\|>final<\|message\|>/g, '')
.replace(/<\|channel\|>/g, '') // remove any remaining channel markers
.replace(/<\|message\|>/g, '') // remove any remaining message markers
.replace(/<\|start\|>/g, '') // remove any remaining start markers
.trim()
}
const thinkingContent = extractThinkingContent(text)
if (!thinkingContent) return null

View File

@ -23,7 +23,7 @@ import { useThreads } from '@/hooks/useThreads'
import { useThreadManagement } from '@/hooks/useThreadManagement'
import { useLeftPanel } from '@/hooks/useLeftPanel'
import { useMessages } from '@/hooks/useMessages'
import { cn } from '@/lib/utils'
import { cn, extractThinkingContent } from '@/lib/utils'
import { useSmallScreen } from '@/hooks/useMediaQuery'
import {
@ -167,14 +167,10 @@ const SortableItem = memo(
)}
>
<span>{thread.title || t('common:newThread')}</span>
{variant === 'project' && (
<>
{variant === 'project' && getLastMessageInfo?.content && (
<div className="text-sm text-main-view-fg/60 mt-0.5 line-clamp-2">
{getLastMessageInfo.content}
</div>
)}
</>
{variant === 'project' && getLastMessageInfo?.content && (
<span className="block text-sm text-main-view-fg/60 mt-0.5 truncate">
{extractThinkingContent(getLastMessageInfo.content)}
</span>
)}
</div>
<div className="flex items-center">
@ -185,7 +181,10 @@ const SortableItem = memo(
<DropdownMenuTrigger asChild>
<IconDots
size={14}
className="text-left-panel-fg/60 shrink-0 cursor-pointer px-0.5 -mr-1 data-[state=open]:bg-left-panel-fg/10 rounded group-hover/thread-list:data-[state=closed]:size-5 size-5 data-[state=closed]:size-0"
className={cn(
'text-left-panel-fg/60 shrink-0 cursor-pointer px-0.5 -mr-1 data-[state=open]:bg-left-panel-fg/10 rounded group-hover/thread-list:data-[state=closed]:size-5 size-5 data-[state=closed]:size-0',
variant === 'project' && 'text-main-view-fg/60'
)}
onClick={(e) => {
e.preventDefault()
e.stopPropagation()

View File

@ -0,0 +1,19 @@
import { useThreadScrolling } from '@/hooks/useThreadScrolling'
// Props for the ThreadPadding spacer.
type ThreadPaddingProps = {
  threadId: string
  scrollContainerRef: React.RefObject<HTMLDivElement | null>
}

/**
 * Invisible spacer rendered after the message list so the latest message can
 * be positioned ChatGPT-style; its height is driven by useThreadScrolling.
 */
export const ThreadPadding = (props: ThreadPaddingProps) => {
  const { threadId, scrollContainerRef } = props
  const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef)

  return (
    <div
      data-testid="chat-padding"
      className="flex-shrink-0"
      style={{ height: paddingHeight }}
    />
  )
}

View File

@ -17,6 +17,7 @@ import { getProviderTitle } from '@/lib/utils'
import { useTranslation } from '@/i18n/react-i18next-compat'
import { ModelCapabilities } from '@/types/models'
import { models as providerModels } from 'token.js'
import { toast } from 'sonner'
type DialogAddModelProps = {
provider: ModelProvider
@ -37,8 +38,13 @@ export const DialogAddModel = ({ provider, trigger }: DialogAddModelProps) => {
// Handle form submission
const handleSubmit = () => {
if (!modelId.trim()) {
return // Don't submit if model ID is empty
if (!modelId.trim()) return // Don't submit if model ID is empty
if (provider.models.some((e) => e.id === modelId)) {
toast.error(t('providers:addModel.modelExists'), {
description: t('providers:addModel.modelExistsDesc'),
})
return // Don't submit if model ID already exists
}
// Create the new model

View File

@ -131,7 +131,7 @@ export const useChat = () => {
})
}
return currentThread
}, [createThread, retrieveThread, router])
}, [createThread, retrieveThread, router, setMessages])
const restartModel = useCallback(
async (provider: ProviderObject, modelId: string) => {

View File

@ -13,7 +13,7 @@ type ThreadFolder = {
type ThreadManagementState = {
folders: ThreadFolder[]
setFolders: (folders: ThreadFolder[]) => void
addFolder: (name: string) => void
addFolder: (name: string) => ThreadFolder
updateFolder: (id: string, name: string) => void
deleteFolder: (id: string) => void
getFolderById: (id: string) => ThreadFolder | undefined
@ -37,6 +37,7 @@ export const useThreadManagement = create<ThreadManagementState>()(
set((state) => ({
folders: [...state.folders, newFolder],
}))
return newFolder
},
updateFolder: (id, name) => {

View File

@ -78,7 +78,7 @@ export const useThreadScrolling = (
return () =>
scrollContainer.removeEventListener('scroll', handleScroll)
}
}, [handleScroll])
}, [handleScroll, scrollContainerRef])
const checkScrollState = useCallback(() => {
const scrollContainer = scrollContainerRef.current
@ -90,7 +90,7 @@ export const useThreadScrolling = (
setIsAtBottom(isBottom)
setHasScrollbar(hasScroll)
}, [])
}, [scrollContainerRef])
useEffect(() => {
if (!scrollContainerRef.current) return
@ -101,7 +101,7 @@ export const useThreadScrolling = (
scrollToBottom(false)
checkScrollState()
}
}, [checkScrollState, scrollToBottom])
}, [checkScrollState, scrollToBottom, scrollContainerRef])
const prevCountRef = useRef(messageCount)
@ -146,7 +146,7 @@ export const useThreadScrolling = (
}
prevCountRef.current = messageCount
}, [messageCount, lastMessageRole])
}, [messageCount, lastMessageRole, getDOMElements, setPaddingHeight])
useEffect(() => {
const previouslyStreaming = wasStreamingRef.current
@ -197,7 +197,7 @@ export const useThreadScrolling = (
}
wasStreamingRef.current = currentlyStreaming
}, [streamingContent, threadId])
}, [streamingContent, threadId, getDOMElements, setPaddingHeight])
useEffect(() => {
userIntendedPositionRef.current = null
@ -207,7 +207,7 @@ export const useThreadScrolling = (
prevCountRef.current = messageCount
scrollToBottom(false)
checkScrollState()
}, [threadId])
}, [threadId, messageCount, scrollToBottom, checkScrollState, setPaddingHeight])
return useMemo(
() => ({

View File

@ -3,10 +3,12 @@ import { twMerge } from 'tailwind-merge'
import { ExtensionManager } from './extension'
import path from "path"
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs))
}
export function basenameNoExt(filePath: string): string {
const base = path.basename(filePath);
const VALID_EXTENSIONS = [".tar.gz", ".zip"];
@ -22,6 +24,7 @@ export function basenameNoExt(filePath: string): string {
return base.slice(0, -path.extname(base).length);
}
/**
* Get the display name for a model, falling back to the model ID if no display name is set
*/
@ -188,3 +191,15 @@ export function formatDuration(startTime: number, endTime?: number): string {
export function sanitizeModelId(modelId: string): string {
return modelId.replace(/[^a-zA-Z0-9/_\-.]/g, '').replace(/\./g, '_')
}
/**
 * Strip reasoning-channel markup from model output, supporting both the
 * <think>…</think> format and the <|channel|>/<|message|>/<|start|> token
 * format, returning the trimmed remaining text.
 */
export const extractThinkingContent = (text: string) => {
  // Order matters: composite token sequences are removed before the
  // individual markers they contain.
  const markers: RegExp[] = [
    /<\/?think>/g,
    /<\|channel\|>analysis<\|message\|>/g,
    /<\|start\|>assistant<\|channel\|>final<\|message\|>/g,
    /assistant<\|channel\|>final<\|message\|>/g,
    /<\|channel\|>/g, // remove any remaining channel markers
    /<\|message\|>/g, // remove any remaining message markers
    /<\|start\|>/g, // remove any remaining start markers
  ]
  return markers.reduce((acc, marker) => acc.replace(marker, ''), text).trim()
}

View File

@ -35,7 +35,9 @@
"modelId": "Modell ID",
"enterModelId": "Modell ID eingeben",
"exploreModels": "Sehe Modellliste von {{provider}}",
"addModel": "Modell hinzufügen"
"addModel": "Modell hinzufügen",
"modelExists": "Modell bereits vorhanden",
"modelExistsDesc": "Bitte wähle eine andere Modell-ID."
},
"deleteModel": {
"title": "Lösche Modell: {{modelId}}",

View File

@ -35,7 +35,9 @@
"modelId": "Model ID",
"enterModelId": "Enter model ID",
"exploreModels": "See model list from {{provider}}",
"addModel": "Add Model"
"addModel": "Add Model",
"modelExists": "Model already exists",
"modelExistsDesc": "Please choose a different model ID."
},
"deleteModel": {
"title": "Delete Model: {{modelId}}",
@ -69,4 +71,4 @@
"addProvider": "Add Provider",
"addOpenAIProvider": "Add OpenAI Provider",
"enterNameForProvider": "Enter name for provider"
}
}

View File

@ -35,7 +35,9 @@
"modelId": "ID Model",
"enterModelId": "Masukkan ID model",
"exploreModels": "Lihat daftar model dari {{provider}}",
"addModel": "Tambah Model"
"addModel": "Tambah Model",
"modelExists": "Model sudah ada",
"modelExistsDesc": "Silakan pilih ID model yang berbeda."
},
"deleteModel": {
"title": "Hapus Model: {{modelId}}",
@ -69,4 +71,4 @@
"addProvider": "Tambah Penyedia",
"addOpenAIProvider": "Tambah Penyedia OpenAI",
"enterNameForProvider": "Masukkan nama untuk penyedia"
}
}

View File

@ -35,7 +35,9 @@
"modelId": "Identyfikator Modelu",
"enterModelId": "Wprowadź identyfikator modelu",
"exploreModels": "Zobacz listę modeli dostawcy {{provider}}",
"addModel": "Dodaj Model"
"addModel": "Dodaj Model",
"modelExists": "Model już istnieje",
"modelExistsDesc": "Wybierz inny identyfikator modelu."
},
"deleteModel": {
"title": "Usuń Model: {{modelId}}",

View File

@ -35,7 +35,9 @@
"modelId": "ID mô hình",
"enterModelId": "Nhập ID mô hình",
"exploreModels": "Xem danh sách mô hình từ {{provider}}",
"addModel": "Thêm mô hình"
"addModel": "Thêm mô hình",
"modelExists": "Mô hình đã tồn tại",
"modelExistsDesc": "Vui lòng chọn một ID mô hình khác."
},
"deleteModel": {
"title": "Xóa mô hình: {{modelId}}",
@ -69,4 +71,4 @@
"addProvider": "Thêm nhà cung cấp",
"addOpenAIProvider": "Thêm nhà cung cấp OpenAI",
"enterNameForProvider": "Nhập tên cho nhà cung cấp"
}
}

View File

@ -35,7 +35,9 @@
"modelId": "模型 ID",
"enterModelId": "输入模型 ID",
"exploreModels": "查看 {{provider}} 的模型列表",
"addModel": "添加模型"
"addModel": "添加模型",
"modelExists": "模型已存在",
"modelExistsDesc": "请选择不同的模型 ID。"
},
"deleteModel": {
"title": "删除模型:{{modelId}}",
@ -69,4 +71,4 @@
"addProvider": "添加提供商",
"addOpenAIProvider": "添加 OpenAI 提供商",
"enterNameForProvider": "输入提供商名称"
}
}

View File

@ -35,7 +35,9 @@
"modelId": "模型 ID",
"enterModelId": "輸入模型 ID",
"exploreModels": "查看 {{provider}} 的模型清單",
"addModel": "新增模型"
"addModel": "新增模型",
"modelExists": "模型已存在",
"modelExistsDesc": "請選擇不同的模型 ID。"
},
"deleteModel": {
"title": "刪除模型:{{modelId}}",
@ -69,4 +71,4 @@
"addProvider": "新增提供者",
"addOpenAIProvider": "新增 OpenAI 提供者",
"enterNameForProvider": "輸入提供者名稱"
}
}

View File

@ -1,4 +1,4 @@
import { createFileRoute } from '@tanstack/react-router'
import { createFileRoute, useNavigate } from '@tanstack/react-router'
import { useState, useMemo } from 'react'
import { useThreadManagement } from '@/hooks/useThreadManagement'
@ -31,6 +31,7 @@ function Project() {
function ProjectContent() {
const { t } = useTranslation()
const navigate = useNavigate()
const { folders, addFolder, updateFolder, deleteFolder, getFolderById } =
useThreadManagement()
const threads = useThreads((state) => state.threads)
@ -59,7 +60,12 @@ function ProjectContent() {
if (editingKey) {
updateFolder(editingKey, name)
} else {
addFolder(name)
const newProject = addFolder(name)
// Navigate to the newly created project
navigate({
to: '/project/$projectId',
params: { projectId: newProject.id },
})
}
setOpen(false)
setEditingKey(null)

View File

@ -318,17 +318,7 @@ function ProviderDetail() {
.getActiveModels()
.then((models) => setActiveModels(models || []))
} catch (error) {
console.error('Error starting model:', error)
if (
error &&
typeof error === 'object' &&
'message' in error &&
typeof error.message === 'string'
) {
setModelLoadError({ message: error.message })
} else {
setModelLoadError(typeof error === 'string' ? error : `${error}`)
}
setModelLoadError(error as ErrorObject)
} finally {
// Remove model from loading state
setLoadingModels((prev) => prev.filter((id) => id !== modelId))

View File

@ -23,8 +23,8 @@ import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
import ScrollToBottom from '@/containers/ScrollToBottom'
import { PromptProgress } from '@/components/PromptProgress'
import { ThreadPadding } from '@/containers/ThreadPadding'
import { TEMPORARY_CHAT_ID, TEMPORARY_CHAT_QUERY_ID } from '@/constants/chat'
import { useThreadScrolling } from '@/hooks/useThreadScrolling'
import { IconInfoCircle } from '@tabler/icons-react'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
@ -99,9 +99,6 @@ function ThreadDetail() {
const scrollContainerRef = useRef<HTMLDivElement>(null)
// Get padding height for ChatGPT-style message positioning
const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef)
// Listen for conversation not found events
useEffect(() => {
const handleConversationNotFound = (event: CustomEvent) => {
@ -123,7 +120,7 @@ function ThreadDetail() {
return () => {
window.removeEventListener(CONVERSATION_NOT_FOUND_EVENT, handleConversationNotFound as EventListener)
}
}, [threadId, navigate])
}, [threadId, navigate, t])
useEffect(() => {
setCurrentThreadId(threadId)
@ -270,11 +267,7 @@ function ThreadDetail() {
data-test-id="thread-content-text"
/>
{/* Persistent padding element for ChatGPT-style message positioning */}
<div
style={{ height: paddingHeight }}
className="flex-shrink-0"
data-testid="chat-padding"
/>
<ThreadPadding threadId={threadId} scrollContainerRef={scrollContainerRef} />
</div>
</div>
<div