diff --git a/.github/workflows/template-tauri-build-windows-x64.yml b/.github/workflows/template-tauri-build-windows-x64.yml index 643fef5ac..ed00ef90f 100644 --- a/.github/workflows/template-tauri-build-windows-x64.yml +++ b/.github/workflows/template-tauri-build-windows-x64.yml @@ -54,6 +54,8 @@ on: value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }} FILE_NAME: value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }} + MSI_FILE_NAME: + value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }} jobs: build-windows-x64: @@ -61,6 +63,7 @@ jobs: outputs: WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }} FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }} + MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }} permissions: contents: write steps: @@ -189,9 +192,15 @@ jobs: - name: Upload Artifact uses: actions/upload-artifact@v4 with: - name: jan-windows-${{ inputs.new_version }} + name: jan-windows-exe-${{ inputs.new_version }} path: | ./src-tauri/target/release/bundle/nsis/*.exe + - name: Upload Artifact + uses: actions/upload-artifact@v4 + with: + name: jan-windows-msi-${{ inputs.new_version }} + path: | + ./src-tauri/target/release/bundle/msi/*.msi ## Set output filename for windows - name: Set output filename for windows @@ -201,13 +210,18 @@ jobs: if [ "${{ inputs.channel }}" != "stable" ]; then FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig) + + MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi" else FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig) + + MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi" fi echo "::set-output name=WIN_SIG::$WIN_SIG" echo "::set-output name=FILE_NAME::$FILE_NAME" + echo "::set-output name=MSI_FILE_NAME::$MSI_FILE" id: metadata ## Upload to s3 for nightly and beta @@ -220,6 +234,8 @@ jobs: # Upload for tauri updater aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }} aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig + + aws s3 cp ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.MSI_FILE_NAME }} env: AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }} @@ -236,3 +252,13 @@ jobs: asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }} asset_name: ${{ steps.metadata.outputs.FILE_NAME }} asset_content_type: application/octet-stream + - name: Upload release asset if public provider is github + if: inputs.public_provider == 'github' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: actions/upload-release-asset@v1.0.1 + with: + upload_url: ${{ inputs.upload_url }} + asset_path: ./src-tauri/target/release/bundle/msi/${{ steps.metadata.outputs.MSI_FILE_NAME }} + asset_name: ${{ steps.metadata.outputs.MSI_FILE_NAME }} + asset_content_type: application/octet-stream diff --git a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs index 50e3f4a14..cdbbf92d5 100644 --- 
a/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs +++ b/src-tauri/plugins/tauri-plugin-llamacpp/src/gguf/utils.rs @@ -62,6 +62,7 @@ pub async fn estimate_kv_cache_internal( ctx_size: Option, ) -> Result { log::info!("Received ctx_size parameter: {:?}", ctx_size); + log::info!("Received model metadata:\n{:?}", &meta); let arch = meta .get("general.architecture") .ok_or(KVCacheError::ArchitectureNotFound)?; @@ -94,15 +95,43 @@ pub async fn estimate_kv_cache_internal( let key_len_key = format!("{}.attention.key_length", arch); let val_len_key = format!("{}.attention.value_length", arch); - let key_len = meta + let mut key_len = meta .get(&key_len_key) .and_then(|s| s.parse::().ok()) .unwrap_or(0); - let val_len = meta + let mut val_len = meta .get(&val_len_key) .and_then(|s| s.parse::().ok()) .unwrap_or(0); + // Fallback: calculate from embedding_length if key/val lengths not found + if key_len == 0 || val_len == 0 { + let emb_len_key = format!("{}.embedding_length", arch); + let emb_len = meta + .get(&emb_len_key) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + if emb_len > 0 && n_head > 0 { + // For most transformers: head_dim = embedding_length / total_heads + let total_heads = meta + .get(&n_head_key) + .and_then(|s| s.parse::().ok()) + .unwrap_or(n_head); + + let head_dim = emb_len / total_heads; + key_len = head_dim; + val_len = head_dim; + + log::info!( + "Calculated key_len and val_len from embedding_length: {} / {} heads = {} per head", + emb_len, + total_heads, + head_dim + ); + } + } + if key_len == 0 || val_len == 0 { return Err(KVCacheError::EmbeddingLengthInvalid); } diff --git a/src-tauri/src/core/mcp/helpers.rs b/src-tauri/src/core/mcp/helpers.rs index 8f55d7479..48c92ba2c 100644 --- a/src-tauri/src/core/mcp/helpers.rs +++ b/src-tauri/src/core/mcp/helpers.rs @@ -946,3 +946,47 @@ pub async fn should_restart_server( } } } + +// Add a new server configuration to the MCP config file +pub fn add_server_config( + app_handle: tauri::AppHandle, + server_key: String, + server_value: Value, +) -> Result<(), String> { + add_server_config_with_path(app_handle, server_key, server_value, None) +} + +// Add a new server configuration to the MCP config file with custom path support +pub fn add_server_config_with_path( + app_handle: tauri::AppHandle, + server_key: String, + server_value: Value, + config_filename: Option<&str>, +) -> Result<(), String> { + let config_filename = config_filename.unwrap_or("mcp_config.json"); + let config_path = get_jan_data_folder_path(app_handle).join(config_filename); + + let mut config: Value = serde_json::from_str( + &std::fs::read_to_string(&config_path) + .map_err(|e| format!("Failed to read config file: {e}"))?, + ) + .map_err(|e| format!("Failed to parse config: {e}"))?; + + config + .as_object_mut() + .ok_or("Config root is not an object")? + .entry("mcpServers") + .or_insert_with(|| Value::Object(serde_json::Map::new())) + .as_object_mut() + .ok_or("mcpServers is not an object")? 
+ .insert(server_key, server_value); + + std::fs::write( + &config_path, + serde_json::to_string_pretty(&config) + .map_err(|e| format!("Failed to serialize config: {e}"))?, + ) + .map_err(|e| format!("Failed to write config file: {e}"))?; + + Ok(()) +} diff --git a/src-tauri/src/core/mcp/tests.rs b/src-tauri/src/core/mcp/tests.rs index 42289226a..d973ce647 100644 --- a/src-tauri/src/core/mcp/tests.rs +++ b/src-tauri/src/core/mcp/tests.rs @@ -1,4 +1,4 @@ -use super::helpers::run_mcp_commands; +use super::helpers::{add_server_config, add_server_config_with_path, run_mcp_commands}; use crate::core::app::commands::get_jan_data_folder_path; use crate::core::state::SharedMcpServers; use std::collections::HashMap; @@ -38,6 +38,150 @@ async fn test_run_mcp_commands() { std::fs::remove_file(&config_path).expect("Failed to remove config file"); } +#[test] +fn test_add_server_config_new_file() { + let app = mock_app(); + let app_path = get_jan_data_folder_path(app.handle().clone()); + let config_path = app_path.join("mcp_config_test_new.json"); + + // Ensure the directory exists + if let Some(parent) = config_path.parent() { + std::fs::create_dir_all(parent).expect("Failed to create parent directory"); + } + + // Create initial config file with empty mcpServers + let mut file = File::create(&config_path).expect("Failed to create config file"); + file.write_all(b"{\"mcpServers\":{}}") + .expect("Failed to write to config file"); + drop(file); + + // Test adding a new server config + let server_value = serde_json::json!({ + "command": "npx", + "args": ["-y", "test-server"], + "env": { "TEST_API_KEY": "test_key" }, + "active": false + }); + + let result = add_server_config_with_path( + app.handle().clone(), + "test_server".to_string(), + server_value.clone(), + Some("mcp_config_test_new.json"), + ); + + assert!(result.is_ok(), "Failed to add server config: {:?}", result); + + // Verify the config was added correctly + let config_content = std::fs::read_to_string(&config_path) + .expect("Failed to read config file"); + let config: serde_json::Value = serde_json::from_str(&config_content) + .expect("Failed to parse config"); + + assert!(config["mcpServers"]["test_server"].is_object()); + assert_eq!(config["mcpServers"]["test_server"]["command"], "npx"); + assert_eq!(config["mcpServers"]["test_server"]["args"][0], "-y"); + assert_eq!(config["mcpServers"]["test_server"]["args"][1], "test-server"); + + // Clean up + std::fs::remove_file(&config_path).expect("Failed to remove config file"); +} + +#[test] +fn test_add_server_config_existing_servers() { + let app = mock_app(); + let app_path = get_jan_data_folder_path(app.handle().clone()); + let config_path = app_path.join("mcp_config_test_existing.json"); + + // Ensure the directory exists + if let Some(parent) = config_path.parent() { + std::fs::create_dir_all(parent).expect("Failed to create parent directory"); + } + + // Create config file with existing server + let initial_config = serde_json::json!({ + "mcpServers": { + "existing_server": { + "command": "existing_command", + "args": ["arg1"], + "active": true + } + } + }); + + let mut file = File::create(&config_path).expect("Failed to create config file"); + file.write_all(serde_json::to_string_pretty(&initial_config).unwrap().as_bytes()) + .expect("Failed to write to config file"); + drop(file); + + // Add new server + let new_server_value = serde_json::json!({ + "command": "new_command", + "args": ["new_arg"], + "active": false + }); + + let result = add_server_config_with_path( + 
app.handle().clone(), + "new_server".to_string(), + new_server_value, + Some("mcp_config_test_existing.json"), + ); + + assert!(result.is_ok(), "Failed to add server config: {:?}", result); + + // Verify both servers exist + let config_content = std::fs::read_to_string(&config_path) + .expect("Failed to read config file"); + let config: serde_json::Value = serde_json::from_str(&config_content) + .expect("Failed to parse config"); + + // Check existing server is still there + assert!(config["mcpServers"]["existing_server"].is_object()); + assert_eq!(config["mcpServers"]["existing_server"]["command"], "existing_command"); + + // Check new server was added + assert!(config["mcpServers"]["new_server"].is_object()); + assert_eq!(config["mcpServers"]["new_server"]["command"], "new_command"); + + // Clean up + std::fs::remove_file(&config_path).expect("Failed to remove config file"); +} + +#[test] +fn test_add_server_config_missing_config_file() { + let app = mock_app(); + let app_path = get_jan_data_folder_path(app.handle().clone()); + + // Ensure the directory exists + if let Some(parent) = app_path.parent() { + std::fs::create_dir_all(parent).ok(); + } + std::fs::create_dir_all(&app_path).ok(); + + let config_path = app_path.join("mcp_config.json"); + + // Ensure the file doesn't exist + if config_path.exists() { + std::fs::remove_file(&config_path).ok(); + } + + let server_value = serde_json::json!({ + "command": "test", + "args": [], + "active": false + }); + + let result = add_server_config( + app.handle().clone(), + "test".to_string(), + server_value, + ); + + assert!(result.is_err(), "Expected error when config file doesn't exist"); + assert!(result.unwrap_err().contains("Failed to read config file")); +} + #[cfg(not(target_os = "windows"))] #[test] fn test_bin_path_construction_with_join() { diff --git a/src-tauri/src/core/setup.rs b/src-tauri/src/core/setup.rs index c88e62a8d..68c7c44a1 100644 --- a/src-tauri/src/core/setup.rs +++ b/src-tauri/src/core/setup.rs @@ -3,39 +3,23 @@ use std::{ fs::{self, File}, io::Read, path::PathBuf, + sync::Arc, }; use tar::Archive; use tauri::{ menu::{Menu, MenuItem, PredefinedMenuItem}, tray::{MouseButton, MouseButtonState, TrayIcon, TrayIconBuilder, TrayIconEvent}, - App, Emitter, Manager, + App, Emitter, Manager, Wry, }; -use tauri_plugin_store::StoreExt; -// use tokio::sync::Mutex; -// use tokio::time::{sleep, Duration}; // Using tokio::sync::Mutex -// // MCP +use tauri_plugin_store::Store; + +use crate::core::mcp::helpers::add_server_config; -// MCP use super::{ - app::commands::get_jan_data_folder_path, extensions::commands::get_jan_extensions_path, - mcp::helpers::run_mcp_commands, state::AppState, + extensions::commands::get_jan_extensions_path, mcp::helpers::run_mcp_commands, state::AppState, }; pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), String> { - let mut store_path = get_jan_data_folder_path(app.clone()); - store_path.push("store.json"); - let store = app.store(store_path).expect("Store not initialized"); - let stored_version = store - .get("version") - .and_then(|v| v.as_str().map(String::from)) - .unwrap_or_default(); - - let app_version = app - .config() - .version - .clone() - .unwrap_or_else(|| "".to_string()); - let extensions_path = get_jan_extensions_path(app.clone()); let pre_install_path = app .path() @@ -50,13 +34,8 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri if std::env::var("IS_CLEAN").is_ok() { clean_up = true; } - log::info!( - "Installing extensions. 
Clean up: {}, Stored version: {}, App version: {}", - clean_up, - stored_version, - app_version - ); - if !clean_up && stored_version == app_version && extensions_path.exists() { + log::info!("Installing extensions. Clean up: {}", clean_up); + if !clean_up && extensions_path.exists() { return Ok(()); } @@ -160,10 +139,36 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri ) .map_err(|e| e.to_string())?; - // Store the new app version - store.set("version", serde_json::json!(app_version)); - store.save().expect("Failed to save store"); + Ok(()) +} +// Migrate MCP servers configuration +pub fn migrate_mcp_servers( + app_handle: tauri::AppHandle, + store: Arc<Store<Wry>>, +) -> Result<(), String> { + let mcp_version = store + .get("mcp_version") + .and_then(|v| v.as_i64()) + .unwrap_or_else(|| 0); + if mcp_version < 1 { + log::info!("Migrating MCP schema version 1"); + let result = add_server_config( + app_handle, + "exa".to_string(), + serde_json::json!({ + "command": "npx", + "args": ["-y", "exa-mcp-server"], + "env": { "EXA_API_KEY": "YOUR_EXA_API_KEY_HERE" }, + "active": false + }), + ); + if let Err(e) = result { + log::error!("Failed to add server config: {}", e); + } + } + store.set("mcp_version", 1); + store.save().expect("Failed to save store"); Ok(()) } diff --git a/src-tauri/src/core/threads/commands.rs b/src-tauri/src/core/threads/commands.rs index a9012193a..7f16371a7 100644 --- a/src-tauri/src/core/threads/commands.rs +++ b/src-tauri/src/core/threads/commands.rs @@ -148,6 +148,9 @@ pub async fn create_message( let data = serde_json::to_string(&message).map_err(|e| e.to_string())?; writeln!(file, "{}", data).map_err(|e| e.to_string())?; + + // Explicitly flush to ensure data is written before returning + file.flush().map_err(|e| e.to_string())?; } Ok(message) diff --git a/src-tauri/src/core/threads/tests.rs b/src-tauri/src/core/threads/tests.rs index 5b4aaec57..7a918e46f 100644 --- a/src-tauri/src/core/threads/tests.rs +++ b/src-tauri/src/core/threads/tests.rs @@ -82,7 +82,7 @@ async fn test_create_and_list_messages() { let messages = list_messages(app.handle().clone(), thread_id.clone()) .await .unwrap(); - assert!(messages.len() > 0); + assert!(messages.len() > 0, "Expected at least one message, but got none. 
Thread ID: {}", thread_id); assert_eq!(messages[0]["role"], "user"); // Clean up diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index dad155875..a2b263c6a 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -10,6 +10,7 @@ use jan_utils::generate_app_token; use std::{collections::HashMap, sync::Arc}; use tauri::{Emitter, Manager, RunEvent}; use tauri_plugin_llamacpp::cleanup_llama_processes; +use tauri_plugin_store::StoreExt; use tokio::sync::Mutex; use crate::core::setup::setup_tray; @@ -151,11 +152,40 @@ pub fn run() { )?; app.handle() .plugin(tauri_plugin_updater::Builder::new().build())?; - // Install extensions - if let Err(e) = setup::install_extensions(app.handle().clone(), false) { + + // Start migration + let mut store_path = get_jan_data_folder_path(app.handle().clone()); + store_path.push("store.json"); + let store = app + .handle() + .store(store_path) + .expect("Store not initialized"); + let stored_version = store + .get("version") + .and_then(|v| v.as_str().map(String::from)) + .unwrap_or_default(); + let app_version = app + .config() + .version + .clone() + .unwrap_or_else(|| "".to_string()); + // Migrate extensions + if let Err(e) = + setup::install_extensions(app.handle().clone(), stored_version != app_version) + { log::error!("Failed to install extensions: {}", e); } + // Migrate MCP servers + if let Err(e) = setup::migrate_mcp_servers(app.handle().clone(), store.clone()) { + log::error!("Failed to migrate MCP servers: {}", e); + } + + // Store the new app version + store.set("version", serde_json::json!(app_version)); + store.save().expect("Failed to save store"); + // Migration completed + if option_env!("ENABLE_SYSTEM_TRAY_ICON").unwrap_or("false") == "true" { log::info!("Enabling system tray icon"); let _ = setup_tray(app); diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 6aaa66bb7..10f4325e8 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -76,6 +76,7 @@ "deep-link": { "schemes": ["jan"] } }, "bundle": { + "publisher": "Menlo Research Pte. Ltd.", "active": true, "createUpdaterArtifacts": false, "icon": [ diff --git a/web-app/src/containers/LeftPanel.tsx b/web-app/src/containers/LeftPanel.tsx index 24f3bf911..f24dcec0d 100644 --- a/web-app/src/containers/LeftPanel.tsx +++ b/web-app/src/containers/LeftPanel.tsx @@ -1,4 +1,4 @@ -import { Link, useRouterState } from '@tanstack/react-router' +import { Link, useRouterState, useNavigate } from '@tanstack/react-router' import { useLeftPanel } from '@/hooks/useLeftPanel' import { cn } from '@/lib/utils' import { @@ -58,6 +58,9 @@ const mainMenus = [ route: route.project, isEnabled: true, }, +] + +const secondaryMenus = [ { title: 'common:assistants', icon: IconClipboardSmile, @@ -82,6 +85,7 @@ const LeftPanel = () => { const open = useLeftPanel((state) => state.open) const setLeftPanel = useLeftPanel((state) => state.setLeftPanel) const { t } = useTranslation() + const navigate = useNavigate() const [searchTerm, setSearchTerm] = useState('') const { isAuthenticated } = useAuth() @@ -212,7 +216,12 @@ const LeftPanel = () => { if (editingProjectKey) { updateFolder(editingProjectKey, name) } else { - addFolder(name) + const newProject = addFolder(name) + // Navigate to the newly created project + navigate({ + to: '/project/$projectId', + params: { projectId: newProject.id }, + }) } setProjectDialogOpen(false) setEditingProjectKey(null) @@ -487,7 +496,7 @@ const LeftPanel = () => { )}
-
+
{favoritedThreads.length > 0 && ( <> @@ -607,6 +616,44 @@ const LeftPanel = () => {
+ + {secondaryMenus.map((menu) => { + if (!menu.isEnabled) { + return null + } + + // Regular menu items must have route and icon + if (!menu.route || !menu.icon) return null + + const isActive = (() => { + // Settings routes + if (menu.route.includes(route.settings.index)) { + return currentPath.includes(route.settings.index) + } + + // Default exact match for other routes + return currentPath === menu.route + })() + return ( + isSmallScreen && setLeftPanel(false)} + data-test-id={`menu-${menu.title}`} + activeOptions={{ exact: true }} + className={cn( + 'flex items-center gap-1.5 cursor-pointer hover:bg-left-panel-fg/10 py-1 px-1 rounded', + isActive && 'bg-left-panel-fg/10' + )} + > + + + {t(menu.title)} + + + ) + })} + {PlatformFeatures[PlatformFeature.AUTHENTICATION] && (
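
Note on the create-project flow in the LeftPanel changes above: addFolder now returns the folder it creates (see the useThreadManagement change later in this diff), which is what lets the panel navigate straight to the new project. A minimal TypeScript sketch of that calling pattern; the wrapper hook useCreateProject is a hypothetical name used only for illustration:

    import { useNavigate } from '@tanstack/react-router'
    import { useThreadManagement } from '@/hooks/useThreadManagement'

    // Hypothetical helper showing the create-and-open pattern used by
    // LeftPanel.tsx and routes/project/index.tsx in this diff: addFolder
    // returns the new ThreadFolder, so the caller can jump to its route.
    export function useCreateProject() {
      const navigate = useNavigate()
      const { addFolder } = useThreadManagement()

      return (name: string) => {
        const newProject = addFolder(name)
        navigate({
          to: '/project/$projectId',
          params: { projectId: newProject.id },
        })
        return newProject
      }
    }
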
diff --git a/web-app/src/containers/RenderMarkdown.tsx b/web-app/src/containers/RenderMarkdown.tsx index 31d08cf10..c941b512d 100644 --- a/web-app/src/containers/RenderMarkdown.tsx +++ b/web-app/src/containers/RenderMarkdown.tsx @@ -1,4 +1,3 @@ -/* eslint-disable react-hooks/exhaustive-deps */ import ReactMarkdown, { Components } from 'react-markdown' import remarkGfm from 'remark-gfm' import remarkEmoji from 'remark-emoji' diff --git a/web-app/src/containers/ThinkingBlock.tsx b/web-app/src/containers/ThinkingBlock.tsx index 68ab8644f..211fda9ff 100644 --- a/web-app/src/containers/ThinkingBlock.tsx +++ b/web-app/src/containers/ThinkingBlock.tsx @@ -3,6 +3,7 @@ import { create } from 'zustand' import { RenderMarkdown } from './RenderMarkdown' import { useAppState } from '@/hooks/useAppState' import { useTranslation } from '@/i18n/react-i18next-compat' +import { extractThinkingContent } from '@/lib/utils' interface Props { text: string @@ -43,19 +44,6 @@ const ThinkingBlock = ({ id, text }: Props) => { setThinkingState(id, newExpandedState) } - // Extract thinking content from either format - const extractThinkingContent = (text: string) => { - return text - .replace(/<\/?think>/g, '') - .replace(/<\|channel\|>analysis<\|message\|>/g, '') - .replace(/<\|start\|>assistant<\|channel\|>final<\|message\|>/g, '') - .replace(/assistant<\|channel\|>final<\|message\|>/g, '') - .replace(/<\|channel\|>/g, '') // remove any remaining channel markers - .replace(/<\|message\|>/g, '') // remove any remaining message markers - .replace(/<\|start\|>/g, '') // remove any remaining start markers - .trim() - } - const thinkingContent = extractThinkingContent(text) if (!thinkingContent) return null diff --git a/web-app/src/containers/ThreadList.tsx b/web-app/src/containers/ThreadList.tsx index b58d1872a..d971064b5 100644 --- a/web-app/src/containers/ThreadList.tsx +++ b/web-app/src/containers/ThreadList.tsx @@ -23,7 +23,7 @@ import { useThreads } from '@/hooks/useThreads' import { useThreadManagement } from '@/hooks/useThreadManagement' import { useLeftPanel } from '@/hooks/useLeftPanel' import { useMessages } from '@/hooks/useMessages' -import { cn } from '@/lib/utils' +import { cn, extractThinkingContent } from '@/lib/utils' import { useSmallScreen } from '@/hooks/useMediaQuery' import { @@ -167,14 +167,10 @@ const SortableItem = memo( )} > {thread.title || t('common:newThread')} - {variant === 'project' && ( - <> - {variant === 'project' && getLastMessageInfo?.content && ( -
- {getLastMessageInfo.content} -
- )} - + {variant === 'project' && getLastMessageInfo?.content && ( + + {extractThinkingContent(getLastMessageInfo.content)} + )}
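
The preview above passes the last message through extractThinkingContent, the helper that later in this diff moves from ThinkingBlock.tsx into web-app/src/lib/utils.ts. A small illustrative usage; the sample string is made up, and note that the helper strips only the think tags and the channel/message/start markers, while the text between them is kept:

    import { extractThinkingContent } from '@/lib/utils'

    // Hypothetical assistant output mixing reasoning markers with the answer.
    const raw =
      '<think>comparing the two options </think><|channel|>analysis<|message|>Paris is the capital of France.'

    // Tags and markers are removed; the surrounding text is kept and trimmed.
    console.log(extractThinkingContent(raw))
    // -> "comparing the two options Paris is the capital of France."
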
@@ -185,7 +181,10 @@ const SortableItem = memo( { e.preventDefault() e.stopPropagation() diff --git a/web-app/src/containers/ThreadPadding.tsx b/web-app/src/containers/ThreadPadding.tsx new file mode 100644 index 000000000..3f4c725c3 --- /dev/null +++ b/web-app/src/containers/ThreadPadding.tsx @@ -0,0 +1,19 @@ +import { useThreadScrolling } from '@/hooks/useThreadScrolling' + +export const ThreadPadding = ({ + threadId, + scrollContainerRef, +}: { + threadId: string + scrollContainerRef: React.RefObject +}) => { + // Get padding height for ChatGPT-style message positioning + const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef) + return ( +
+ ) +} diff --git a/web-app/src/containers/dialogs/AddModel.tsx b/web-app/src/containers/dialogs/AddModel.tsx index 2b87fb222..e8fd4e0fd 100644 --- a/web-app/src/containers/dialogs/AddModel.tsx +++ b/web-app/src/containers/dialogs/AddModel.tsx @@ -17,6 +17,7 @@ import { getProviderTitle } from '@/lib/utils' import { useTranslation } from '@/i18n/react-i18next-compat' import { ModelCapabilities } from '@/types/models' import { models as providerModels } from 'token.js' +import { toast } from 'sonner' type DialogAddModelProps = { provider: ModelProvider @@ -37,8 +38,13 @@ export const DialogAddModel = ({ provider, trigger }: DialogAddModelProps) => { // Handle form submission const handleSubmit = () => { - if (!modelId.trim()) { - return // Don't submit if model ID is empty + if (!modelId.trim()) return // Don't submit if model ID is empty + + if (provider.models.some((e) => e.id === modelId)) { + toast.error(t('providers:addModel.modelExists'), { + description: t('providers:addModel.modelExistsDesc'), + }) + return // Don't submit if model ID already exists } // Create the new model diff --git a/web-app/src/hooks/useChat.ts b/web-app/src/hooks/useChat.ts index 357fc3a8d..935458326 100644 --- a/web-app/src/hooks/useChat.ts +++ b/web-app/src/hooks/useChat.ts @@ -131,7 +131,7 @@ export const useChat = () => { }) } return currentThread - }, [createThread, retrieveThread, router]) + }, [createThread, retrieveThread, router, setMessages]) const restartModel = useCallback( async (provider: ProviderObject, modelId: string) => { diff --git a/web-app/src/hooks/useThreadManagement.ts b/web-app/src/hooks/useThreadManagement.ts index 84e5b0e34..becb41def 100644 --- a/web-app/src/hooks/useThreadManagement.ts +++ b/web-app/src/hooks/useThreadManagement.ts @@ -13,7 +13,7 @@ type ThreadFolder = { type ThreadManagementState = { folders: ThreadFolder[] setFolders: (folders: ThreadFolder[]) => void - addFolder: (name: string) => void + addFolder: (name: string) => ThreadFolder updateFolder: (id: string, name: string) => void deleteFolder: (id: string) => void getFolderById: (id: string) => ThreadFolder | undefined @@ -37,6 +37,7 @@ export const useThreadManagement = create()( set((state) => ({ folders: [...state.folders, newFolder], })) + return newFolder }, updateFolder: (id, name) => { diff --git a/web-app/src/hooks/useThreadScrolling.tsx b/web-app/src/hooks/useThreadScrolling.tsx index 41362db61..a3c6d7ed2 100644 --- a/web-app/src/hooks/useThreadScrolling.tsx +++ b/web-app/src/hooks/useThreadScrolling.tsx @@ -78,7 +78,7 @@ export const useThreadScrolling = ( return () => scrollContainer.removeEventListener('scroll', handleScroll) } - }, [handleScroll]) + }, [handleScroll, scrollContainerRef]) const checkScrollState = useCallback(() => { const scrollContainer = scrollContainerRef.current @@ -90,7 +90,7 @@ export const useThreadScrolling = ( setIsAtBottom(isBottom) setHasScrollbar(hasScroll) - }, []) + }, [scrollContainerRef]) useEffect(() => { if (!scrollContainerRef.current) return @@ -101,7 +101,7 @@ export const useThreadScrolling = ( scrollToBottom(false) checkScrollState() } - }, [checkScrollState, scrollToBottom]) + }, [checkScrollState, scrollToBottom, scrollContainerRef]) const prevCountRef = useRef(messageCount) @@ -146,7 +146,7 @@ export const useThreadScrolling = ( } prevCountRef.current = messageCount - }, [messageCount, lastMessageRole]) + }, [messageCount, lastMessageRole, getDOMElements, setPaddingHeight]) useEffect(() => { const previouslyStreaming = wasStreamingRef.current @@ -197,7 +197,7 
@@ export const useThreadScrolling = ( } wasStreamingRef.current = currentlyStreaming - }, [streamingContent, threadId]) + }, [streamingContent, threadId, getDOMElements, setPaddingHeight]) useEffect(() => { userIntendedPositionRef.current = null @@ -207,7 +207,7 @@ export const useThreadScrolling = ( prevCountRef.current = messageCount scrollToBottom(false) checkScrollState() - }, [threadId]) + }, [threadId, messageCount, scrollToBottom, checkScrollState, setPaddingHeight]) return useMemo( () => ({ diff --git a/web-app/src/lib/utils.ts b/web-app/src/lib/utils.ts index 663a5051b..b035600ab 100644 --- a/web-app/src/lib/utils.ts +++ b/web-app/src/lib/utils.ts @@ -3,10 +3,12 @@ import { twMerge } from 'tailwind-merge' import { ExtensionManager } from './extension' import path from "path" + export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)) } + export function basenameNoExt(filePath: string): string { const base = path.basename(filePath); const VALID_EXTENSIONS = [".tar.gz", ".zip"]; @@ -22,6 +24,7 @@ export function basenameNoExt(filePath: string): string { return base.slice(0, -path.extname(base).length); } + /** * Get the display name for a model, falling back to the model ID if no display name is set */ @@ -188,3 +191,15 @@ export function formatDuration(startTime: number, endTime?: number): string { export function sanitizeModelId(modelId: string): string { return modelId.replace(/[^a-zA-Z0-9/_\-.]/g, '').replace(/\./g, '_') } + +export const extractThinkingContent = (text: string) => { + return text + .replace(/<\/?think>/g, '') + .replace(/<\|channel\|>analysis<\|message\|>/g, '') + .replace(/<\|start\|>assistant<\|channel\|>final<\|message\|>/g, '') + .replace(/assistant<\|channel\|>final<\|message\|>/g, '') + .replace(/<\|channel\|>/g, '') // remove any remaining channel markers + .replace(/<\|message\|>/g, '') // remove any remaining message markers + .replace(/<\|start\|>/g, '') // remove any remaining start markers + .trim() +} diff --git a/web-app/src/locales/de-DE/providers.json b/web-app/src/locales/de-DE/providers.json index 7e6d68c0f..39c52e047 100644 --- a/web-app/src/locales/de-DE/providers.json +++ b/web-app/src/locales/de-DE/providers.json @@ -35,7 +35,9 @@ "modelId": "Modell ID", "enterModelId": "Modell ID eingeben", "exploreModels": "Sehe Modellliste von {{provider}}", - "addModel": "Modell hinzufügen" + "addModel": "Modell hinzufügen", + "modelExists": "Modell bereits vorhanden", + "modelExistsDesc": "Bitte wähle eine andere Modell-ID." }, "deleteModel": { "title": "Lösche Modell: {{modelId}}", diff --git a/web-app/src/locales/en/providers.json b/web-app/src/locales/en/providers.json index 62fe69b8e..2683432f9 100644 --- a/web-app/src/locales/en/providers.json +++ b/web-app/src/locales/en/providers.json @@ -35,7 +35,9 @@ "modelId": "Model ID", "enterModelId": "Enter model ID", "exploreModels": "See model list from {{provider}}", - "addModel": "Add Model" + "addModel": "Add Model", + "modelExists": "Model already exists", + "modelExistsDesc": "Please choose a different model ID." 
}, "deleteModel": { "title": "Delete Model: {{modelId}}", @@ -69,4 +71,4 @@ "addProvider": "Add Provider", "addOpenAIProvider": "Add OpenAI Provider", "enterNameForProvider": "Enter name for provider" -} \ No newline at end of file +} diff --git a/web-app/src/locales/id/providers.json b/web-app/src/locales/id/providers.json index 803aac3e7..5f89d69c6 100644 --- a/web-app/src/locales/id/providers.json +++ b/web-app/src/locales/id/providers.json @@ -35,7 +35,9 @@ "modelId": "ID Model", "enterModelId": "Masukkan ID model", "exploreModels": "Lihat daftar model dari {{provider}}", - "addModel": "Tambah Model" + "addModel": "Tambah Model", + "modelExists": "Model sudah ada", + "modelExistsDesc": "Silakan pilih ID model yang berbeda." }, "deleteModel": { "title": "Hapus Model: {{modelId}}", @@ -69,4 +71,4 @@ "addProvider": "Tambah Penyedia", "addOpenAIProvider": "Tambah Penyedia OpenAI", "enterNameForProvider": "Masukkan nama untuk penyedia" -} \ No newline at end of file +} diff --git a/web-app/src/locales/pl/providers.json b/web-app/src/locales/pl/providers.json index 55992e3bb..c1c03434e 100644 --- a/web-app/src/locales/pl/providers.json +++ b/web-app/src/locales/pl/providers.json @@ -35,7 +35,9 @@ "modelId": "Identyfikator Modelu", "enterModelId": "Wprowadź identyfikator modelu", "exploreModels": "Zobacz listę modeli dostawcy {{provider}}", - "addModel": "Dodaj Model" + "addModel": "Dodaj Model", + "modelExists": "Model już istnieje", + "modelExistsDesc": "Wybierz inny identyfikator modelu." }, "deleteModel": { "title": "Usuń Model: {{modelId}}", diff --git a/web-app/src/locales/vn/providers.json b/web-app/src/locales/vn/providers.json index bd6bdb334..8c0e6d1b8 100644 --- a/web-app/src/locales/vn/providers.json +++ b/web-app/src/locales/vn/providers.json @@ -35,7 +35,9 @@ "modelId": "ID mô hình", "enterModelId": "Nhập ID mô hình", "exploreModels": "Xem danh sách mô hình từ {{provider}}", - "addModel": "Thêm mô hình" + "addModel": "Thêm mô hình", + "modelExists": "Mô hình đã tồn tại", + "modelExistsDesc": "Vui lòng chọn một ID mô hình khác." 
}, "deleteModel": { "title": "Xóa mô hình: {{modelId}}", @@ -69,4 +71,4 @@ "addProvider": "Thêm nhà cung cấp", "addOpenAIProvider": "Thêm nhà cung cấp OpenAI", "enterNameForProvider": "Nhập tên cho nhà cung cấp" -} \ No newline at end of file +} diff --git a/web-app/src/locales/zh-CN/providers.json b/web-app/src/locales/zh-CN/providers.json index ecc04df49..2ca2beb2e 100644 --- a/web-app/src/locales/zh-CN/providers.json +++ b/web-app/src/locales/zh-CN/providers.json @@ -35,7 +35,9 @@ "modelId": "模型 ID", "enterModelId": "输入模型 ID", "exploreModels": "查看 {{provider}} 的模型列表", - "addModel": "添加模型" + "addModel": "添加模型", + "modelExists": "模型已存在", + "modelExistsDesc": "请选择不同的模型 ID。" }, "deleteModel": { "title": "删除模型:{{modelId}}", @@ -69,4 +71,4 @@ "addProvider": "添加提供商", "addOpenAIProvider": "添加 OpenAI 提供商", "enterNameForProvider": "输入提供商名称" -} \ No newline at end of file +} diff --git a/web-app/src/locales/zh-TW/providers.json b/web-app/src/locales/zh-TW/providers.json index 316a9ed08..39580818b 100644 --- a/web-app/src/locales/zh-TW/providers.json +++ b/web-app/src/locales/zh-TW/providers.json @@ -35,7 +35,9 @@ "modelId": "模型 ID", "enterModelId": "輸入模型 ID", "exploreModels": "查看 {{provider}} 的模型清單", - "addModel": "新增模型" + "addModel": "新增模型", + "modelExists": "模型已存在", + "modelExistsDesc": "請選擇不同的模型 ID。" }, "deleteModel": { "title": "刪除模型:{{modelId}}", @@ -69,4 +71,4 @@ "addProvider": "新增提供者", "addOpenAIProvider": "新增 OpenAI 提供者", "enterNameForProvider": "輸入提供者名稱" -} \ No newline at end of file +} diff --git a/web-app/src/routes/project/index.tsx b/web-app/src/routes/project/index.tsx index 5ab24bb39..300bb550b 100644 --- a/web-app/src/routes/project/index.tsx +++ b/web-app/src/routes/project/index.tsx @@ -1,4 +1,4 @@ -import { createFileRoute } from '@tanstack/react-router' +import { createFileRoute, useNavigate } from '@tanstack/react-router' import { useState, useMemo } from 'react' import { useThreadManagement } from '@/hooks/useThreadManagement' @@ -31,6 +31,7 @@ function Project() { function ProjectContent() { const { t } = useTranslation() + const navigate = useNavigate() const { folders, addFolder, updateFolder, deleteFolder, getFolderById } = useThreadManagement() const threads = useThreads((state) => state.threads) @@ -59,7 +60,12 @@ function ProjectContent() { if (editingKey) { updateFolder(editingKey, name) } else { - addFolder(name) + const newProject = addFolder(name) + // Navigate to the newly created project + navigate({ + to: '/project/$projectId', + params: { projectId: newProject.id }, + }) } setOpen(false) setEditingKey(null) diff --git a/web-app/src/routes/settings/providers/$providerName.tsx b/web-app/src/routes/settings/providers/$providerName.tsx index de978da1e..2ee868a1c 100644 --- a/web-app/src/routes/settings/providers/$providerName.tsx +++ b/web-app/src/routes/settings/providers/$providerName.tsx @@ -318,17 +318,7 @@ function ProviderDetail() { .getActiveModels() .then((models) => setActiveModels(models || [])) } catch (error) { - console.error('Error starting model:', error) - if ( - error && - typeof error === 'object' && - 'message' in error && - typeof error.message === 'string' - ) { - setModelLoadError({ message: error.message }) - } else { - setModelLoadError(typeof error === 'string' ? 
error : `${error}`) - } + setModelLoadError(error as ErrorObject) } finally { // Remove model from loading state setLoadingModels((prev) => prev.filter((id) => id !== modelId)) diff --git a/web-app/src/routes/threads/$threadId.tsx b/web-app/src/routes/threads/$threadId.tsx index a8bd03d29..a10c96ede 100644 --- a/web-app/src/routes/threads/$threadId.tsx +++ b/web-app/src/routes/threads/$threadId.tsx @@ -23,8 +23,8 @@ import { PlatformFeatures } from '@/lib/platform/const' import { PlatformFeature } from '@/lib/platform/types' import ScrollToBottom from '@/containers/ScrollToBottom' import { PromptProgress } from '@/components/PromptProgress' +import { ThreadPadding } from '@/containers/ThreadPadding' import { TEMPORARY_CHAT_ID, TEMPORARY_CHAT_QUERY_ID } from '@/constants/chat' -import { useThreadScrolling } from '@/hooks/useThreadScrolling' import { IconInfoCircle } from '@tabler/icons-react' import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip' @@ -99,9 +99,6 @@ function ThreadDetail() { const scrollContainerRef = useRef(null) - // Get padding height for ChatGPT-style message positioning - const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef) - // Listen for conversation not found events useEffect(() => { const handleConversationNotFound = (event: CustomEvent) => { @@ -123,7 +120,7 @@ function ThreadDetail() { return () => { window.removeEventListener(CONVERSATION_NOT_FOUND_EVENT, handleConversationNotFound as EventListener) } - }, [threadId, navigate]) + }, [threadId, navigate, t]) useEffect(() => { setCurrentThreadId(threadId) @@ -270,11 +267,7 @@ function ThreadDetail() { data-test-id="thread-content-text" /> {/* Persistent padding element for ChatGPT-style message positioning */} -
+
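
The exact JSX added on the final + line above, and the element returned by the new ThreadPadding.tsx earlier in this diff, is not shown here. Based on the surviving props and the paddingHeight value from useThreadScrolling, the component presumably renders a spacer sized by that height in place of the removed inline padding div. A hedged TypeScript sketch, where the spacer markup and the HTMLDivElement ref type are assumptions rather than code taken from the PR:

    import type { RefObject } from 'react'
    import { useThreadScrolling } from '@/hooks/useThreadScrolling'

    // Sketch of what ThreadPadding presumably renders: a spacer div whose height
    // comes from useThreadScrolling, giving the ChatGPT-style message positioning.
    export const ThreadPadding = ({
      threadId,
      scrollContainerRef,
    }: {
      threadId: string
      scrollContainerRef: RefObject<HTMLDivElement | null> // generic is assumed
    }) => {
      const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef)
      return <div style={{ height: paddingHeight }} />
    }

    // Presumed usage at the end of routes/threads/$threadId.tsx, replacing the
    // removed div:
    // <ThreadPadding threadId={threadId} scrollContainerRef={scrollContainerRef} />
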