Merge remote-tracking branch 'origin/dev' into mobile/dev

# Conflicts:
#	web-app/src/containers/HeaderPage.tsx
#	web-app/src/lib/platform/const.ts
#	web-app/src/routes/index.tsx
Author: Vanalite
Date: 2025-09-30 11:21:36 +07:00
Commit: c53d8c09c4
37 changed files with 409 additions and 86 deletions

View File (binary image; Before: 328 KiB, After: 328 KiB)

View File (binary image; Before: 634 KiB, After: 634 KiB)

View File (binary image; Before: 725 KiB, After: 725 KiB)

View File (binary image; Before: 235 KiB, After: 235 KiB)

View File (binary image; Before: 402 KiB, After: 402 KiB)

View File (binary image; Before: 1.7 MiB, After: 1.7 MiB)

View File (binary image; Before: 138 KiB, After: 138 KiB)

View File (binary image; Before: 176 KiB, After: 176 KiB)

View File (binary image; Before: 673 KiB, After: 673 KiB)

View File

@@ -4,13 +4,13 @@ description: "See the best ChatGPT alternatives in 2025. We've listed tools that
tags: AI, ChatGPT alternative, ChatGPT alternatives, alternative to chatgpt, Jan, local AI, privacy, open source, offline AI
categories: guides
date: 2025-09-29
-ogImage: _assets/chatgpt-alternative-jan.jpeg
+ogImage: assets/images/general/chatgpt-alternative-jan.jpeg
twitter:
  card: summary_large_image
  site: "@jandotai"
  title: "ChatGPT alternatives that actually replace it."
  description: "See the best ChatGPT alternatives in 2025. We've listed tools that are alternatives to ChatGPT."
-  image: _assets/chatgpt-alternative-jan.jpeg
+  image: assets/images/general/chatgpt-alternative-jan.jpeg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
@@ -33,7 +33,7 @@ If you want options that fit different needs, offline use, privacy, or specializ
### Jan is the best ChatGPT alternative
-![Use Jan to chat with AI models without internet access](./_assets/chatgpt-alternative-jan.jpeg)
+![Use Jan to chat with AI models without internet access](/assets/images/general/chatgpt-alternative-jan.jpeg)
*Jan as an open-source alternative to ChatGPT*
Jan is the most complete ChatGPT alternative available today. It enables:

View File

@@ -4,13 +4,13 @@ description: "A simple guide to replicating Deep Research results for free, with
tags: AI, local models, Jan, GGUF, Deep Research, local AI
categories: guides
date: 2025-08-04
-ogImage: _assets/research-result-local.png
+ogImage: assets/images/general/research-result-local.png
twitter:
  card: summary_large_image
  site: "@jandotai"
  title: "Replicating Deep Research with Jan"
  description: "Learn how to replicate Deep Research results with Jan."
-  image: _assets/research-result-local.jpg
+  image: assets/images/general/research-result-local.png
---
import { Callout } from 'nextra/components'

View File

@@ -4,7 +4,7 @@ description: "A straightforward guide to running DeepSeek R1 locally regardless
tags: DeepSeek, R1, local AI, Jan, GGUF, Qwen, Llama
categories: guides
date: 2025-01-31
-ogImage: assets/deepseek-r1-locally-jan.jpg
+ogImage: assets/images/general/deepseek-r1-locally-jan.jpg
twitter:
  card: summary_large_image
  site: "@jandotai"
@@ -17,7 +17,7 @@ import CTABlog from '@/components/Blog/CTA'
# Run DeepSeek R1 locally on your device (Beginner-Friendly Guide)
-![DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings](./_assets/deepseek-r1-locally-jan.jpg)
+![DeepSeek R1 running locally in Jan AI interface, showing the chat interface and model settings](/assets/images/general/deepseek-r1-locally-jan.jpg)
DeepSeek R1 is one of the best open-source models in the market right now, and you can run DeepSeek R1 on your own computer!

View File

@@ -3,7 +3,7 @@ title: "How we (try to) benchmark GPU kernels accurately"
description: "We present the process behind how we decided to benchmark GPU kernels and iteratively improved our benchmarking pipeline"
tags: ""
categories: research
-ogImage: "./_assets/cover-kernel-benchmarking.png"
+ogImage: assets/images/general/cover-kernel-benchmarking.png
date: 2025-09-17
---

View File

@@ -4,13 +4,13 @@ description: "Check if ChatGPT down right now, and learn how to use AI that neve
tags: AI, ChatGPT down, ChatGPT alternative, Jan, local AI, offline AI, ChatGPT at capacity
categories: guides
date: 2025-09-30
-ogImage: _assets/is-chatgpt-down.jpg
+ogImage: assets/images/general/is-chatgpt-down.jpg
twitter:
  card: summary_large_image
  site: "@jandotai"
-  title: "Realtime Status Checker: Is ChatGPT down?"
+  title: "Realtime Status: Is ChatGPT down?"
  description: "Check if ChatGPT is down right now with our real-time status checker, and learn how to use AI that never goes offline."
-  image: _assets/is-chatgpt-down.jpg
+  image: assets/images/general/is-chatgpt-down.jpg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
@@ -20,7 +20,7 @@ import { OpenAIStatusChecker } from '@/components/OpenAIStatusChecker'
If you're seeing ChatGPT is down, it could a good signal to switch to [Jan](https://www.jan.ai/), AI that never goes down.
-## 🔴 Realtime Status Checker: Is ChatGPT down?
+## 🔴 Realtime Status: Is ChatGPT down?
<Callout>
This live tracker shows if ChatGPT is down right now.
</Callout>
@@ -66,7 +66,7 @@ This live tracker shows if ChatGPT is down right now.
When ChatGPT is down, Jan keeps working. Jan is an open-source ChatGPT alternative that runs on your computer - no servers, no outages, no waiting.
-![Jan running when ChatGPT is down](./_assets/chatgpt-alternative-jan.jpeg)
+![Jan running when ChatGPT is down](/assets/images/general/is-chatgpt-down.jpg)
*Jan works even when ChatGPT doesn't.*
### Why Jan never goes down:

View File

@@ -4,13 +4,13 @@ description: "Use offline AI with Jan, a free & open-source alternative to ChatG
tags: AI, chatgpt offline, ChatGPT alternative, offline AI, Jan, local AI, privacy
categories: guides
date: 2025-02-08
-ogImage: _assets/offline-chatgpt-alternatives-jan.jpg
+ogImage: assets/images/general/offline-chatgpt-alternatives-jan.jpg
twitter:
  card: summary_large_image
  site: "@jandotai"
  title: "Offline ChatGPT: You can't run ChatGPT offline, do this instead"
  description: "Use offline AI with Jan, a free & open-source alternative to ChatGPT that runs 100% offline."
-  image: _assets/offline-chatgpt-alternatives-jan.jpg
+  image: assets/images/general/offline-chatgpt-alternatives-jan.jpg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
@@ -64,7 +64,7 @@ If you'd like to learn more about local AI, check [how to run AI models locally
### 3. Start using AI offline
-![Chat with AI offline using Jan's interface](./_assets/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet")
+![Chat with AI offline using Jan's interface](/assets/images/general/run-ai-locally-with-jan.jpg "Experience ChatGPT-like interactions without internet")
*Use Jan's clean interface to chat with AI - no internet required*
Once downloaded, you can use AI anywhere, anytime:

View File

@@ -50,7 +50,7 @@ Thinking mode is powerful, but greedy decoding kills its output. It'll repeat or
## Quick summary
-![Qwen3 settings](./_assets/qwen3-settings-jan-ai.jpeg)
+![Qwen3 settings](/assets/images/general/qwen3-30b-settings.jpg)
### Non-thinking mode (`enable_thinking=False`)

View File

@@ -4,7 +4,7 @@ description: "A straightforward guide to running AI models locally on your compu
tags: AI, local models, Jan, GGUF, privacy, local AI
categories: guides
date: 2025-01-31
-ogImage: assets/run-ai-locally-with-jan.jpg
+ogImage: assets/images/general/run-ai-locally-with-jan.jpg
twitter:
  card: summary_large_image
  site: "@jandotai"
@@ -35,7 +35,7 @@ Most people think running AI models locally is complicated. It's not. Anyone can
That's all to run your first AI model locally!
-![Jan's simple and clean chat interface for local AI](./_assets/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation")
+![Jan's simple and clean chat interface for local AI](/assets/images/general/run-ai-locally-with-jan.jpg "Jan's easy-to-use chat interface after installation")
*Jan's easy-to-use chat interface after installation.*
Keep reading to learn key terms of local AI and the things you should know before running AI models locally.

View File

@@ -4,21 +4,19 @@ description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss local
tags: OpenAI, gpt-oss, local AI, Jan, privacy, Apache-2.0, llama.cpp, Ollama, LM Studio
categories: guides
date: 2025-08-06
-ogImage: assets/gpt-oss%20locally.jpeg
+ogImage: assets/images/general/gpt-oss locally.jpeg
twitter:
  card: summary_large_image
  site: "@jandotai"
  title: "Run OpenAI's gpt-oss Locally in 5 Minutes (Beginner Guide)"
  description: "Complete 5-minute beginner guide to running OpenAI's gpt-oss locally with Jan AI for private, offline conversations."
-  image: assets/gpt-oss%20locally.jpeg
+  image: assets/images/general/gpt-oss locally.jpeg
---
import { Callout } from 'nextra/components'
import CTABlog from '@/components/Blog/CTA'
# Run OpenAI's gpt-oss Locally in 5 mins
-![gpt-oss running locally in Jan interface](./_assets/gpt-oss%20locally.jpeg)
OpenAI launched [gpt-oss](https://openai.com/index/introducing-gpt-oss/), marking their return to open-source AI after GPT-2. This model is designed to run locally on consumer hardware. This guide shows you how to install and run gpt-oss on your computer for private, offline AI conversations.
## What is gpt-oss?

View File

@@ -107,14 +107,15 @@ const config: DocsThemeConfig = {
  head: function useHead() {
    const { title, frontMatter } = useConfig()
    const { asPath } = useRouter()
-    const titleTemplate =
-      (asPath.includes('/desktop')
+    const titleTemplate = asPath.includes('/post/')
+      ? (frontMatter?.title || title)
+      : (asPath.includes('/desktop')
        ? 'Jan Desktop'
        : asPath.includes('/server')
          ? 'Jan Server'
          : 'Jan') +
        ' - ' +
        (frontMatter?.title || title)
    return (
      <Fragment>
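
Read side by side, the new `titleTemplate` means pages under `/post/` use their own frontmatter title with no site prefix, while other sections keep the old "Jan Desktop / Jan Server / Jan - title" pattern. A minimal sketch of the same branching outside the Nextra config (paths and titles here are illustrative only):

```ts
// Illustrative only: mirrors the ternary introduced in the hunk above.
function titleFor(asPath: string, pageTitle: string): string {
  if (asPath.includes('/post/')) return pageTitle // blog posts: no site prefix
  const prefix = asPath.includes('/desktop')
    ? 'Jan Desktop'
    : asPath.includes('/server')
      ? 'Jan Server'
      : 'Jan'
  return `${prefix} - ${pageTitle}`
}

console.log(titleFor('/post/run-ai-locally', 'Run AI locally')) // "Run AI locally"
console.log(titleFor('/desktop/install', 'Install'))            // "Jan Desktop - Install"
```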

View File

@@ -11,6 +11,9 @@ import {
} from '@janhq/core'
import { RemoteApi } from './api'
import { getDefaultAssistant, ObjectParser, combineConversationItemsToMessages } from './utils'
+import { ApiError } from '../shared/types/errors'
+
+const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found'
export default class ConversationalExtensionWeb extends ConversationalExtension {
  private remoteApi: RemoteApi | undefined
@@ -111,6 +114,15 @@ export default class ConversationalExtensionWeb extends ConversationalExtension
      return messages
    } catch (error) {
      console.error('Failed to list messages:', error)
+      // Check if it's a 404 error (conversation not found)
+      if (error instanceof ApiError && error.isNotFound()) {
+        // Trigger a navigation event to redirect to home
+        // We'll use a custom event that the web app can listen to
+        window.dispatchEvent(new CustomEvent(CONVERSATION_NOT_FOUND_EVENT, {
+          detail: { threadId, error: error.message }
+        }))
+      }
      return []
    }
  }
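
The extension only dispatches this event; the web-app side (shown later in this diff, in `routes/threads/$threadId`) registers the matching `window` listener, toasts, and redirects home. A minimal sketch of that contract, with the event name and detail shape taken from the diff and the handler body illustrative:

```ts
// Illustrative listener for the event dispatched above.
const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found'

type ConversationNotFoundDetail = { threadId: string; error: string }

window.addEventListener(CONVERSATION_NOT_FOUND_EVENT, (event) => {
  const { threadId, error } = (event as CustomEvent<ConversationNotFoundDetail>).detail
  console.warn(`Conversation ${threadId} no longer exists on the server:`, error)
  // navigate back to the home route here, as the real route component does
})
```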

View File

@@ -5,9 +5,45 @@
import { getSharedAuthService, JanAuthService } from '../shared'
import { JanModel, janProviderStore } from './store'
+import { ApiError } from '../shared/types/errors'
// JAN_API_BASE is defined in vite.config.ts
+// Constants
+const TEMPORARY_CHAT_ID = 'temporary-chat'
+
+/**
+ * Determines the appropriate API endpoint and request payload based on chat type
+ * @param request - The chat completion request
+ * @returns Object containing endpoint URL and processed request payload
+ */
+function getChatCompletionConfig(request: JanChatCompletionRequest, stream: boolean = false) {
+  const isTemporaryChat = request.conversation_id === TEMPORARY_CHAT_ID
+
+  // For temporary chats, use the stateless /chat/completions endpoint
+  // For regular conversations, use the stateful /conv/chat/completions endpoint
+  const endpoint = isTemporaryChat
+    ? `${JAN_API_BASE}/chat/completions`
+    : `${JAN_API_BASE}/conv/chat/completions`
+
+  const payload = {
+    ...request,
+    stream,
+    ...(isTemporaryChat ? {
+      // For temporary chat: don't store anything, remove conversation metadata
+      conversation_id: undefined,
+    } : {
+      // For regular chat: store everything, use conversation metadata
+      store: true,
+      store_reasoning: true,
+      conversation: request.conversation_id,
+      conversation_id: undefined,
+    })
+  }
+
+  return { endpoint, payload, isTemporaryChat }
+}
export interface JanModelsResponse {
  object: string
  data: JanModel[]
@@ -102,7 +138,8 @@ export class JanApiClient {
      return models
    } catch (error) {
-      const errorMessage = error instanceof Error ? error.message : 'Failed to fetch models'
+      const errorMessage = error instanceof ApiError ? error.message :
+        error instanceof Error ? error.message : 'Failed to fetch models'
      janProviderStore.setError(errorMessage)
      janProviderStore.setLoadingModels(false)
      throw error
@@ -115,22 +152,18 @@ export class JanApiClient {
    try {
      janProviderStore.clearError()
+      const { endpoint, payload } = getChatCompletionConfig(request, false)
      return await this.authService.makeAuthenticatedRequest<JanChatCompletionResponse>(
-        `${JAN_API_BASE}/conv/chat/completions`,
+        endpoint,
        {
          method: 'POST',
-          body: JSON.stringify({
-            ...request,
-            stream: false,
-            store: true,
-            store_reasoning: true,
-            conversation: request.conversation_id,
-            conversation_id: undefined,
-          }),
+          body: JSON.stringify(payload),
        }
      )
    } catch (error) {
-      const errorMessage = error instanceof Error ? error.message : 'Failed to create chat completion'
+      const errorMessage = error instanceof ApiError ? error.message :
+        error instanceof Error ? error.message : 'Failed to create chat completion'
      janProviderStore.setError(errorMessage)
      throw error
    }
@@ -144,23 +177,17 @@ export class JanApiClient {
  ): Promise<void> {
    try {
      janProviderStore.clearError()
      const authHeader = await this.authService.getAuthHeader()
+      const { endpoint, payload } = getChatCompletionConfig(request, true)
-      const response = await fetch(`${JAN_API_BASE}/conv/chat/completions`, {
+      const response = await fetch(endpoint, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          ...authHeader,
        },
-        body: JSON.stringify({
-          ...request,
-          stream: true,
-          store: true,
-          store_reasoning: true,
-          conversation: request.conversation_id,
-          conversation_id: undefined,
-        }),
+        body: JSON.stringify(payload),
      })
      if (!response.ok) {
@@ -216,7 +243,8 @@ export class JanApiClient {
        reader.releaseLock()
      }
    } catch (error) {
-      const err = error instanceof Error ? error : new Error('Unknown error occurred')
+      const err = error instanceof ApiError ? error :
+        error instanceof Error ? error : new Error('Unknown error occurred')
      janProviderStore.setError(err.message)
      onError?.(err)
      throw err
@@ -230,7 +258,8 @@ export class JanApiClient {
      await this.getModels()
      console.log('Jan API client initialized successfully')
    } catch (error) {
-      const errorMessage = error instanceof Error ? error.message : 'Failed to initialize API client'
+      const errorMessage = error instanceof ApiError ? error.message :
+        error instanceof Error ? error.message : 'Failed to initialize API client'
      janProviderStore.setError(errorMessage)
      throw error
    } finally {
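
Taken together, `getChatCompletionConfig` routes temporary chats to the stateless endpoint and drops the store flags, while regular conversations keep the stateful `/conv` endpoint. A self-contained sketch of that split, following the field names in the hunk above (the base URL, request shape, and sample IDs are assumptions for illustration; the real types and `JAN_API_BASE` come from this package):

```ts
// Simplified re-statement of the helper above, runnable on its own.
const JAN_API_BASE = 'https://api.example.com/v1' // assumed; defined in vite.config.ts in the real code
const TEMPORARY_CHAT_ID = 'temporary-chat'

interface ChatCompletionRequest {
  conversation_id: string
  model: string
  messages: { role: 'user' | 'assistant'; content: string }[]
}

function getChatCompletionConfig(request: ChatCompletionRequest, stream = false) {
  const isTemporaryChat = request.conversation_id === TEMPORARY_CHAT_ID
  const endpoint = isTemporaryChat
    ? `${JAN_API_BASE}/chat/completions`      // stateless: nothing persisted server-side
    : `${JAN_API_BASE}/conv/chat/completions` // stateful: tied to a stored conversation
  const payload = {
    ...request,
    stream,
    ...(isTemporaryChat
      ? { conversation_id: undefined }
      : {
          store: true,
          store_reasoning: true,
          conversation: request.conversation_id,
          conversation_id: undefined,
        }),
  }
  return { endpoint, payload, isTemporaryChat }
}

// A temporary chat hits /chat/completions with no store flags:
console.log(getChatCompletionConfig({ conversation_id: TEMPORARY_CHAT_ID, model: 'example-model', messages: [] }, true).endpoint)
// A regular conversation hits /conv/chat/completions with store: true:
console.log(getChatCompletionConfig({ conversation_id: 'conv_123', model: 'example-model', messages: [] }).payload)
```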

View File

@@ -15,6 +15,7 @@ import {
} from '@janhq/core' // cspell: disable-line
import { janApiClient, JanChatMessage } from './api'
import { janProviderStore } from './store'
+import { ApiError } from '../shared/types/errors'
// Jan models support tools via MCP
const JAN_MODEL_CAPABILITIES = ['tools'] as const
@@ -192,7 +193,8 @@ export default class JanProviderWeb extends AIEngine {
      console.error(`Failed to unload Jan session ${sessionId}:`, error)
      return {
        success: false,
-        error: error instanceof Error ? error.message : 'Unknown error',
+        error: error instanceof ApiError ? error.message :
+          error instanceof Error ? error.message : 'Unknown error',
      }
    }
  }

View File

@@ -16,6 +16,7 @@ import { logoutUser, refreshToken, guestLogin } from './api'
import { AuthProviderRegistry } from './registry'
import { AuthBroadcast } from './broadcast'
import type { ProviderType } from './providers'
+import { ApiError } from '../types/errors'
const authProviderRegistry = new AuthProviderRegistry()
@@ -160,7 +161,7 @@ export class JanAuthService {
      this.tokenExpiryTime = Date.now() + tokens.expires_in * 1000
    } catch (error) {
      console.error('Failed to refresh access token:', error)
-      if (error instanceof Error && error.message.includes('401')) {
+      if (error instanceof ApiError && error.isStatus(401)) {
        await this.handleSessionExpired()
      }
      throw error
@@ -305,9 +306,7 @@ export class JanAuthService {
    if (!response.ok) {
      const errorText = await response.text()
-      throw new Error(
-        `API request failed: ${response.status} ${response.statusText} - ${errorText}`
-      )
+      throw new ApiError(response.status, response.statusText, errorText)
    }
    return response.json()
@@ -418,7 +417,7 @@ export class JanAuthService {
      )
    } catch (error) {
      console.error('Failed to fetch user profile:', error)
-      if (error instanceof Error && error.message.includes('401')) {
+      if (error instanceof ApiError && error.isStatus(401)) {
        // Authentication failed - handle session expiry
        await this.handleSessionExpired()
        return null

View File

@@ -0,0 +1,50 @@
/**
 * Shared error types for API responses
 */
export class ApiError extends Error {
  public readonly status: number
  public readonly statusText: string
  public readonly responseText: string

  constructor(status: number, statusText: string, responseText: string, message?: string) {
    super(message || `API request failed: ${status} ${statusText} - ${responseText}`)
    this.name = 'ApiError'
    this.status = status
    this.statusText = statusText
    this.responseText = responseText

    // Maintains proper stack trace for where our error was thrown (only available on V8)
    if ((Error as any).captureStackTrace) {
      (Error as any).captureStackTrace(this, ApiError)
    }
  }

  /**
   * Check if this is a specific HTTP status code
   */
  isStatus(code: number): boolean {
    return this.status === code
  }

  /**
   * Check if this is a 404 Not Found error
   */
  isNotFound(): boolean {
    return this.status === 404
  }

  /**
   * Check if this is a client error (4xx)
   */
  isClientError(): boolean {
    return this.status >= 400 && this.status < 500
  }

  /**
   * Check if this is a server error (5xx)
   */
  isServerError(): boolean {
    return this.status >= 500 && this.status < 600
  }
}
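
For reference, the rest of the diff consumes this class in two ways: `JanAuthService` throws it whenever a response is not `ok`, and callers branch on the helper predicates instead of string-matching error messages. A minimal, self-contained usage sketch (the wrapper function, endpoint, and import path are hypothetical):

```ts
import { ApiError } from './errors' // adjust the path to wherever the shared types live

// Hypothetical wrapper mirroring how makeAuthenticatedRequest raises ApiError.
async function fetchJson<T>(url: string, init?: RequestInit): Promise<T> {
  const response = await fetch(url, init)
  if (!response.ok) {
    throw new ApiError(response.status, response.statusText, await response.text())
  }
  return response.json() as Promise<T>
}

async function example() {
  try {
    return await fetchJson('/api/resource') // hypothetical endpoint
  } catch (error) {
    if (error instanceof ApiError && error.isNotFound()) return null // 404: treat as missing
    if (error instanceof ApiError && error.isStatus(401)) {
      // trigger session-expiry handling, as JanAuthService does
    }
    throw error
  }
}
```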

View File

@@ -21,8 +21,8 @@
    "@dnd-kit/core": "6.3.1",
    "@dnd-kit/modifiers": "9.0.0",
    "@dnd-kit/sortable": "10.0.0",
-    "@jan/extensions-web": "link:../extensions-web",
-    "@janhq/core": "link:../core",
+    "@jan/extensions-web": "workspace:*",
+    "@janhq/core": "workspace:*",
    "@radix-ui/react-accordion": "1.2.11",
    "@radix-ui/react-avatar": "1.1.10",
    "@radix-ui/react-dialog": "1.1.15",

View File

@@ -0,0 +1,6 @@
/**
 * Chat-related constants
 */
export const TEMPORARY_CHAT_ID = 'temporary-chat'
export const TEMPORARY_CHAT_QUERY_ID = 'temporary-chat'

View File

@@ -1,8 +1,13 @@
import { useLeftPanel } from '@/hooks/useLeftPanel'
import { cn } from '@/lib/utils'
-import { IconLayoutSidebar } from '@tabler/icons-react'
-import { ReactNode } from 'react'
import { useMobileScreen, useSmallScreen } from '@/hooks/useMediaQuery'
+import { IconLayoutSidebar, IconMessage, IconMessageFilled } from '@tabler/icons-react'
+import { ReactNode } from 'react'
+import { useRouter } from '@tanstack/react-router'
+import { route } from '@/constants/routes'
+import { PlatformFeatures } from '@/lib/platform/const'
+import { PlatformFeature } from '@/lib/platform/types'
+import { TEMPORARY_CHAT_QUERY_ID } from '@/constants/chat'
type HeaderPageProps = {
  children?: ReactNode
@@ -11,6 +16,28 @@ const HeaderPage = ({ children }: HeaderPageProps) => {
  const { open, setLeftPanel } = useLeftPanel()
  const isMobile = useMobileScreen()
  const isSmallScreen = useSmallScreen()
+  const router = useRouter()
+  const currentPath = router.state.location.pathname
+  const isHomePage = currentPath === route.home
+
+  // Parse temporary chat flag from URL search params directly to avoid invariant errors
+  const searchString = window.location.search
+  const urlSearchParams = new URLSearchParams(searchString)
+  const isTemporaryChat = isHomePage && urlSearchParams.get(TEMPORARY_CHAT_QUERY_ID) === 'true'
+
+  const handleChatToggle = () => {
+    console.log('Chat toggle clicked!', { isTemporaryChat, isHomePage, currentPath })
+    if (isHomePage) {
+      if (isTemporaryChat) {
+        console.log('Switching to regular chat')
+        router.navigate({ to: route.home, search: {} })
+      } else {
+        console.log('Switching to temporary chat')
+        router.navigate({ to: route.home, search: { [TEMPORARY_CHAT_QUERY_ID]: true } })
+      }
+    }
+  }
  return (
    <div
@@ -50,6 +77,29 @@ const HeaderPage = ({ children }: HeaderPageProps) => {
        )}>
          {children}
        </div>
+        {/* Temporary Chat Toggle - Only show on home page if feature is enabled */}
+        {PlatformFeatures[PlatformFeature.TEMPORARY_CHAT] && isHomePage && (
+          <div className="ml-auto">
+            <button
+              className="size-8 cursor-pointer flex items-center justify-center rounded hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out relative z-20"
+              onClick={handleChatToggle}
+              title={isTemporaryChat ? 'Switch to Regular Chat' : 'Start Temporary Chat'}
+            >
+              {isTemporaryChat ? (
+                <IconMessageFilled
+                  size={18}
+                  className="text-main-view-fg"
+                />
+              ) : (
+                <IconMessage
+                  size={18}
+                  className="text-main-view-fg"
+                />
+              )}
+            </button>
+          </div>
+        )}
      </div>
    </div>
  )

View File

@@ -33,6 +33,7 @@ import {
} from '@/utils/reasoning'
import { useAssistant } from './useAssistant'
import { useShallow } from 'zustand/shallow'
+import { TEMPORARY_CHAT_QUERY_ID, TEMPORARY_CHAT_ID } from '@/constants/chat'
export const useChat = () => {
  const [
@@ -80,12 +81,21 @@ export const useChat = () => {
  const getMessages = useMessages((state) => state.getMessages)
  const addMessage = useMessages((state) => state.addMessage)
+  const setMessages = useMessages((state) => state.setMessages)
  const setModelLoadError = useModelLoad((state) => state.setModelLoadError)
  const router = useRouter()
  const getCurrentThread = useCallback(async () => {
    let currentThread = retrieveThread()
+    // Check if we're in temporary chat mode
+    const isTemporaryMode = window.location.search.includes(`${TEMPORARY_CHAT_QUERY_ID}=true`)
+
+    // Clear messages for existing temporary thread on reload to ensure fresh start
+    if (isTemporaryMode && currentThread?.id === TEMPORARY_CHAT_ID) {
+      setMessages(TEMPORARY_CHAT_ID, [])
+    }
    if (!currentThread) {
      // Get prompt directly from store when needed
      const currentPrompt = usePrompt.getState().prompt
@@ -93,14 +103,28 @@ export const useChat = () => {
      const assistants = useAssistant.getState().assistants
      const selectedModel = useModelProvider.getState().selectedModel
      const selectedProvider = useModelProvider.getState().selectedProvider
      currentThread = await createThread(
        {
          id: selectedModel?.id ?? defaultModel(selectedProvider),
          provider: selectedProvider,
        },
-        currentPrompt,
-        assistants.find((a) => a.id === currentAssistant?.id) || assistants[0]
+        isTemporaryMode ? 'Temporary Chat' : currentPrompt,
+        assistants.find((a) => a.id === currentAssistant?.id) || assistants[0],
+        undefined, // no project metadata
+        isTemporaryMode // pass temporary flag
      )
+
+      // Clear messages for temporary chat to ensure fresh start on reload
+      if (isTemporaryMode && currentThread?.id === TEMPORARY_CHAT_ID) {
+        setMessages(TEMPORARY_CHAT_ID, [])
+      }
+
+      // Set flag for temporary chat navigation
+      if (currentThread.id === TEMPORARY_CHAT_ID) {
+        sessionStorage.setItem('temp-chat-nav', 'true')
+      }
      router.navigate({
        to: route.threadsDetail,
        params: { threadId: currentThread.id },

View File

@@ -2,6 +2,7 @@ import { create } from 'zustand'
import { ulid } from 'ulidx'
import { getServiceHub } from '@/hooks/useServiceHub'
import { Fzf } from 'fzf'
+import { TEMPORARY_CHAT_ID } from '@/constants/chat'
type ThreadState = {
  threads: Record<string, Thread>
@@ -21,7 +22,8 @@ type ThreadState = {
    model: ThreadModel,
    title?: string,
    assistant?: Assistant,
-    projectMetadata?: { id: string; name: string; updated_at: number }
+    projectMetadata?: { id: string; name: string; updated_at: number },
+    isTemporary?: boolean
  ) => Promise<Thread>
  updateCurrentThreadModel: (model: ThreadModel) => void
  getFilteredThreads: (searchTerm: string) => Thread[]
@@ -61,9 +63,12 @@ export const useThreads = create<ThreadState>()((set, get) => ({
      },
      {} as Record<string, Thread>
    )
+    // Filter out temporary chat for search index
+    const filteredForSearch = Object.values(threadMap).filter(t => t.id !== TEMPORARY_CHAT_ID)
    set({
      threads: threadMap,
-      searchIndex: new Fzf<Thread[]>(Object.values(threadMap), {
+      searchIndex: new Fzf<Thread[]>(filteredForSearch, {
        selector: (item: Thread) => item.title,
      }),
    })
@@ -71,15 +76,18 @@ export const useThreads = create<ThreadState>()((set, get) => ({
  getFilteredThreads: (searchTerm: string) => {
    const { threads, searchIndex } = get()
+    // Filter out temporary chat from all operations
+    const filteredThreadsValues = Object.values(threads).filter(t => t.id !== TEMPORARY_CHAT_ID)
    // If no search term, return all threads
    if (!searchTerm) {
      // return all threads
-      return Object.values(threads)
+      return filteredThreadsValues
    }
    let currentIndex = searchIndex
    if (!currentIndex?.find) {
-      currentIndex = new Fzf<Thread[]>(Object.values(threads), {
+      currentIndex = new Fzf<Thread[]>(filteredThreadsValues, {
        selector: (item: Thread) => item.title,
      })
      set({ searchIndex: currentIndex })
@@ -125,7 +133,7 @@ export const useThreads = create<ThreadState>()((set, get) => ({
      getServiceHub().threads().deleteThread(threadId)
      return {
        threads: remainingThreads,
-        searchIndex: new Fzf<Thread[]>(Object.values(remainingThreads), {
+        searchIndex: new Fzf<Thread[]>(Object.values(remainingThreads).filter(t => t.id !== TEMPORARY_CHAT_ID), {
          selector: (item: Thread) => item.title,
        }),
      }
@@ -165,7 +173,7 @@ export const useThreads = create<ThreadState>()((set, get) => ({
      return {
        threads: remainingThreads,
-        searchIndex: new Fzf<Thread[]>(Object.values(remainingThreads), {
+        searchIndex: new Fzf<Thread[]>(Object.values(remainingThreads).filter(t => t.id !== TEMPORARY_CHAT_ID), {
          selector: (item: Thread) => item.title,
        }),
      }
@@ -218,18 +226,24 @@ export const useThreads = create<ThreadState>()((set, get) => ({
  setCurrentThreadId: (threadId) => {
    if (threadId !== get().currentThreadId) set({ currentThreadId: threadId })
  },
-  createThread: async (model, title, assistant, projectMetadata) => {
+  createThread: async (model, title, assistant, projectMetadata, isTemporary) => {
    const newThread: Thread = {
-      id: ulid(),
-      title: title ?? 'New Thread',
+      id: isTemporary ? TEMPORARY_CHAT_ID : ulid(),
+      title: title ?? (isTemporary ? 'Temporary Chat' : 'New Thread'),
      model,
      updated: Date.now() / 1000,
      assistants: assistant ? [assistant] : [],
-      ...(projectMetadata && {
+      ...(projectMetadata && !isTemporary && {
        metadata: {
          project: projectMetadata,
        },
      }),
+      ...(isTemporary && {
+        metadata: {
+          isTemporary: true,
+          ...(projectMetadata && { project: projectMetadata }),
+        },
+      }),
    }
    return await getServiceHub()
      .threads()
@@ -307,7 +321,7 @@ export const useThreads = create<ThreadState>()((set, get) => ({
      const newThreads = { ...state.threads, [threadId]: updatedThread }
      return {
        threads: newThreads,
-        searchIndex: new Fzf<Thread[]>(Object.values(newThreads), {
+        searchIndex: new Fzf<Thread[]>(Object.values(newThreads).filter(t => t.id !== TEMPORARY_CHAT_ID), {
          selector: (item: Thread) => item.title,
        }),
      }
@@ -337,7 +351,7 @@ export const useThreads = create<ThreadState>()((set, get) => ({
      return {
        threads: updatedThreads,
-        searchIndex: new Fzf<Thread[]>(Object.values(updatedThreads), {
+        searchIndex: new Fzf<Thread[]>(Object.values(updatedThreads).filter(t => t.id !== TEMPORARY_CHAT_ID), {
          selector: (item: Thread) => item.title,
        }),
      }
@@ -359,7 +373,7 @@ export const useThreads = create<ThreadState>()((set, get) => ({
      const newThreads = { ...state.threads, [threadId]: updatedThread }
      return {
        threads: newThreads,
-        searchIndex: new Fzf<Thread[]>(Object.values(newThreads), {
+        searchIndex: new Fzf<Thread[]>(Object.values(newThreads).filter(t => t.id !== TEMPORARY_CHAT_ID), {
          selector: (item: Thread) => item.title,
        }),
      }
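
The `Object.values(...).filter(t => t.id !== TEMPORARY_CHAT_ID)` plus `new Fzf(...)` pairing now appears in several places in this store. A small helper along these lines (the name, placement, and stand-in types are illustrative, not part of the diff) would keep the temporary-chat exclusion in one spot:

```ts
import { Fzf } from 'fzf'

// Minimal stand-ins for this sketch; the real Thread type and TEMPORARY_CHAT_ID
// come from the app ('@/constants/chat').
type Thread = { id: string; title: string }
const TEMPORARY_CHAT_ID = 'temporary-chat'

// Hypothetical helper: build the fuzzy-search index while always excluding the temporary chat.
function buildThreadSearchIndex(threads: Record<string, Thread>) {
  const searchable = Object.values(threads).filter((t) => t.id !== TEMPORARY_CHAT_ID)
  return new Fzf(searchable, {
    selector: (item: Thread) => item.title,
  })
}

// Usage inside the store's set() calls: searchIndex: buildThreadSearchIndex(newThreads)
```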

View File

@@ -77,4 +77,7 @@ export const PlatformFeatures: Record<PlatformFeature, boolean> = {
  // First message persisted thread - enabled for web only
  [PlatformFeature.FIRST_MESSAGE_PERSISTED_THREAD]: !isPlatformTauri(),
+
+  // Temporary chat mode - enabled for web only
+  [PlatformFeature.TEMPORARY_CHAT]: !isPlatformTauri(),
}

View File

@@ -68,4 +68,7 @@ export enum PlatformFeature {
  // First message persisted thread - web-only feature for storing first user message locally during thread creation
  FIRST_MESSAGE_PERSISTED_THREAD = 'firstMessagePersistedThread',
+
+  // Temporary chat mode - web-only feature for ephemeral conversations like ChatGPT
+  TEMPORARY_CHAT = 'temporaryChat',
}

View File

@@ -1,6 +1,8 @@
{
  "welcome": "Hi, how are you?",
  "description": "How can I help you today?",
+  "temporaryChat": "Temporary Chat",
+  "temporaryChatDescription": "Start a temporary conversation that won't be saved to your chat history.",
  "status": {
    "empty": "No Chats Found"
  },

View File

@@ -124,6 +124,10 @@
  "error": "Error",
  "success": "Success",
  "warning": "Warning",
+  "conversationNotAvailable": "Conversation not available",
+  "conversationNotAvailableDescription": "The conversation you are trying to access is not available or has been deleted.",
+  "temporaryChat": "Temporary Chat",
+  "temporaryChatTooltip": "Temporary chat won't appear in your history",
  "noResultsFoundDesc": "We couldn't find any chats matching your search. Try a different keyword.",
  "searchModels": "Search models...",
  "searchStyles": "Search styles...",

View File

@@ -15,6 +15,7 @@ type SearchParams = {
    id: string
    provider: string
  }
+  'temporary-chat'?: boolean
}
import DropdownAssistant from '@/containers/DropdownAssistant'
import { useEffect } from 'react'
@@ -22,12 +23,22 @@ import { useThreads } from '@/hooks/useThreads'
import { useMobileScreen } from '@/hooks/useMediaQuery'
import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
+import { TEMPORARY_CHAT_QUERY_ID } from '@/constants/chat'
export const Route = createFileRoute(route.home as any)({
  component: Index,
-  validateSearch: (search: Record<string, unknown>): SearchParams => ({
-    model: search.model as SearchParams['model'],
-  }),
+  validateSearch: (search: Record<string, unknown>): SearchParams => {
+    const result: SearchParams = {
+      model: search.model as SearchParams['model'],
+    }
+
+    // Only include temporary-chat if it's explicitly true
+    if (search[TEMPORARY_CHAT_QUERY_ID] === 'true' || search[TEMPORARY_CHAT_QUERY_ID] === true) {
+      result['temporary-chat'] = true
+    }
+
+    return result
+  },
})
function Index() {
@@ -35,6 +46,7 @@ function Index() {
  const { providers } = useModelProvider()
  const search = useSearch({ from: route.home as any })
  const selectedModel = search.model
+  const isTemporaryChat = search['temporary-chat']
  const { setCurrentThreadId } = useThreads()
  const isMobile = useMobileScreen()
  useTools()
@@ -87,7 +99,7 @@ function Index() {
            isMobile ? 'text-2xl sm:text-3xl' : 'text-4xl'
          )}
        >
-          {t('chat:welcome')}
+          {isTemporaryChat ? t('chat:temporaryChat') : t('chat:welcome')}
        </h1>
        <p
          className={cn(
@@ -96,7 +108,7 @@ function Index() {
            isMobile ? 'text-base' : 'text-lg'
          )}
        >
-          {t('chat:description')}
+          {isTemporaryChat ? t('chat:temporaryChatDescription') : t('chat:description')}
        </p>
      </div>
      <div className="flex-1 shrink-0">

View File

@@ -1,7 +1,9 @@
import { useEffect, useMemo, useRef } from 'react'
-import { createFileRoute, useParams } from '@tanstack/react-router'
+import { createFileRoute, useParams, redirect, useNavigate } from '@tanstack/react-router'
import cloneDeep from 'lodash.clonedeep'
import { cn } from '@/lib/utils'
+import { toast } from 'sonner'
+import { useTranslation } from '@/i18n/react-i18next-compat'
import HeaderPage from '@/containers/HeaderPage'
import { useThreads } from '@/hooks/useThreads'
@@ -22,16 +24,63 @@ import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
import ScrollToBottom from '@/containers/ScrollToBottom'
import { PromptProgress } from '@/components/PromptProgress'
+import { TEMPORARY_CHAT_ID, TEMPORARY_CHAT_QUERY_ID } from '@/constants/chat'
import { useThreadScrolling } from '@/hooks/useThreadScrolling'
+import { IconInfoCircle } from '@tabler/icons-react'
+import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
+
+const CONVERSATION_NOT_FOUND_EVENT = 'conversation-not-found'
+
+const TemporaryChatIndicator = ({ t }: { t: (key: string) => string }) => {
+  return (
+    <div className="flex items-center gap-1.5 px-3 py-1 rounded-md bg-main-view-fg/5 text-main-view-fg/70 text-sm">
+      <span>{t('common:temporaryChat')}</span>
+      <Tooltip>
+        <TooltipTrigger asChild>
+          <div className="relative z-20">
+            <IconInfoCircle
+              size={14}
+              className="text-main-view-fg/50 hover:text-main-view-fg/70 transition-colors cursor-pointer"
+            />
+          </div>
+        </TooltipTrigger>
+        <TooltipContent className="z-[9999]">
+          <p>{t('common:temporaryChatTooltip')}</p>
+        </TooltipContent>
+      </Tooltip>
+    </div>
+  )
+}
// as route.threadsDetail
export const Route = createFileRoute('/threads/$threadId')({
+  beforeLoad: ({ params }) => {
+    // Check if this is the temporary chat being accessed directly
+    if (params.threadId === TEMPORARY_CHAT_ID) {
+      // Check if we have the navigation flag in sessionStorage
+      const hasNavigationFlag = sessionStorage.getItem('temp-chat-nav')
+
+      if (!hasNavigationFlag) {
+        // Direct access - redirect to home with query parameter
+        throw redirect({
+          to: '/',
+          search: { [TEMPORARY_CHAT_QUERY_ID]: true },
+          replace: true,
+        })
+      }
+
+      // Clear the flag immediately after checking
+      sessionStorage.removeItem('temp-chat-nav')
+    }
+  },
  component: ThreadDetail,
})
function ThreadDetail() {
  const serviceHub = useServiceHub()
  const { threadId } = useParams({ from: Route.id })
+  const navigate = useNavigate()
+  const { t } = useTranslation()
  const setCurrentThreadId = useThreads((state) => state.setCurrentThreadId)
  const setCurrentAssistant = useAssistant((state) => state.setCurrentAssistant)
  const assistants = useAssistant((state) => state.assistants)
@@ -52,9 +101,33 @@ function ThreadDetail() {
  const thread = useThreads(useShallow((state) => state.threads[threadId]))
  const scrollContainerRef = useRef<HTMLDivElement>(null)
  // Get padding height for ChatGPT-style message positioning
  const { paddingHeight } = useThreadScrolling(threadId, scrollContainerRef)
+  // Listen for conversation not found events
+  useEffect(() => {
+    const handleConversationNotFound = (event: CustomEvent) => {
+      const { threadId: notFoundThreadId } = event.detail
+      if (notFoundThreadId === threadId) {
+        // Skip error handling for temporary chat - it's expected to not exist on server
+        if (threadId === TEMPORARY_CHAT_ID) {
+          return
+        }
+        toast.error(t('common:conversationNotAvailable'), {
+          description: t('common:conversationNotAvailableDescription')
+        })
+        navigate({ to: '/', replace: true })
+      }
+    }
+
+    window.addEventListener(CONVERSATION_NOT_FOUND_EVENT, handleConversationNotFound as EventListener)
+    return () => {
+      window.removeEventListener(CONVERSATION_NOT_FOUND_EVENT, handleConversationNotFound as EventListener)
+    }
+  }, [threadId, navigate])
  useEffect(() => {
    setCurrentThreadId(threadId)
    const assistant = assistants.find(
@@ -140,9 +213,15 @@ function ThreadDetail() {
    <div className="flex flex-col h-[calc(100dvh-(env(safe-area-inset-bottom)+env(safe-area-inset-top)))]">
      <HeaderPage>
        <div className="flex items-center justify-between w-full pr-2">
-          {PlatformFeatures[PlatformFeature.ASSISTANTS] && (
-            <DropdownAssistant />
-          )}
+          <div>
+            {PlatformFeatures[PlatformFeature.ASSISTANTS] && (
+              <DropdownAssistant />
+            )}
+          </div>
+          <div className="flex-1 flex justify-center">
+            {threadId === TEMPORARY_CHAT_ID && <TemporaryChatIndicator t={t} />}
+          </div>
+          <div></div>
        </div>
      </HeaderPage>
      <div className="flex flex-col h-[calc(100%-40px)]">

View File

@@ -8,10 +8,16 @@ import {
  ExtensionTypeEnum,
  ThreadMessage,
} from '@janhq/core'
+import { TEMPORARY_CHAT_ID } from '@/constants/chat'
import type { MessagesService } from './types'
export class DefaultMessagesService implements MessagesService {
  async fetchMessages(threadId: string): Promise<ThreadMessage[]> {
+    // Don't fetch messages from server for temporary chat - it's local only
+    if (threadId === TEMPORARY_CHAT_ID) {
+      return []
+    }
    return (
      ExtensionManager.getInstance()
        .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
@@ -21,6 +27,11 @@ export class DefaultMessagesService implements MessagesService {
  }
  async createMessage(message: ThreadMessage): Promise<ThreadMessage> {
+    // Don't create messages on server for temporary chat - it's local only
+    if (message.thread_id === TEMPORARY_CHAT_ID) {
+      return message
+    }
    return (
      ExtensionManager.getInstance()
        .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
@@ -30,6 +41,11 @@ export class DefaultMessagesService implements MessagesService {
  }
  async deleteMessage(threadId: string, messageId: string): Promise<void> {
+    // Don't delete messages on server for temporary chat - it's local only
+    if (threadId === TEMPORARY_CHAT_ID) {
+      return
+    }
    await ExtensionManager.getInstance()
      .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
      ?.deleteMessage(threadId, messageId)

View File

@@ -6,6 +6,7 @@ import { defaultAssistant } from '@/hooks/useAssistant'
import { ExtensionManager } from '@/lib/extension'
import { ConversationalExtension, ExtensionTypeEnum } from '@janhq/core'
import type { ThreadsService } from './types'
+import { TEMPORARY_CHAT_ID } from '@/constants/chat'
export class DefaultThreadsService implements ThreadsService {
  async fetchThreads(): Promise<Thread[]> {
@@ -16,7 +17,10 @@ export class DefaultThreadsService implements ThreadsService {
      .then((threads) => {
        if (!Array.isArray(threads)) return []
-        return threads.map((e) => {
+        // Filter out temporary threads from the list
+        const filteredThreads = threads.filter((e) => e.id !== TEMPORARY_CHAT_ID)
+        return filteredThreads.map((e) => {
          return {
            ...e,
            updated:
@@ -47,6 +51,11 @@ export class DefaultThreadsService implements ThreadsService {
  }
  async createThread(thread: Thread): Promise<Thread> {
+    // For temporary threads, bypass the conversational extension (in-memory only)
+    if (thread.id === TEMPORARY_CHAT_ID) {
+      return thread
+    }
    return (
      ExtensionManager.getInstance()
        .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
@@ -82,6 +91,11 @@ export class DefaultThreadsService implements ThreadsService {
  }
  async updateThread(thread: Thread): Promise<void> {
+    // For temporary threads, skip updating via conversational extension
+    if (thread.id === TEMPORARY_CHAT_ID) {
+      return
+    }
    await ExtensionManager.getInstance()
      .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
      ?.modifyThread({
@@ -118,6 +132,11 @@ export class DefaultThreadsService implements ThreadsService {
  }
  async deleteThread(threadId: string): Promise<void> {
+    // For temporary threads, skip deleting via conversational extension
+    if (threadId === TEMPORARY_CHAT_ID) {
+      return
+    }
    await ExtensionManager.getInstance()
      .get<ConversationalExtension>(ExtensionTypeEnum.Conversational)
      ?.deleteThread(threadId)