Merge pull request #5997 from menloresearch/release/v0.6.6

Sync Release/v0.6.6 into dev
Louis 2025-07-31 10:25:09 +07:00 committed by GitHub
commit 25fa4901c2
8 changed files with 237 additions and 157 deletions

View File

@ -866,6 +866,7 @@ export default class llamacpp_extension extends AIEngine {
const files = await fs.readdirSync(currentDir)
for (const child of files) {
try {
const childPath = await joinPath([currentDir, child])
const stat = await fs.fileStat(childPath)
if (
@ -926,6 +927,9 @@ export default class llamacpp_extension extends AIEngine {
continue
}
}
} catch (error) {
console.error(`Error migrating model ${child}:`, error)
}
}
// otherwise, look into subdirectories
@ -1093,9 +1097,7 @@ export default class llamacpp_extension extends AIEngine {
attempts++
}
- throw new Error(
-   'Failed to find an available port for the model to load'
- )
+ throw new Error('Failed to find an available port for the model to load')
}
private async sleep(ms: number): Promise<void> {
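The hunks above run each child entry's migration inside its own try/catch, so a single unreadable or corrupt model directory is logged and skipped instead of aborting the whole scan. A minimal sketch of that per-entry isolation pattern; `fs`, `joinPath`, and `migrateModel` here are placeholders passed in as parameters, not the extension's actual helpers:

```ts
// Minimal sketch of per-entry error isolation during model migration.
// The injected `fs`, `joinPath`, and `migrateModel` stand in for the extension's real APIs.
async function migrateModelsIn(
  currentDir: string,
  fs: { readdirSync(dir: string): Promise<string[]> },
  joinPath: (parts: string[]) => Promise<string>,
  migrateModel: (path: string) => Promise<void>
): Promise<void> {
  const files = await fs.readdirSync(currentDir)
  for (const child of files) {
    try {
      const childPath = await joinPath([currentDir, child])
      await migrateModel(childPath)
    } catch (error) {
      // A failed entry is logged and skipped; the loop continues with the next model.
      console.error(`Error migrating model ${child}:`, error)
    }
  }
}
```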

View File

@ -42,6 +42,7 @@ import { toast } from 'sonner'
import { DownloadManagement } from '@/containers/DownloadManegement'
import { useSmallScreen } from '@/hooks/useMediaQuery'
import { useClickOutside } from '@/hooks/useClickOutside'
+ import { useDownloadStore } from '@/hooks/useDownloadStore'
const mainMenus = [
{
@ -171,6 +172,8 @@ const LeftPanel = () => {
}
}, [isSmallScreen, open])
+ const { downloads, localDownloadingModels } = useDownloadStore()
return (
<>
{/* Backdrop overlay for small screens */}
@ -253,7 +256,14 @@ const LeftPanel = () => {
</div>
<div className="flex flex-col justify-between overflow-hidden mt-0 !h-[calc(100%-42px)]">
<div className="flex flex-col !h-[calc(100%-140px)]">
<div
className={cn(
'flex flex-col',
Object.keys(downloads).length > 0 || localDownloadingModels.size > 0
? 'h-[calc(100%-200px)]'
: 'h-[calc(100%-140px)]'
)}
>
{IS_MACOS && (
<div
ref={searchContainerMacRef}
@ -486,9 +496,9 @@ const LeftPanel = () => {
</Link>
)
})}
</div>
<DownloadManagement />
</div>
</div>
</aside>
</>
)

View File

@ -52,7 +52,7 @@ export default function LoadModelErrorDialog() {
<div>
<DialogTitle>{t('common:error')}</DialogTitle>
<DialogDescription className="mt-1 text-main-view-fg/70">
- Failed to load model
+ Something went wrong
</DialogDescription>
</div>
</div>

View File

@ -247,8 +247,7 @@ export const useChat = () => {
messages,
currentAssistant?.instructions
)
- builder.addUserMessage(message)
+ if (troubleshooting) builder.addUserMessage(message)
let isCompleted = false

View File

@ -1,3 +1,4 @@
+ import { describe, it, expect } from 'vitest'
import { CompletionMessagesBuilder } from '../messages'
import { ThreadMessage } from '@janhq/core'
import { ChatCompletionMessageToolCall } from 'openai/resources'
@ -66,7 +67,10 @@ describe('CompletionMessagesBuilder', () => {
it('should normalize assistant message content', () => {
const messages: ThreadMessage[] = [
- createMockThreadMessage('assistant', '<think>Let me think...</think>Hello there!'),
+ createMockThreadMessage(
+   'assistant',
+   '<think>Let me think...</think>Hello there!'
+ ),
]
const builder = new CompletionMessagesBuilder(messages)
@ -78,14 +82,19 @@ describe('CompletionMessagesBuilder', () => {
it('should preserve user message content without normalization', () => {
const messages: ThreadMessage[] = [
- createMockThreadMessage('user', '<think>This should not be normalized</think>Hello'),
+ createMockThreadMessage(
+   'user',
+   '<think>This should not be normalized</think>Hello'
+ ),
]
const builder = new CompletionMessagesBuilder(messages)
const result = builder.getMessages()
expect(result).toHaveLength(1)
- expect(result[0].content).toBe('<think>This should not be normalized</think>Hello')
+ expect(result[0].content).toBe(
+   '<think>This should not be normalized</think>Hello'
+ )
})
it('should handle messages with empty content', () => {
@ -104,7 +113,9 @@ describe('CompletionMessagesBuilder', () => {
it('should handle messages with missing text value', () => {
const message: ThreadMessage = {
...createMockThreadMessage('user', ''),
- content: [{ type: 'text' as any, text: { value: '', annotations: [] } }],
+ content: [
+   { type: 'text' as any, text: { value: '', annotations: [] } },
+ ],
}
const builder = new CompletionMessagesBuilder([message])
@ -129,16 +140,15 @@ describe('CompletionMessagesBuilder', () => {
})
})
- it('should add multiple user messages', () => {
+ it('should not add consecutive user messages', () => {
const builder = new CompletionMessagesBuilder([])
builder.addUserMessage('First message')
builder.addUserMessage('Second message')
const result = builder.getMessages()
- expect(result).toHaveLength(2)
- expect(result[0].content).toBe('First message')
- expect(result[1].content).toBe('Second message')
+ expect(result).toHaveLength(1)
+ expect(result[0].content).toBe('Second message')
})
it('should handle empty user message', () => {
@ -171,7 +181,10 @@ describe('CompletionMessagesBuilder', () => {
it('should add assistant message with refusal', () => {
const builder = new CompletionMessagesBuilder([])
- builder.addAssistantMessage('I cannot help with that', 'Content policy violation')
+ builder.addAssistantMessage(
+   'I cannot help with that',
+   'Content policy violation'
+ )
const result = builder.getMessages()
expect(result).toHaveLength(1)
@ -196,7 +209,11 @@ describe('CompletionMessagesBuilder', () => {
},
]
- builder.addAssistantMessage('Let me check the weather', undefined, toolCalls)
+ builder.addAssistantMessage(
+   'Let me check the weather',
+   undefined,
+   toolCalls
+ )
const result = builder.getMessages()
expect(result).toHaveLength(1)
@ -282,19 +299,21 @@ describe('CompletionMessagesBuilder', () => {
const threadMessages: ThreadMessage[] = [
createMockThreadMessage('user', 'Hello'),
]
- const builder = new CompletionMessagesBuilder(threadMessages, 'You are helpful')
+ const builder = new CompletionMessagesBuilder(
+   threadMessages,
+   'You are helpful'
+ )
builder.addUserMessage('How are you?')
builder.addAssistantMessage('I am well, thank you!')
builder.addToolMessage('Tool response', 'call_123')
const result = builder.getMessages()
- expect(result).toHaveLength(5)
+ expect(result).toHaveLength(4)
expect(result[0].role).toBe('system')
expect(result[1].role).toBe('user')
- expect(result[2].role).toBe('user')
- expect(result[3].role).toBe('assistant')
- expect(result[4].role).toBe('tool')
+ expect(result[2].role).toBe('assistant')
+ expect(result[3].role).toBe('tool')
})
it('should return the same array reference (not immutable)', () => {
@ -317,7 +336,9 @@ describe('CompletionMessagesBuilder', () => {
it('should remove thinking content from the beginning', () => {
const builder = new CompletionMessagesBuilder([])
- builder.addAssistantMessage('<think>Let me analyze this...</think>The answer is 42.')
+ builder.addAssistantMessage(
+   '<think>Let me analyze this...</think>The answer is 42.'
+ )
const result = builder.getMessages()
expect(result[0].content).toBe('The answer is 42.')
@ -326,7 +347,9 @@ describe('CompletionMessagesBuilder', () => {
it('should handle nested thinking tags', () => {
const builder = new CompletionMessagesBuilder([])
- builder.addAssistantMessage('<think>First thought<think>Nested</think>More thinking</think>Final answer')
+ builder.addAssistantMessage(
+   '<think>First thought<think>Nested</think>More thinking</think>Final answer'
+ )
const result = builder.getMessages()
expect(result[0].content).toBe('More thinking</think>Final answer')
@ -335,7 +358,9 @@ describe('CompletionMessagesBuilder', () => {
it('should handle multiple thinking blocks', () => {
const builder = new CompletionMessagesBuilder([])
- builder.addAssistantMessage('<think>First</think>Answer<think>Second</think>More content')
+ builder.addAssistantMessage(
+   '<think>First</think>Answer<think>Second</think>More content'
+ )
const result = builder.getMessages()
expect(result[0].content).toBe('Answer<think>Second</think>More content')
@ -362,16 +387,22 @@ describe('CompletionMessagesBuilder', () => {
it('should handle unclosed thinking tags', () => {
const builder = new CompletionMessagesBuilder([])
- builder.addAssistantMessage('<think>Unclosed thinking tag... Regular content')
+ builder.addAssistantMessage(
+   '<think>Unclosed thinking tag... Regular content'
+ )
const result = builder.getMessages()
- expect(result[0].content).toBe('<think>Unclosed thinking tag... Regular content')
+ expect(result[0].content).toBe(
+   '<think>Unclosed thinking tag... Regular content'
+ )
})
it('should handle thinking tags with whitespace', () => {
const builder = new CompletionMessagesBuilder([])
- builder.addAssistantMessage('<think> \n Some thinking \n </think> \n Clean answer')
+ builder.addAssistantMessage(
+   '<think> \n Some thinking \n </think> \n Clean answer'
+ )
const result = builder.getMessages()
expect(result[0].content).toBe('Clean answer')
@ -382,10 +413,16 @@ describe('CompletionMessagesBuilder', () => {
it('should handle complex conversation flow', () => {
const threadMessages: ThreadMessage[] = [
createMockThreadMessage('user', 'What is the weather like?'),
- createMockThreadMessage('assistant', '<think>I need to call weather API</think>Let me check the weather for you.'),
+ createMockThreadMessage(
+   'assistant',
+   '<think>I need to call weather API</think>Let me check the weather for you.'
+ ),
]
- const builder = new CompletionMessagesBuilder(threadMessages, 'You are a weather assistant')
+ const builder = new CompletionMessagesBuilder(
+   threadMessages,
+   'You are a weather assistant'
+ )
// Add tool call and response
const toolCalls: ChatCompletionMessageToolCall[] = [
@ -399,9 +436,18 @@ describe('CompletionMessagesBuilder', () => {
},
]
- builder.addAssistantMessage('Calling weather service...', undefined, toolCalls)
- builder.addToolMessage('{"temperature": 72, "condition": "sunny"}', 'call_weather')
- builder.addAssistantMessage('<think>The weather is nice</think>The weather is 72°F and sunny!')
+ builder.addAssistantMessage(
+   'Calling weather service...',
+   undefined,
+   toolCalls
+ )
+ builder.addToolMessage(
+   '{"temperature": 72, "condition": "sunny"}',
+   'call_weather'
+ )
+ builder.addAssistantMessage(
+   '<think>The weather is nice</think>The weather is 72°F and sunny!'
+ )
const result = builder.getMessages()

View File

@ -26,7 +26,7 @@ export class CompletionMessagesBuilder {
content:
msg.role === 'assistant'
? this.normalizeContent(msg.content[0]?.text?.value || '.')
- : (msg.content[0]?.text?.value || '.'),
+ : msg.content[0]?.text?.value || '.',
}) as ChatCompletionMessageParam
)
)
@ -37,6 +37,10 @@ export class CompletionMessagesBuilder {
* @param content - The content of the user message.
*/
addUserMessage(content: string) {
+ // Ensure no consecutive user messages
+ if (this.messages[this.messages.length - 1]?.role === 'user') {
+   this.messages.pop()
+ }
this.messages.push({
role: 'user',
content: content,
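With this guard, `addUserMessage` drops a trailing user turn before pushing the new one, so consecutive user messages collapse into the latest. A small usage sketch mirroring the updated test expectations (the import path is an assumption):

```ts
import { CompletionMessagesBuilder } from './messages'

// Consecutive user messages collapse into the most recent one.
const builder = new CompletionMessagesBuilder([])
builder.addUserMessage('First message')
builder.addUserMessage('Second message')

const result = builder.getMessages()
console.log(result.length) // 1
console.log(result[0].content) // 'Second message'
```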

View File

@ -233,7 +233,6 @@ function MCPServers() {
return () => clearInterval(intervalId)
}, [setConnectedServers])
return (
<div className="flex flex-col h-full">
<HeaderPage>
@ -354,7 +353,9 @@ function MCPServers() {
<div
className="size-6 cursor-pointer flex items-center justify-center rounded hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out"
onClick={() => handleOpenJsonEditor(key)}
- title={t('mcp-servers:editJson')}
+ title={t('mcp-servers:editJson.title', {
+   serverName: key,
+ })}
>
<IconCodeCircle
size={18}
@ -374,7 +375,7 @@ function MCPServers() {
<div
className="size-6 cursor-pointer flex items-center justify-center rounded hover:bg-main-view-fg/10 transition-all duration-200 ease-in-out"
onClick={() => handleDeleteClick(key)}
- title={t('mcp-servers:deleteServer')}
+ title={t('mcp-servers:deleteServer.title')}
>
<IconTrash
size={18}

View File

@ -39,7 +39,7 @@ function ThreadDetail() {
const lastScrollTopRef = useRef(0)
const { currentThreadId, setCurrentThreadId } = useThreads()
const { setCurrentAssistant, assistants } = useAssistant()
- const { setMessages } = useMessages()
+ const { setMessages, deleteMessage } = useMessages()
const { streamingContent } = useAppState()
const { appMainViewBgColor, chatWidth } = useAppearance()
const { sendMessage } = useChat()
@ -221,8 +221,23 @@ function ThreadDetail() {
// used when there is a sent/added user message and no assistant message (error or manual deletion)
const generateAIResponse = () => {
const latestUserMessage = messages[messages.length - 1]
- if (latestUserMessage?.content?.[0]?.text?.value) {
+ if (
+   latestUserMessage?.content?.[0]?.text?.value &&
+   latestUserMessage.role === 'user'
+ ) {
sendMessage(latestUserMessage.content[0].text.value, false)
+ } else if (latestUserMessage?.metadata?.tool_calls) {
+   // Only regenerate assistant message is allowed
+   const threadMessages = [...messages]
+   let toSendMessage = threadMessages.pop()
+   while (toSendMessage && toSendMessage?.role !== 'user') {
+     deleteMessage(toSendMessage.thread_id, toSendMessage.id ?? '')
+     toSendMessage = threadMessages.pop()
+   }
+   if (toSendMessage) {
+     deleteMessage(toSendMessage.thread_id, toSendMessage.id ?? '')
+     sendMessage(toSendMessage.content?.[0]?.text?.value || '')
+   }
+ }
}
}
@ -232,7 +247,10 @@ function ThreadDetail() {
const showScrollToBottomBtn = !isAtBottom && hasScrollbar
const showGenerateAIResponseBtn =
- messages[messages.length - 1]?.role === 'user' && !streamingContent
+ (messages[messages.length - 1]?.role === 'user' ||
+   (messages[messages.length - 1]?.metadata &&
+     'tool_calls' in (messages[messages.length - 1].metadata ?? {}))) &&
+ !streamingContent
return (
<div className="flex flex-col h-full">