fix: token count error (#6680)

This commit is contained in:
Louis 2025-10-01 14:07:32 +07:00 committed by GitHub
parent 7a36ed238c
commit e0ab77cb24
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 50 additions and 30 deletions

View File

@ -3,6 +3,7 @@ import { ThreadMessage, ContentType } from '@janhq/core'
import { useServiceHub } from './useServiceHub'
import { useModelProvider } from './useModelProvider'
import { usePrompt } from './usePrompt'
import { removeReasoningContent } from '@/utils/reasoning'
export interface TokenCountData {
tokenCount: number
@ -69,7 +70,19 @@ export const useTokensCount = (
} as ThreadMessage)
}
}
return result
return result.map((e) => ({
...e,
content: e.content.map((c) => ({
...c,
text:
c.type === 'text'
? {
value: removeReasoningContent(c.text?.value ?? '.'),
annotations: [],
}
: c.text,
})),
}))
}, [messages, prompt, uploadedFiles])
// Debounced calculation that includes current prompt

View File

@ -2,6 +2,7 @@
import { ChatCompletionMessageParam } from 'token.js'
import { ChatCompletionMessageToolCall } from 'openai/resources'
import { ThreadMessage } from '@janhq/core'
import { removeReasoningContent } from '@/utils/reasoning'
/**
* @fileoverview Helper functions for creating chat completion request.
@ -24,7 +25,7 @@ export class CompletionMessagesBuilder {
if (msg.role === 'assistant') {
return {
role: msg.role,
content: this.normalizeContent(
content: removeReasoningContent(
msg.content[0]?.text?.value || '.'
),
} as ChatCompletionMessageParam
@ -135,7 +136,7 @@ export class CompletionMessagesBuilder {
) {
this.messages.push({
role: 'assistant',
content: this.normalizeContent(content),
content: removeReasoningContent(content),
refusal: refusal,
tool_calls: calls,
})
@ -202,30 +203,4 @@ export class CompletionMessagesBuilder {
return result
}
/**
 * Strip leading "reasoning" segments from assistant text so they are
 * never sent back to the model as part of the conversation history.
 * Handles two marker styles: `<think>…</think>` and the
 * `<|channel|>analysis<|message|>…<|start|>assistant<|channel|>final<|message|>` channel markers.
 * @param content raw assistant message text
 * @returns the text that follows the reasoning block, trimmed; unchanged
 *          when no complete reasoning block is found
 */
private normalizeContent = (content: string): string => {
  let text = content
  // <think>…</think> style reasoning prefix
  if (text.includes('<think>')) {
    const hit = text.match(/<think>([\s\S]*?)<\/think>/)
    if (hit?.index !== undefined) {
      text = text.slice(hit.index + hit[0].length).trim()
    }
  }
  // analysis-channel style reasoning prefix
  if (text.includes('<|channel|>analysis<|message|>')) {
    const hit = text.match(
      /<\|channel\|>analysis<\|message\|>([\s\S]*?)<\|start\|>assistant<\|channel\|>final<\|message\|>/
    )
    if (hit?.index !== undefined) {
      text = text.slice(hit.index + hit[0].length).trim()
    }
  }
  return text
}
}

View File

@ -6,10 +6,42 @@ import {
} from '@janhq/core'
// Helper function to get reasoning content from an object
function getReasoning(obj: { reasoning_content?: string | null; reasoning?: string | null } | null | undefined): string | null {
function getReasoning(
obj:
| { reasoning_content?: string | null; reasoning?: string | null }
| null
| undefined
): string | null {
return obj?.reasoning_content ?? obj?.reasoning ?? null
}
/**
 * Normalize the content of a message by removing reasoning content.
 * This is useful to ensure that reasoning content does not get sent to the model.
 * Only the first complete reasoning block of each style is stripped; text
 * with an unterminated marker is returned unchanged.
 * @param content raw assistant message text
 * @returns the text following the reasoning block, trimmed
 */
export function removeReasoningContent(content: string): string {
  // Each entry pairs a cheap substring probe with the full pattern whose
  // end marks where the user-visible answer begins.
  const reasoningMarkers: Array<{ probe: string; pattern: RegExp }> = [
    { probe: '<think>', pattern: /<think>([\s\S]*?)<\/think>/ },
    {
      probe: '<|channel|>analysis<|message|>',
      pattern:
        /<\|channel\|>analysis<\|message\|>([\s\S]*?)<\|start\|>assistant<\|channel\|>final<\|message\|>/,
    },
  ]
  let text = content
  for (const { probe, pattern } of reasoningMarkers) {
    if (!text.includes(probe)) continue
    const hit = text.match(pattern)
    if (hit?.index !== undefined) {
      text = text.slice(hit.index + hit[0].length).trim()
    }
  }
  return text
}
// Extract reasoning from a message (for completed responses)
export function extractReasoningFromMessage(
message: chatCompletionRequestMessage | ChatCompletionMessage