UI/UX enhancement

Commit 340042682a (parent 6dd2d2d6c1)
@@ -25,6 +25,6 @@ export { MCPExtension } from './mcp'
export * from './engines'
export { RAGExtension, RAG_INTERNAL_SERVER } from './rag'
export type { AttachmentInput } from './rag'
export type { AttachmentInput, IngestAttachmentsResult } from './rag'
export { VectorDBExtension } from './vector-db'
export type { SearchMode, VectorDBStatus, VectorChunkInput, VectorSearchResult, AttachmentFileInfo } from './vector-db'
export type { SearchMode, VectorDBStatus, VectorChunkInput, VectorSearchResult, AttachmentFileInfo, VectorDBFileInput, VectorDBIngestOptions } from './vector-db'
@@ -1,5 +1,6 @@
import { BaseExtension, ExtensionTypeEnum } from '../extension'
import type { MCPTool, MCPToolCallResult } from '../../types'
import type { AttachmentFileInfo } from './vector-db'

export interface AttachmentInput {
  path: string
@@ -8,6 +9,12 @@ export interface AttachmentInput {
  size?: number
}

export interface IngestAttachmentsResult {
  filesProcessed: number
  chunksInserted: number
  files: AttachmentFileInfo[]
}

export const RAG_INTERNAL_SERVER = 'rag-internal'

/**
@@ -25,8 +32,5 @@ export abstract class RAGExtension extends BaseExtension {
  abstract getToolNames(): Promise<string[]>
  abstract callTool(toolName: string, args: Record<string, unknown>): Promise<MCPToolCallResult>

  abstract ingestAttachments(threadId: string, files: AttachmentInput[]): Promise<{
    filesProcessed: number
    chunksInserted: number
  } | void>
  abstract ingestAttachments(threadId: string, files: AttachmentInput[]): Promise<IngestAttachmentsResult>
}
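For orientation, a minimal sketch of how a caller might consume the new ingestAttachments contract; the rag instance and values here are hypothetical, not part of this commit:

// `rag` is assumed to be a concrete RAGExtension obtained from the extension manager.
const result: IngestAttachmentsResult = await rag.ingestAttachments('thread-123', [
  { path: '/tmp/report.pdf', name: 'report.pdf', type: 'application/pdf' },
])
// Unlike the old `| void` contract, the result now always carries per-file info with real IDs.
console.log(`${result.filesProcessed} file(s), ${result.chunksInserted} chunk(s)`)
for (const f of result.files) console.log(f.id, f.chunk_count)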
@@ -7,10 +7,8 @@ export interface VectorDBStatus {
}

export interface VectorChunkInput {
  id?: string
  text: string
  embedding: number[]
  metadata?: Record<string, any>
}

export interface VectorSearchResult {
@@ -30,6 +28,19 @@ export interface AttachmentFileInfo {
  chunk_count: number
}

// High-level input types for file ingestion
export interface VectorDBFileInput {
  path: string
  name?: string
  type?: string
  size?: number
}

export interface VectorDBIngestOptions {
  chunkSize: number
  chunkOverlap: number
}

/**
 * Vector DB extension base: abstraction over local vector storage and search.
 */
@@ -39,22 +50,31 @@ export abstract class VectorDBExtension extends BaseExtension {
  }

  abstract getStatus(): Promise<VectorDBStatus>
  abstract createCollection(name: string, dimension: number): Promise<void>
  abstract insertChunks(collection: string, chunks: VectorChunkInput[]): Promise<void>
  abstract createCollection(threadId: string, dimension: number): Promise<void>
  abstract insertChunks(
    threadId: string,
    fileId: string,
    chunks: VectorChunkInput[]
  ): Promise<void>
  abstract ingestFile(
    threadId: string,
    file: VectorDBFileInput,
    opts: VectorDBIngestOptions
  ): Promise<AttachmentFileInfo>
  abstract searchCollection(
    collection: string,
    threadId: string,
    query_embedding: number[],
    limit: number,
    threshold: number,
    mode?: SearchMode,
    fileIds?: string[]
  ): Promise<VectorSearchResult[]>
  abstract deleteChunks(collection: string, ids: string[]): Promise<void>
  abstract deleteCollection(collection: string): Promise<void>
  abstract chunkText(text: string, chunkSize: number, chunkOverlap: number): Promise<string[]>
  abstract listAttachments(collection: string, limit?: number): Promise<AttachmentFileInfo[]>
  abstract deleteChunks(threadId: string, ids: string[]): Promise<void>
  abstract deleteFile(threadId: string, fileId: string): Promise<void>
  abstract deleteCollection(threadId: string): Promise<void>
  abstract listAttachments(threadId: string, limit?: number): Promise<AttachmentFileInfo[]>
  abstract getChunks(
    collection: string,
    threadId: string,
    fileId: string,
    startOrder: number,
    endOrder: number
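A hedged sketch of the intended call sequence against this thread-scoped surface; the exact new searchCollection parameter order is inferred from the diff, and vec and queryEmbedding are assumed to exist:

// All identifiers below are illustrative; the extension maps threadId to a collection internally.
declare const vec: VectorDBExtension
declare const queryEmbedding: number[]

const info = await vec.ingestFile(
  'thread-123',
  { path: '/tmp/report.pdf', name: 'report.pdf', type: 'application/pdf' },
  { chunkSize: 512, chunkOverlap: 64 }
)
// Search is scoped by threadId and can optionally be narrowed to specific file IDs.
const hits = await vec.searchCollection('thread-123', queryEmbedding, 5, 0.3, 'auto', [info.id])
await vec.deleteFile('thread-123', info.id)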
@@ -1,7 +1,5 @@
import { RAGExtension, MCPTool, MCPToolCallResult, ExtensionTypeEnum, VectorDBExtension, type AttachmentInput, type SettingComponentProps, AIEngine } from '@janhq/core'
import { RAGExtension, MCPTool, MCPToolCallResult, ExtensionTypeEnum, VectorDBExtension, type AttachmentInput, type SettingComponentProps, AIEngine, type AttachmentFileInfo } from '@janhq/core'
import './env.d'
import * as ragApi from '@janhq/tauri-plugin-rag-api'
import * as vecdbApi from '@janhq/tauri-plugin-vector-db-api'
import { getRAGTools, RETRIEVE, LIST_ATTACHMENTS, GET_CHUNKS } from './tools'

export default class RagExtension extends RAGExtension {
@@ -75,11 +73,10 @@ export default class RagExtension extends RAGExtension {
    }
    try {
      const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension
      const collection = `attachments_${threadId}`
      if (!vec?.listAttachments) {
        return { error: 'Vector DB extension missing listAttachments', content: [{ type: 'text', text: 'Vector DB extension missing listAttachments' }] }
      }
      const files = await vec.listAttachments(collection)
      const files = await vec.listAttachments(threadId)
      return {
        error: '',
        content: [
@@ -143,9 +140,8 @@ export default class RagExtension extends RAGExtension {
      }
    }

    const collection = `attachments_${threadId}`
    const results = await vec.searchCollection(
      collection,
      threadId,
      queryEmb,
      topK,
      threshold,
@@ -163,7 +159,6 @@ export default class RagExtension extends RAGExtension {
        file_id: r.file_id,
        chunk_file_order: r.chunk_file_order
      })) ?? [],
      collection,
      mode,
    }
    return { error: '', content: [{ type: 'text', text: JSON.stringify(payload) }] }
@@ -203,8 +198,7 @@ export default class RagExtension extends RAGExtension {
      }
    }

    const collection = `attachments_${threadId}`
    const chunks = await vec.getChunks(collection, fileId, startOrder, endOrder)
    const chunks = await vec.getChunks(threadId, fileId, startOrder, endOrder)

    const payload = {
      thread_id: threadId,
@@ -222,8 +216,15 @@ export default class RagExtension extends RAGExtension {
  async ingestAttachments(
    threadId: string,
    files: AttachmentInput[]
  ): Promise<{ filesProcessed: number; chunksInserted: number } | void> {
    if (!threadId || !Array.isArray(files) || files.length === 0) return
  ): Promise<{ filesProcessed: number; chunksInserted: number; files: AttachmentFileInfo[] }> {
    if (!threadId || !Array.isArray(files) || files.length === 0) {
      return { filesProcessed: 0, chunksInserted: 0, files: [] }
    }

    // Respect feature flag: do nothing when disabled
    if (this.config.enabled === false) {
      return { filesProcessed: 0, chunksInserted: 0, files: [] }
    }

    const vec = window.core?.extensionManager.get(ExtensionTypeEnum.VectorDB) as unknown as VectorDBExtension
    if (!vec?.createCollection || !vec?.insertChunks) {
@@ -237,41 +238,32 @@ export default class RagExtension extends RAGExtension {
    const chunkOverlap = s?.overlapTokens as number | undefined

    let totalChunks = 0
    let processed = 0
    const collection = `attachments_${threadId}`
    let created = false
    const processedFiles: AttachmentFileInfo[] = []

    for (const f of files) {
      if (!f?.path) continue
      if (maxSize && f.size && f.size > maxSize * 1024 * 1024) continue
      if (maxSize && f.size && f.size > maxSize * 1024 * 1024) {
        throw new Error(`File '${f.name}' exceeds size limit (${f.size} bytes > ${maxSize} MB).`)
      }

      const type = f.type || 'application/octet-stream'
      const chunks = await this.parseAndEmbed(f.path, type, {
        chunkSize: chunkSize ?? 512,
        chunkOverlap: chunkOverlap ?? 64,
        meta: { file: { name: f.name || f.path.split(/[\\/]/).pop(), path: f.path, type: f.type, size: f.size }, threadId },
      })

      if (!chunks?.length) {
        processed += 1
      const fileName = f.name || f.path.split(/[\\/]/).pop()
      // Preferred/required path: let Vector DB extension handle full file ingestion
      const canIngestFile = typeof (vec as any)?.ingestFile === 'function'
      if (!canIngestFile) {
        console.error('[RAG] Vector DB extension missing ingestFile; cannot ingest document')
        continue
      }

      // Ensure collection
      if (!created) {
        await vec.createCollection(collection, chunks[0].embedding.length)
        created = true
      }

      await vec.insertChunks(
        collection,
        chunks.map((c) => ({ text: c.text, embedding: c.embedding, metadata: c.metadata }))
      const info = await (vec as VectorDBExtension).ingestFile(
        threadId,
        { path: f.path, name: fileName, type: f.type, size: f.size },
        { chunkSize: chunkSize ?? 512, chunkOverlap: chunkOverlap ?? 64 }
      )
      totalChunks += chunks.length
      processed += 1
      totalChunks += Number(info?.chunk_count || 0)
      processedFiles.push(info)
    }

    return { filesProcessed: processed, chunksInserted: totalChunks }
    // Return files we ingested with real IDs directly from ingestFile
    return { filesProcessed: processedFiles.length, chunksInserted: totalChunks, files: processedFiles }
  }

  onSettingUpdate<T>(key: string, value: T): void {
@@ -310,19 +302,4 @@ export default class RagExtension extends RAGExtension {
    for (const item of data) out[item.index] = item.embedding
    return out
  }

  private async parseAndEmbed(
    filePath: string,
    fileType: string,
    opts: { chunkSize: number; chunkOverlap: number; meta?: Record<string, any> }
  ): Promise<Array<{ text: string; embedding: number[]; metadata?: Record<string, any> }>> {
    const text = await ragApi.parseDocument(filePath, fileType)
    const chunks = await vecdbApi.chunkText(text, opts.chunkSize, opts.chunkOverlap)
    const embeddings = await this.embedTexts(chunks)
    return chunks.map((text, i) => ({
      text,
      embedding: embeddings[i],
      metadata: opts.meta,
    }))
  }
}
@@ -19,6 +19,7 @@
  },
  "dependencies": {
    "@janhq/core": "../../core/package.tgz",
    "@janhq/tauri-plugin-rag-api": "link:../../src-tauri/plugins/tauri-plugin-rag",
    "@janhq/tauri-plugin-vector-db-api": "link:../../src-tauri/plugins/tauri-plugin-vector-db"
  },
  "files": [
@@ -1,5 +1,6 @@
import { VectorDBExtension, type SearchMode, type VectorDBStatus, type VectorChunkInput, type VectorSearchResult, type AttachmentFileInfo } from '@janhq/core'
import { VectorDBExtension, type SearchMode, type VectorDBStatus, type VectorChunkInput, type VectorSearchResult, type AttachmentFileInfo, type VectorDBFileInput, type VectorDBIngestOptions, AIEngine } from '@janhq/core'
import * as vecdb from '@janhq/tauri-plugin-vector-db-api'
import * as ragApi from '@janhq/tauri-plugin-rag-api'

export default class VectorDBExt extends VectorDBExtension {
  async onLoad(): Promise<void> {
@@ -12,47 +13,95 @@ export default class VectorDBExt extends VectorDBExtension {
    return await vecdb.getStatus() as VectorDBStatus
  }

  async createCollection(name: string, dimension: number): Promise<void> {
    return await vecdb.createCollection(name, dimension)
  private collectionForThread(threadId: string): string {
    return `attachments_${threadId}`
  }

  async insertChunks(collection: string, chunks: VectorChunkInput[]): Promise<void> {
    return await vecdb.insertChunks(collection, chunks)
  async createCollection(threadId: string, dimension: number): Promise<void> {
    return await vecdb.createCollection(this.collectionForThread(threadId), dimension)
  }

  async insertChunks(threadId: string, fileId: string, chunks: VectorChunkInput[]): Promise<void> {
    return await vecdb.insertChunks(this.collectionForThread(threadId), fileId, chunks)
  }

  async searchCollection(
    collection: string,
    threadId: string,
    query_embedding: number[],
    limit: number,
    threshold: number,
    mode?: SearchMode,
    fileIds?: string[]
  ): Promise<VectorSearchResult[]> {
    return await vecdb.searchCollection(collection, query_embedding, limit, threshold, mode, fileIds) as VectorSearchResult[]
    return await vecdb.searchCollection(this.collectionForThread(threadId), query_embedding, limit, threshold, mode, fileIds) as VectorSearchResult[]
  }

  async deleteChunks(collection: string, ids: string[]): Promise<void> {
    return await vecdb.deleteChunks(collection, ids)
  async deleteChunks(threadId: string, ids: string[]): Promise<void> {
    return await vecdb.deleteChunks(this.collectionForThread(threadId), ids)
  }

  async deleteCollection(collection: string): Promise<void> {
    return await vecdb.deleteCollection(collection)
  async deleteCollection(threadId: string): Promise<void> {
    return await vecdb.deleteCollection(this.collectionForThread(threadId))
  }

  async chunkText(text: string, chunkSize: number, chunkOverlap: number): Promise<string[]> {
  // Optional helper for chunking
  private async chunkText(text: string, chunkSize: number, chunkOverlap: number): Promise<string[]> {
    return await vecdb.chunkText(text, chunkSize, chunkOverlap)
  }

  async listAttachments(collection: string, limit?: number): Promise<AttachmentFileInfo[]> {
    return await vecdb.listAttachments(collection, limit) as AttachmentFileInfo[]
  private async embedTexts(texts: string[]): Promise<number[][]> {
    const llm = window.core?.extensionManager.getByName('@janhq/llamacpp-extension') as AIEngine & { embed?: (texts: string[]) => Promise<{ data: Array<{ embedding: number[]; index: number }> }> }
    if (!llm?.embed) throw new Error('llamacpp extension not available')
    const res = await llm.embed(texts)
    const data: Array<{ embedding: number[]; index: number }> = res?.data || []
    const out: number[][] = new Array(texts.length)
    for (const item of data) out[item.index] = item.embedding
    return out
  }

  async ingestFile(threadId: string, file: VectorDBFileInput, opts: VectorDBIngestOptions): Promise<AttachmentFileInfo> {
    // Check for duplicate file (same name + path)
    const existingFiles = await vecdb.listAttachments(this.collectionForThread(threadId)).catch(() => [])
    const duplicate = existingFiles.find((f: any) => f.name === file.name && f.path === file.path)
    if (duplicate) {
      throw new Error(`File '${file.name}' has already been attached to this thread`)
    }

    const text = await ragApi.parseDocument(file.path, file.type || 'application/octet-stream')
    const chunks = await this.chunkText(text, opts.chunkSize, opts.chunkOverlap)
    if (!chunks.length) {
      const fi = await vecdb.createFile(this.collectionForThread(threadId), file)
      return fi
    }
    const embeddings = await this.embedTexts(chunks)
    const dimension = embeddings[0]?.length || 0
    if (dimension <= 0) throw new Error('Embedding dimension not available')
    await this.createCollection(threadId, dimension)
    const fi = await vecdb.createFile(this.collectionForThread(threadId), file)
    await vecdb.insertChunks(
      this.collectionForThread(threadId),
      fi.id,
      chunks.map((t, i) => ({ text: t, embedding: embeddings[i] }))
    )
    const infos = await vecdb.listAttachments(this.collectionForThread(threadId))
    const updated = infos.find((e) => e.id === fi.id)
    return updated || { ...fi, chunk_count: chunks.length }
  }

  async listAttachments(threadId: string, limit?: number): Promise<AttachmentFileInfo[]> {
    return await vecdb.listAttachments(this.collectionForThread(threadId), limit) as AttachmentFileInfo[]
  }

  async getChunks(
    collection: string,
    threadId: string,
    fileId: string,
    startOrder: number,
    endOrder: number
  ): Promise<VectorSearchResult[]> {
    return await vecdb.getChunks(collection, fileId, startOrder, endOrder) as VectorSearchResult[]
    return await vecdb.getChunks(this.collectionForThread(threadId), fileId, startOrder, endOrder) as VectorSearchResult[]
  }

  async deleteFile(threadId: string, fileId: string): Promise<void> {
    return await vecdb.deleteFile(this.collectionForThread(threadId), fileId)
  }
}
src-tauri/Cargo.lock (generated, 1513 changes): file diff suppressed because it is too large.
@@ -63,7 +63,9 @@ log = "0.4"
rmcp = { version = "0.6.0", features = [
    "client",
    "transport-sse-client",
    "transport-sse-client-reqwest",
    "transport-streamable-http-client",
    "transport-streamable-http-client-reqwest",
    "transport-child-process",
    "tower",
    "reqwest",
@@ -13,8 +13,24 @@ use std::borrow::Cow;

pub fn parse_pdf(file_path: &str) -> Result<String, RagError> {
    let bytes = fs::read(file_path)?;
    pdf_extract::extract_text_from_mem(&bytes)
        .map_err(|e| RagError::ParseError(format!("PDF parse error: {}", e)))
    let text = pdf_extract::extract_text_from_mem(&bytes)
        .map_err(|e| RagError::ParseError(format!("PDF parse error: {}", e)))?;

    // Validate that the PDF has extractable text (not image-based/scanned)
    // Count meaningful characters (excluding whitespace)
    let meaningful_chars = text.chars()
        .filter(|c| !c.is_whitespace())
        .count();

    // Require at least 50 non-whitespace characters to consider it a text PDF
    // This threshold filters out PDFs that are purely images or scanned documents
    if meaningful_chars < 50 {
        return Err(RagError::ParseError(
            "PDF appears to be image-based or scanned. OCR is not supported yet. Please use a text-based PDF.".to_string()
        ));
    }

    Ok(text)
}

pub fn parse_text(file_path: &str) -> Result<String, RagError> {
@@ -1,12 +1,16 @@
fn main() {
    tauri_plugin::Builder::new(&[
        "create_collection",
        "create_file",
        "insert_chunks",
        "search_collection",
        "delete_chunks",
        "delete_file",
        "delete_collection",
        "chunk_text",
        "get_status",
        "list_attachments",
        "get_chunks",
    ])
    .build();
}
@@ -2,13 +2,6 @@ import { invoke } from '@tauri-apps/api/core'

export type SearchMode = 'auto' | 'ann' | 'linear'

export interface ChunkInput {
  id?: string
  text: string
  embedding: number[]
  metadata?: Record<string, any>
}

export interface SearchResult {
  id: string
  text: string
@@ -42,11 +35,26 @@ export async function createCollection(name: string, dimension: number): Promise
  return await invoke('plugin:vector-db|create_collection', { name, dimension })
}

export async function createFile(
  collection: string,
  file: { path: string; name?: string; type?: string; size?: number }
): Promise<AttachmentFileInfo> {
  return await invoke('plugin:vector-db|create_file', { collection, file })
}

export async function insertChunks(
  collection: string,
  chunks: ChunkInput[]
  fileId: string,
  chunks: Array<{ text: string; embedding: number[] }>
): Promise<void> {
  return await invoke('plugin:vector-db|insert_chunks', { collection, chunks })
  return await invoke('plugin:vector-db|insert_chunks', { collection, fileId, chunks })
}

export async function deleteFile(
  collection: string,
  fileId: string
): Promise<void> {
  return await invoke('plugin:vector-db|delete_file', { collection, fileId })
}

export async function searchCollection(
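Taken together, the new low-level plugin flow appears to be: register the file, insert its chunks under that file id, and delete per file. A hedged sketch; the collection name and values are illustrative, not from this commit:

import * as vecdb from '@janhq/tauri-plugin-vector-db-api'

// Register the file row first so chunks can be attributed to it.
const file = await vecdb.createFile('attachments_thread-123', { path: '/tmp/report.pdf', name: 'report.pdf' })
await vecdb.insertChunks('attachments_thread-123', file.id, [
  { text: 'first chunk', embedding: [0.1, 0.2, 0.3] },
])
// delete_file in the Rust layer removes both the file row and its chunks.
await vecdb.deleteFile('attachments_thread-123', file.id)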
@@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-create-file"
description = "Enables the create_file command without any pre-configured scope."
commands.allow = ["create_file"]

[[permission]]
identifier = "deny-create-file"
description = "Denies the create_file command without any pre-configured scope."
commands.deny = ["create_file"]
@@ -0,0 +1,13 @@
# Automatically generated - DO NOT EDIT!

"$schema" = "../../schemas/schema.json"

[[permission]]
identifier = "allow-delete-file"
description = "Enables the delete_file command without any pre-configured scope."
commands.allow = ["delete_file"]

[[permission]]
identifier = "deny-delete-file"
description = "Denies the delete_file command without any pre-configured scope."
commands.deny = ["delete_file"]
@@ -11,4 +11,3 @@ commands.allow = ["list_attachments"]
identifier = "deny-list-attachments"
description = "Denies the list_attachments command without any pre-configured scope."
commands.deny = ["list_attachments"]
@@ -7,8 +7,10 @@ Default permissions for the vector-db plugin
- `allow-get-status`
- `allow-create-collection`
- `allow-insert-chunks`
- `allow-create-file`
- `allow-search-collection`
- `allow-delete-chunks`
- `allow-delete-file`
- `allow-delete-collection`
- `allow-chunk-text`
- `allow-list-attachments`
@@ -78,6 +80,32 @@ Denies the create_collection command without any pre-configured scope.
<tr>
<td>

`vector-db:allow-create-file`

</td>
<td>

Enables the create_file command without any pre-configured scope.

</td>
</tr>

<tr>
<td>

`vector-db:deny-create-file`

</td>
<td>

Denies the create_file command without any pre-configured scope.

</td>
</tr>

<tr>
<td>

`vector-db:allow-delete-chunks`

</td>
@@ -130,6 +158,32 @@ Denies the delete_collection command without any pre-configured scope.
<tr>
<td>

`vector-db:allow-delete-file`

</td>
<td>

Enables the delete_file command without any pre-configured scope.

</td>
</tr>

<tr>
<td>

`vector-db:deny-delete-file`

</td>
<td>

Denies the delete_file command without any pre-configured scope.

</td>
</tr>

<tr>
<td>

`vector-db:allow-get-chunks`

</td>
@@ -4,8 +4,10 @@ permissions = [
    "allow-get-status",
    "allow-create-collection",
    "allow-insert-chunks",
    "allow-create-file",
    "allow-search-collection",
    "allow-delete-chunks",
    "allow-delete-file",
    "allow-delete-collection",
    "allow-chunk-text",
    "allow-list-attachments",
@@ -318,6 +318,18 @@
      "const": "deny-create-collection",
      "markdownDescription": "Denies the create_collection command without any pre-configured scope."
    },
    {
      "description": "Enables the create_file command without any pre-configured scope.",
      "type": "string",
      "const": "allow-create-file",
      "markdownDescription": "Enables the create_file command without any pre-configured scope."
    },
    {
      "description": "Denies the create_file command without any pre-configured scope.",
      "type": "string",
      "const": "deny-create-file",
      "markdownDescription": "Denies the create_file command without any pre-configured scope."
    },
    {
      "description": "Enables the delete_chunks command without any pre-configured scope.",
      "type": "string",
@@ -342,6 +354,18 @@
      "const": "deny-delete-collection",
      "markdownDescription": "Denies the delete_collection command without any pre-configured scope."
    },
    {
      "description": "Enables the delete_file command without any pre-configured scope.",
      "type": "string",
      "const": "allow-delete-file",
      "markdownDescription": "Enables the delete_file command without any pre-configured scope."
    },
    {
      "description": "Denies the delete_file command without any pre-configured scope.",
      "type": "string",
      "const": "deny-delete-file",
      "markdownDescription": "Denies the delete_file command without any pre-configured scope."
    },
    {
      "description": "Enables the get_chunks command without any pre-configured scope.",
      "type": "string",
@@ -403,10 +427,10 @@
      "markdownDescription": "Denies the search_collection command without any pre-configured scope."
    },
    {
      "description": "Default permissions for the vector-db plugin\n#### This default permission set includes:\n\n- `allow-get-status`\n- `allow-create-collection`\n- `allow-insert-chunks`\n- `allow-search-collection`\n- `allow-delete-chunks`\n- `allow-delete-collection`\n- `allow-chunk-text`\n- `allow-list-attachments`\n- `allow-get-chunks`",
      "description": "Default permissions for the vector-db plugin\n#### This default permission set includes:\n\n- `allow-get-status`\n- `allow-create-collection`\n- `allow-insert-chunks`\n- `allow-create-file`\n- `allow-search-collection`\n- `allow-delete-chunks`\n- `allow-delete-file`\n- `allow-delete-collection`\n- `allow-chunk-text`\n- `allow-list-attachments`\n- `allow-get-chunks`",
      "type": "string",
      "const": "default",
      "markdownDescription": "Default permissions for the vector-db plugin\n#### This default permission set includes:\n\n- `allow-get-status`\n- `allow-create-collection`\n- `allow-insert-chunks`\n- `allow-search-collection`\n- `allow-delete-chunks`\n- `allow-delete-collection`\n- `allow-chunk-text`\n- `allow-list-attachments`\n- `allow-get-chunks`"
      "markdownDescription": "Default permissions for the vector-db plugin\n#### This default permission set includes:\n\n- `allow-get-status`\n- `allow-create-collection`\n- `allow-insert-chunks`\n- `allow-create-file`\n- `allow-search-collection`\n- `allow-delete-chunks`\n- `allow-delete-file`\n- `allow-delete-collection`\n- `allow-chunk-text`\n- `allow-list-attachments`\n- `allow-get-chunks`"
    }
  ]
}
@@ -1,6 +1,6 @@
use crate::{VectorDBError, VectorDBState};
use crate::db::{
    self, AttachmentFileInfo, ChunkInput, SearchResult,
    self, AttachmentFileInfo, SearchResult, MinimalChunkInput,
};
use serde::{Deserialize, Serialize};
use tauri::State;
@@ -10,6 +10,15 @@ pub struct Status {
    pub ann_available: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct FileInput {
    pub path: String,
    pub name: Option<String>,
    #[serde(rename = "type")]
    pub file_type: Option<String>,
    pub size: Option<i64>,
}

// ============================================================================
// Tauri Command Handlers
// ============================================================================
@@ -74,18 +83,48 @@ pub async fn create_collection<R: tauri::Runtime>(
    Ok(())
}

#[tauri::command]
pub async fn create_file<R: tauri::Runtime>(
    _app: tauri::AppHandle<R>,
    state: State<'_, VectorDBState>,
    collection: String,
    file: FileInput,
) -> Result<AttachmentFileInfo, VectorDBError> {
    let path = db::collection_path(&state.base_dir, &collection);
    let conn = db::open_or_init_conn(&path)?;
    db::create_file(
        &conn,
        &file.path,
        file.name.as_deref(),
        file.file_type.as_deref(),
        file.size,
    )
}

#[tauri::command]
pub async fn insert_chunks<R: tauri::Runtime>(
    _app: tauri::AppHandle<R>,
    state: State<'_, VectorDBState>,
    collection: String,
    chunks: Vec<ChunkInput>,
    file_id: String,
    chunks: Vec<MinimalChunkInput>,
) -> Result<(), VectorDBError> {
    let path = db::collection_path(&state.base_dir, &collection);
    let conn = db::open_or_init_conn(&path)?;
    let vec_loaded = db::try_load_sqlite_vec(&conn);
    db::insert_chunks(&conn, chunks, vec_loaded)?;
    Ok(())
    db::insert_chunks(&conn, &file_id, chunks, vec_loaded)
}

#[tauri::command]
pub async fn delete_file<R: tauri::Runtime>(
    _app: tauri::AppHandle<R>,
    state: State<'_, VectorDBState>,
    collection: String,
    file_id: String,
) -> Result<(), VectorDBError> {
    let path = db::collection_path(&state.base_dir, &collection);
    let conn = db::open_or_init_conn(&path)?;
    db::delete_file(&conn, &file_id)
}

#[tauri::command]
@@ -2,7 +2,6 @@ use crate::VectorDBError;
use crate::utils::{cosine_similarity, from_le_bytes_vec, to_le_bytes_vec};
use rusqlite::{params, Connection, OptionalExtension};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use uuid::Uuid;
@@ -16,18 +15,6 @@ pub struct FileMetadata {
    pub size: Option<i64>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ChunkMetadata {
    pub file: FileMetadata,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ChunkInput {
    pub id: Option<String>,
    pub text: String,
    pub embedding: Vec<f32>,
    pub metadata: Option<ChunkMetadata>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SearchResult {
@@ -49,6 +36,13 @@ pub struct AttachmentFileInfo {
    pub chunk_count: i64,
}

// New minimal chunk input (no id/metadata) for file-scoped insertion
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct MinimalChunkInput {
    pub text: String,
    pub embedding: Vec<f32>,
}

// ============================================================================
// Connection & Path Management
// ============================================================================
@@ -190,14 +184,84 @@ pub fn create_schema(conn: &Connection, dimension: usize) -> Result<bool, Vector
// Insert Operations
// ============================================================================

pub fn create_file(
    conn: &Connection,
    path: &str,
    name: Option<&str>,
    file_type: Option<&str>,
    size: Option<i64>,
) -> Result<AttachmentFileInfo, VectorDBError> {
    let tx = conn.unchecked_transaction()?;

    // Try get existing by path
    if let Ok(Some(id)) = tx
        .prepare("SELECT id FROM files WHERE path = ?1")
        .and_then(|mut s| s.query_row(params![path], |r| r.get::<_, String>(0)).optional())
    {
        let row: AttachmentFileInfo = {
            let mut stmt = tx.prepare(
                "SELECT id, path, name, type, size, chunk_count FROM files WHERE id = ?1",
            )?;
            stmt.query_row(params![id.as_str()], |r| {
                Ok(AttachmentFileInfo {
                    id: r.get(0)?,
                    path: r.get(1)?,
                    name: r.get(2)?,
                    file_type: r.get(3)?,
                    size: r.get(4)?,
                    chunk_count: r.get(5)?,
                })
            })?
        };
        tx.commit()?;
        return Ok(row);
    }

    let new_id = Uuid::new_v4().to_string();
    // Determine file size if not provided
    let computed_size: Option<i64> = match size {
        Some(s) if s > 0 => Some(s),
        _ => {
            match std::fs::metadata(path) {
                Ok(meta) => Some(meta.len() as i64),
                Err(_) => None,
            }
        }
    };
    tx.execute(
        "INSERT INTO files (id, path, name, type, size, chunk_count) VALUES (?1, ?2, ?3, ?4, ?5, 0)",
        params![new_id, path, name, file_type, computed_size],
    )?;

    let row: AttachmentFileInfo = {
        let mut stmt = tx.prepare(
            "SELECT id, path, name, type, size, chunk_count FROM files WHERE path = ?1",
        )?;
        stmt.query_row(params![path], |r| {
            Ok(AttachmentFileInfo {
                id: r.get(0)?,
                path: r.get(1)?,
                name: r.get(2)?,
                file_type: r.get(3)?,
                size: r.get(4)?,
                chunk_count: r.get(5)?,
            })
        })?
    };

    tx.commit()?;
    Ok(row)
}

pub fn insert_chunks(
    conn: &Connection,
    chunks: Vec<ChunkInput>,
    file_id: &str,
    chunks: Vec<MinimalChunkInput>,
    vec_loaded: bool,
) -> Result<(), VectorDBError> {
    let tx = conn.unchecked_transaction()?;

    // Check if vec virtual table exists
    // Check if vec table exists
    let has_vec = if vec_loaded {
        conn
            .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='chunks_vec'")
@@ -209,69 +273,22 @@ pub fn insert_chunks(
        false
    };

    let mut file_id_cache: HashMap<String, String> = HashMap::new();
    let mut file_chunk_counters: HashMap<String, i64> = HashMap::new();
    // Determine current max order
    let mut current_order: i64 = tx
        .query_row(
            "SELECT COALESCE(MAX(chunk_file_order), -1) FROM chunks WHERE file_id = ?1",
            params![file_id],
            |row| row.get::<_, i64>(0),
        )
        .unwrap_or(-1);

    for ch in chunks.into_iter() {
        current_order += 1;
        let emb = to_le_bytes_vec(&ch.embedding);

        // Extract file info from metadata and get/create file_id
        let mut file_id: Option<String> = None;
        if let Some(ref meta) = ch.metadata {
            let file_path = &meta.file.path;

            // Check cache first
            if let Some(cached_id) = file_id_cache.get(file_path) {
                file_id = Some(cached_id.clone());
            } else {
                // Generate UUID for new file
                let uuid = Uuid::new_v4().to_string();

                // Insert or ignore if path already exists
                tx.execute(
                    "INSERT OR IGNORE INTO files (id, path, name, type, size) VALUES (?1, ?2, ?3, ?4, ?5)",
                    params![
                        &uuid,
                        &meta.file.path,
                        &meta.file.name,
                        &meta.file.file_type,
                        meta.file.size
                    ],
                )?;

                // Get the actual id (either the one we just inserted or existing one)
                let id: String = tx.query_row(
                    "SELECT id FROM files WHERE path = ?1",
                    params![file_path],
                    |row| row.get(0),
                )?;
                file_id = Some(id.clone());
                file_id_cache.insert(file_path.clone(), id);
            }
        }

        // Get or initialize chunk order for this file
        let chunk_order = if let Some(ref fid) = file_id {
            let counter = file_chunk_counters.entry(fid.clone()).or_insert_with(|| {
                // Get max existing order for this file
                tx.query_row(
                    "SELECT COALESCE(MAX(chunk_file_order), -1) FROM chunks WHERE file_id = ?1",
                    params![fid],
                    |row| row.get::<_, i64>(0),
                ).unwrap_or(-1)
            });
            *counter += 1;
            *counter
        } else {
            0
        };

        // Generate UUID for chunk if not provided
        let chunk_id = ch.id.unwrap_or_else(|| Uuid::new_v4().to_string());

        let chunk_id = Uuid::new_v4().to_string();
        tx.execute(
            "INSERT OR REPLACE INTO chunks (id, text, embedding, file_id, chunk_file_order) VALUES (?1, ?2, ?3, ?4, ?5)",
            params![chunk_id, ch.text, emb, file_id, chunk_order],
            params![chunk_id, ch.text, emb, file_id, current_order],
        )?;

        if has_vec {
@@ -279,20 +296,14 @@ pub fn insert_chunks(
                .prepare("SELECT rowid FROM chunks WHERE id=?1")?
                .query_row(params![chunk_id], |r| r.get(0))?;
            let json_vec = serde_json::to_string(&ch.embedding).unwrap_or("[]".to_string());
            match tx.execute(
            let _ = tx.execute(
                "INSERT OR REPLACE INTO chunks_vec(rowid, embedding) VALUES (?1, ?2)",
                params![rowid, json_vec],
            ) {
                Ok(_) => {}
                Err(e) => {
                    println!("[VectorDB] ✗ Failed to insert into chunks_vec: {}", e);
                }
            }
            );
        }
    }

    // Update chunk_count for all affected files
    for file_id in file_id_cache.values() {
    // Update chunk_count
    let count: i64 = tx.query_row(
        "SELECT COUNT(*) FROM chunks WHERE file_id = ?1",
        params![file_id],
@@ -302,8 +313,15 @@ pub fn insert_chunks(
        "UPDATE files SET chunk_count = ?1 WHERE id = ?2",
        params![count, file_id],
    )?;

    tx.commit()?;
    Ok(())
}

pub fn delete_file(conn: &Connection, file_id: &str) -> Result<(), VectorDBError> {
    let tx = conn.unchecked_transaction()?;
    tx.execute("DELETE FROM chunks WHERE file_id = ?1", params![file_id])?;
    tx.execute("DELETE FROM files WHERE id = ?1", params![file_id])?;
    tx.commit()?;
    Ok(())
}
@@ -18,8 +18,10 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
        .invoke_handler(tauri::generate_handler![
            commands::create_collection,
            commands::insert_chunks,
            commands::create_file,
            commands::search_collection,
            commands::delete_chunks,
            commands::delete_file,
            commands::delete_collection,
            commands::chunk_text,
            commands::get_status,
@@ -496,6 +496,9 @@ async fn schedule_mcp_start_task<R: Runtime>(
        client_info: Implementation {
            name: "Jan Streamable Client".to_string(),
            version: "0.0.1".to_string(),
            title: None,
            website_url: None,
            icons: None,
        },
    };
    let client = client_info.serve(transport).await.inspect_err(|e| {
@@ -567,6 +570,9 @@ async fn schedule_mcp_start_task<R: Runtime>(
        client_info: Implementation {
            name: "Jan SSE Client".to_string(),
            version: "0.0.1".to_string(),
            title: None,
            website_url: None,
            icons: None,
        },
    };
    let client = client_info.serve(transport).await.map_err(|e| {
@@ -23,6 +23,7 @@ import {
  IconX,
  IconPaperclip,
  IconLoader2,
  IconCheck,
} from '@tabler/icons-react'
import { useTranslation } from '@/i18n/react-i18next-compat'
import { useGeneralSetting } from '@/hooks/useGeneralSetting'
@@ -40,7 +41,7 @@ import { TokenCounter } from '@/components/TokenCounter'
import { useMessages } from '@/hooks/useMessages'
import { useShallow } from 'zustand/react/shallow'
import { McpExtensionToolLoader } from './McpExtensionToolLoader'
import { ExtensionTypeEnum, MCPExtension, RAGExtension } from '@janhq/core'
import { ExtensionTypeEnum, MCPExtension, fs, RAGExtension } from '@janhq/core'
import { ExtensionManager } from '@/lib/extension'
import { useAttachments } from '@/hooks/useAttachments'
import { open } from '@tauri-apps/plugin-dialog'
@@ -49,6 +50,7 @@ import { PlatformFeatures } from '@/lib/platform/const'
import { PlatformFeature } from '@/lib/platform/types'
import { useAnalytic } from '@/hooks/useAnalytic'
import posthog from 'posthog-js'
import { Attachment, createImageAttachment, createDocumentAttachment } from '@/types/attachment'

type ChatInputProps = {
  className?: string
@@ -101,33 +103,20 @@ const ChatInput = ({
  const [message, setMessage] = useState('')
  const [dropdownToolsAvailable, setDropdownToolsAvailable] = useState(false)
  const [tooltipToolsAvailable, setTooltipToolsAvailable] = useState(false)
  const [uploadedFiles, setUploadedFiles] = useState<
    Array<{
      name: string
      type: string
      size: number
      base64: string
      dataUrl: string
    }>
  >([])
  // Document attachments (desktop RAG ingestion). We only index on send.
  const [docFiles, setDocFiles] = useState<
    Array<{
      name: string
      path: string
      size?: number
      type?: string
    }>
  >([])
  const [attachments, setAttachments] = useState<Attachment[]>([])
  const [connectedServers, setConnectedServers] = useState<string[]>([])
  const [isDragOver, setIsDragOver] = useState(false)
  const [hasMmproj, setHasMmproj] = useState(false)
  const [hasActiveModels, setHasActiveModels] = useState(false)
  const attachmentsEnabled = useAttachments((s) => s.enabled)
  const [ingestingDocs, setIngestingDocs] = useState(false)
  // Determine whether to show the Attach documents button (simple gating)
  const showAttachmentButton =
    attachmentsEnabled && PlatformFeatures[PlatformFeature.ATTACHMENTS]
  // Derived: any document currently processing (ingestion in progress)
  const ingestingDocs = attachments.some(
    (a) => a.type === 'document' && a.processing
  )
  const ingestingAny = attachments.some((a) => a.processing)

  // Check for connected MCP servers
  useEffect(() => {
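The Attachment shape from '@/types/attachment' is not included in this diff; read off the usage below, it looks roughly like this (field names are inferred, not authoritative):

// Rough reconstruction from usage in this file; the real definition lives in @/types/attachment.
interface Attachment {
  type: 'image' | 'document'
  name: string
  path?: string // documents: absolute path on disk
  fileType?: string // documents: extension, e.g. 'pdf'
  mimeType?: string // images
  size?: number
  base64?: string // images
  dataUrl?: string // images
  processing?: boolean // ingestion in progress
  processed?: boolean // ingestion finished
  id?: string // assigned by ingestion
  chunkCount?: number // documents: chunks inserted
}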
@@ -208,36 +197,10 @@ const ChatInput = ({
      setMessage('Please select a model to start chatting.')
      return
    }
    if (!prompt.trim() && uploadedFiles.length === 0 && docFiles.length === 0) {
    if (!prompt.trim()) {
      return
    }
    // If we have pending doc files, index them first
    if (docFiles.length > 0) {
      try {
        setIngestingDocs(true)
        const rag = extensionManager.get<RAGExtension>(ExtensionTypeEnum.RAG)
        if (!rag?.ingestAttachments) throw new Error('Retrieval extension not available')
        for (const f of docFiles) {
          const id = (toast as any).loading
            ? (toast as any).loading(`Indexing ${f.name || f.path}…`)
            : undefined
          try {
            await rag.ingestAttachments(currentThreadId!, [{ path: f.path, name: f.name }])
            if (id) toast.success(`Indexed ${f.name || f.path}`, { id })
          } catch (err) {
            if (id) toast.error(`Failed to index ${f.name || f.path}`, { id })
            throw err
          }
        }
        setDocFiles([])
      } catch (err) {
        const desc = err instanceof Error ? err.message : String(err)
        toast.error('Failed to index attachments', { description: desc })
        setIngestingDocs(false)
        return
      }
      setIngestingDocs(false)
    }

    setMessage('')

    // Track message send event with PostHog (only if product analytics is enabled)
@@ -252,13 +215,39 @@ const ChatInput = ({
      }
    }

    // Callback to update attachment processing state
    const updateAttachmentProcessing = (
      fileName: string,
      status: 'processing' | 'done' | 'error' | 'clear_docs' | 'clear_all'
    ) => {
      if (status === 'clear_docs') {
        setAttachments((prev) => prev.filter((a) => a.type !== 'document'))
        return
      }
      if (status === 'clear_all') {
        setAttachments([])
        return
      }
      setAttachments((prev) =>
        prev.map((att) =>
          att.name === fileName
            ? {
                ...att,
                processing: status === 'processing',
                processed: status === 'done' ? true : att.processed,
              }
            : att
        )
      )
    }

    sendMessage(
      prompt,
      true,
      uploadedFiles.length > 0 ? uploadedFiles : undefined,
      projectId
      attachments.length > 0 ? attachments : undefined,
      projectId,
      updateAttachmentProcessing
    )
    setUploadedFiles([])
  }

  useEffect(() => {
@@ -333,10 +322,6 @@ const ChatInput = ({
        toast.info('Attachments are disabled in Settings')
        return
      }
      if (!currentThreadId) {
        toast.info('Please start a thread first to attach documents.')
        return
      }
      const selection = await open({
        multiple: true,
        filters: [
@@ -349,24 +334,128 @@ const ChatInput = ({
      if (!selection) return
      const paths = Array.isArray(selection) ? selection : [selection]
      if (!paths.length) return
      setDocFiles((prev) => [
        ...prev,
        ...paths.map((p) => ({
          path: p,
          name: p.split(/[\\/]/).pop() || p,
        })),
      ])

      // Check for duplicates and fetch file sizes
      const existingPaths = new Set(
        attachments
          .filter((a) => a.type === 'document' && a.path)
          .map((a) => a.path)
      )

      const duplicates: string[] = []
      const newDocAttachments: Attachment[] = []

      for (const p of paths) {
        if (existingPaths.has(p)) {
          duplicates.push(p.split(/[\\/]/).pop() || p)
          continue
        }

        const name = p.split(/[\\/]/).pop() || p
        const fileType = name.split('.').pop()?.toLowerCase()
        let size: number | undefined = undefined
        try {
          const stat = await fs.fileStat(p)
          size = stat?.size ? Number(stat.size) : undefined
        } catch (e) {
          console.error('Failed to ingest attachments:', e)
          console.warn('Failed to read file size for', p, e)
        }
        newDocAttachments.push(
          createDocumentAttachment({
            name,
            path: p,
            fileType,
            size,
          })
        )
      }

      if (duplicates.length > 0) {
        toast.warning('Files already attached', {
          description: `${duplicates.join(', ')} ${duplicates.length === 1 ? 'is' : 'are'} already in the list`,
        })
      }

      if (newDocAttachments.length > 0) {
        // Add to state first with processing flag
        setAttachments((prev) => [...prev, ...newDocAttachments])

        // If thread exists, ingest immediately
        if (currentThreadId) {
          const ragExtension = ExtensionManager.getInstance().get(
            ExtensionTypeEnum.RAG
          ) as RAGExtension | undefined
          if (!ragExtension) {
            toast.error('RAG extension not available')
            return
          }

          // Ingest each document
          for (const doc of newDocAttachments) {
            try {
              // Mark as processing
              setAttachments((prev) =>
                prev.map((a) =>
                  a.path === doc.path && a.type === 'document'
                    ? { ...a, processing: true }
                    : a
                )
              )

              const result = await ragExtension.ingestAttachments(
                currentThreadId,
                [
                  {
                    path: doc.path!,
                    name: doc.name,
                    type: doc.fileType,
                    size: doc.size,
                  },
                ]
              )

              const fileInfo = result.files?.[0]
              if (fileInfo?.id) {
                // Mark as processed with ID
                setAttachments((prev) =>
                  prev.map((a) =>
                    a.path === doc.path && a.type === 'document'
                      ? {
                          ...a,
                          processing: false,
                          processed: true,
                          id: fileInfo.id,
                          chunkCount: fileInfo.chunk_count,
                        }
                      : a
                  )
                )
              } else {
                throw new Error('No file ID returned from ingestion')
              }
            } catch (error) {
              console.error('Failed to ingest document:', error)
              // Remove failed document
              setAttachments((prev) =>
                prev.filter((a) => !(a.path === doc.path && a.type === 'document'))
              )
              toast.error(`Failed to ingest ${doc.name}`, {
                description:
                  error instanceof Error ? error.message : String(error),
              })
            }
          }
        }
      }
    } catch (e) {
      console.error('Failed to attach documents:', e)
      const desc = e instanceof Error ? e.message : String(e)
      toast.error('Failed to attach documents', { description: desc })
    }
  }

  const handleRemoveFile = (indexToRemove: number) => {
    setUploadedFiles((prev) =>
      prev.filter((_, index) => index !== indexToRemove)
    )
  const handleRemoveAttachment = (indexToRemove: number) => {
    setAttachments((prev) => prev.filter((_, index) => index !== indexToRemove))
  }

  const getFileTypeFromExtension = (fileName: string): string => {
@@ -382,20 +471,39 @@ const ChatInput = ({
    }
  }

  const formatBytes = (bytes?: number): string => {
    if (!bytes || bytes <= 0) return ''
    const units = ['B', 'KB', 'MB', 'GB']
    let i = 0
    let val = bytes
    while (val >= 1024 && i < units.length - 1) {
      val /= 1024
      i++
    }
    return `${val.toFixed(i === 0 ? 0 : 1)} ${units[i]}`
  }

  const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    const files = e.target.files

    if (files && files.length > 0) {
      const maxSize = 10 * 1024 * 1024 // 10MB in bytes
      const newFiles: Array<{
        name: string
        type: string
        size: number
        base64: string
        dataUrl: string
      }> = []
      const newFiles: Attachment[] = []
      const duplicates: string[] = []
      const existingImageNames = new Set(
        attachments
          .filter((a) => a.type === 'image')
          .map((a) => a.name)
      )

      Array.from(files).forEach((file) => {
        // Check for duplicate image names
        if (existingImageNames.has(file.name)) {
          duplicates.push(file.name)
          return
        }

        // Check file size
        if (file.size > maxSize) {
          setMessage(`File is too large. Maximum size is 10MB.`)
@@ -429,26 +537,92 @@ const ChatInput = ({
          const result = reader.result
          if (typeof result === 'string') {
            const base64String = result.split(',')[1]
            const fileData = {
            const att = createImageAttachment({
              name: file.name,
              size: file.size,
              type: actualType,
              mimeType: actualType,
              base64: base64String,
              dataUrl: result,
            }
            newFiles.push(fileData)
            })
            newFiles.push(att)
            // Update state
            if (
              newFiles.length ===
              Array.from(files).filter((f) => {
                const fType = getFileTypeFromExtension(f.name) || f.type
                return f.size <= maxSize && allowedTypes.includes(fType)
                return (
                  f.size <= maxSize &&
                  allowedTypes.includes(fType) &&
                  !existingImageNames.has(f.name)
                )
              }).length
            ) {
              setUploadedFiles((prev) => {
              if (newFiles.length > 0) {
                setAttachments((prev) => {
                  const updated = [...prev, ...newFiles]
                  return updated
                })

                // If thread exists, ingest images immediately
                if (currentThreadId) {
                  ;(async () => {
                    for (const img of newFiles) {
                      try {
                        // Mark as processing
                        setAttachments((prev) =>
                          prev.map((a) =>
                            a.name === img.name && a.type === 'image'
                              ? { ...a, processing: true }
                              : a
                          )
                        )

                        const result = await serviceHub.uploads().ingestImage(
                          currentThreadId,
                          img
                        )

                        if (result?.id) {
                          // Mark as processed with ID
                          setAttachments((prev) =>
                            prev.map((a) =>
                              a.name === img.name && a.type === 'image'
                                ? {
                                    ...a,
                                    processing: false,
                                    processed: true,
                                    id: result.id,
                                  }
                                : a
                            )
                          )
                        } else {
                          throw new Error('No ID returned from image ingestion')
                        }
                      } catch (error) {
                        console.error('Failed to ingest image:', error)
                        // Remove failed image
                        setAttachments((prev) =>
                          prev.filter(
                            (a) => !(a.name === img.name && a.type === 'image')
                          )
                        )
                        toast.error(`Failed to ingest ${img.name}`, {
                          description:
                            error instanceof Error ? error.message : String(error),
                        })
                      }
                    }
                  })()
                }
              }

              if (duplicates.length > 0) {
                toast.warning('Some images already attached', {
                  description: `${duplicates.join(', ')} ${duplicates.length === 1 ? 'is' : 'are'} already in the list`,
                })
              }

              // Reset the file input value to allow re-uploading the same file
              if (fileInputRef.current) {
                fileInputRef.current.value = ''
@@ -662,54 +836,90 @@ const ChatInput = ({
        onDragOver={hasMmproj ? handleDragOver : undefined}
        onDrop={hasMmproj ? handleDrop : undefined}
      >
        {(uploadedFiles.length > 0 || docFiles.length > 0) && (
        {attachments.length > 0 && (
          <div className="flex gap-3 items-center p-2 pb-0">
            {uploadedFiles.map((file, index) => {
            {attachments
              .map((att, idx) => ({ att, idx }))
              .map(({ att, idx }) => {
                const isImage = att.type === 'image'
                const ext = att.fileType || att.mimeType?.split('/')[1]
                return (
                  <div key={`${att.type}-${idx}-${att.name}`} className="relative">
                    <TooltipProvider>
                      <Tooltip>
                        <TooltipTrigger asChild>
                          <div
                            key={index}
                            className={cn(
                              'relative border border-main-view-fg/5 rounded-lg',
                              file.type.startsWith('image/') ? 'size-14' : 'h-14 '
                              'relative border border-main-view-fg/5 rounded-lg size-14 overflow-hidden bg-main-view/40',
                              'flex items-center justify-center'
                            )}
                          >
                            {file.type.startsWith('image/') && (
                            {/* Inner content by state */}
                            {isImage && att.dataUrl ? (
                              <img
                                className="object-cover w-full h-full rounded-lg"
                                src={file.dataUrl}
                                alt={`${file.name} - ${index}`}
                                className="object-cover w-full h-full"
                                src={att.dataUrl}
                                alt={`${att.name}`}
                              />
                            ) : (
                              <div className="flex flex-col items-center justify-center text-main-view-fg/70">
                                <IconPaperclip size={18} />
                                {ext && (
                                  <span className="text-[10px] leading-none mt-0.5 uppercase opacity-70">
                                    .{ext}
                                  </span>
                                )}
                              </div>
                            )}

                            {/* Overlay spinner when processing */}
                            {att.processing && (
                              <div className="absolute inset-0 flex items-center justify-center bg-black/10">
                                <IconLoader2
                                  size={18}
                                  className="text-main-view-fg/80 animate-spin"
                                />
                              </div>
                            )}

                            {/* Overlay success check when processed */}
                            {att.processed && !att.processing && (
                              <div className="absolute inset-0 flex items-center justify-center bg-black/5">
                                <div className="bg-green-600/90 rounded-full p-1">
                                  <IconCheck size={14} className="text-white" />
                                </div>
                              </div>
                            )}
                          </div>
                        </TooltipTrigger>
                        <TooltipContent>
                          <div className="text-xs">
                            <div className="font-medium truncate max-w-52" title={att.name}>
                              {att.name}
                            </div>
                            <div className="opacity-70">
                              {isImage
                                ? (att.mimeType || 'image')
                                : (ext ? `.${ext}` : 'document')}
                              {att.size ? ` · ${formatBytes(att.size)}` : ''}
                            </div>
                          </div>
                        </TooltipContent>
                      </Tooltip>
                    </TooltipProvider>

                    {/* Remove button disabled while processing - outside overflow-hidden container */}
                    {!att.processing && (
                      <div
                        className="absolute -top-1 -right-2.5 bg-destructive size-5 flex rounded-full items-center justify-center cursor-pointer"
                        onClick={() => handleRemoveFile(index)}
                        onClick={() => handleRemoveAttachment(idx)}
                      >
                        <IconX className="text-destructive-fg" size={16} />
                      </div>
                    )}
                  </div>
                )
              })}
            {docFiles.map((file, index) => (
              <div
                key={`doc-${index}`}
                className="relative border border-main-view-fg/5 rounded-lg px-2 py-1 text-xs flex items-center gap-2 bg-main-view/40"
              >
                <IconPaperclip size={14} className="text-main-view-fg/50" />
                <span className="max-w-48 truncate" title={file.name}>
                  {file.name}
                </span>
                <div
                  className="absolute -top-1 -right-2.5 bg-destructive size-5 flex rounded-full items-center justify-center cursor-pointer"
                  onClick={() =>
                    setDocFiles((prev) =>
                      prev.filter((_, i) => i !== index)
                    )
                  }
                >
                  <IconX className="text-destructive-fg" size={16} />
                </div>
              </div>
            ))}
          </div>
        )}
        <TextareaAutosize
@ -975,7 +1185,15 @@ const ChatInput = ({
                <TokenCounter
                  messages={threadMessages || []}
                  compact={true}
                  uploadedFiles={uploadedFiles}
                  uploadedFiles={attachments
                    .filter((a) => a.type === 'image' && a.dataUrl)
                    .map((a) => ({
                      name: a.name,
                      type: a.mimeType || getFileTypeFromExtension(a.name),
                      size: a.size || 0,
                      base64: a.base64 || '',
                      dataUrl: a.dataUrl!,
                    }))}
                />
              </div>
            )}
@ -992,24 +1210,13 @@ const ChatInput = ({
            </Button>
          ) : (
            <Button
              variant={
                !prompt.trim() &&
                uploadedFiles.length === 0 &&
                docFiles.length === 0
                  ? null
                  : 'default'
              }
              variant={!prompt.trim() ? null : 'default'}
              size="icon"
              disabled={
                (!prompt.trim() &&
                  uploadedFiles.length === 0 &&
                  docFiles.length === 0) ||
                ingestingDocs
              }
              disabled={!prompt.trim() || ingestingAny}
              data-test-id="send-message-button"
              onClick={() => handleSendMessage(prompt)}
            >
              {streamingContent || ingestingDocs ? (
              {streamingContent || ingestingAny ? (
                <span className="animate-spin h-4 w-4 border-2 border-current border-t-transparent rounded-full" />
              ) : (
                <ArrowRight className="text-primary-fg" />
@ -1048,7 +1255,15 @@ const ChatInput = ({
                <TokenCounter
                  messages={threadMessages || []}
                  compact={false}
                  uploadedFiles={uploadedFiles}
                  uploadedFiles={attachments
                    .filter((a) => a.type === 'image' && a.dataUrl)
                    .map((a) => ({
                      name: a.name,
                      type: a.mimeType || getFileTypeFromExtension(a.name),
                      size: a.size || 0,
                      base64: a.base64 || '',
                      dataUrl: a.dataUrl!,
                    }))}
                />
              </div>
            )}
@ -26,6 +26,8 @@ import TokenSpeedIndicator from '@/containers/TokenSpeedIndicator'

import { useTranslation } from '@/i18n/react-i18next-compat'
import { useModelProvider } from '@/hooks/useModelProvider'
import { extractFilesFromPrompt } from '@/lib/fileMetadata'
import { createImageAttachment } from '@/types/attachment'

const CopyButton = ({ text }: { text: string }) => {
  const [copied, setCopied] = useState(false)
@ -102,6 +104,14 @@ export const ThreadContent = memo(
      [item.content]
    )

    // Extract file metadata from user message text
    const { files: attachedFiles, cleanPrompt } = useMemo(() => {
      if (item.role === 'user') {
        return extractFilesFromPrompt(text)
      }
      return { files: [], cleanPrompt: text }
    }, [text, item.role])

    const { reasoningSegment, textSegment } = useMemo(() => {
      // Check for thinking formats
      const hasThinkTag = text.includes('<think>') && !text.includes('</think>')
@ -153,9 +163,9 @@ export const ThreadContent = memo(
      if (toSendMessage) {
        deleteMessage(toSendMessage.thread_id, toSendMessage.id ?? '')
        // Extract text content and any attachments
        const textContent =
          toSendMessage.content?.find((c) => c.type === 'text')?.text?.value ||
          ''
        const rawText =
          toSendMessage.content?.find((c) => c.type === 'text')?.text?.value || ''
        const { cleanPrompt: textContent } = extractFilesFromPrompt(rawText)
        const attachments = toSendMessage.content
          ?.filter((c) => (c.type === 'image_url' && c.image_url?.url) || false)
          .map((c) => {
@ -164,24 +174,19 @@ export const ThreadContent = memo(
            const [mimeType, base64] = url
              .replace('data:', '')
              .split(';base64,')
            return {
              name: 'image', // We don't have the original filename
              type: mimeType,
              size: 0, // We don't have the original size
            return createImageAttachment({
              name: 'image', // Original filename unavailable
              mimeType,
              size: 0,
              base64: base64,
              dataUrl: url,
            }
            })
            }
            return null
          })
          .filter(Boolean) as Array<{
            name: string
            type: string
            size: number
            base64: string
            dataUrl: string
          }>
        sendMessage(textContent, true, attachments)
          .filter(Boolean)
        // Keep embedded document metadata in the message for regenerate
        sendMessage(rawText, true, attachments)
      }
    }, [deleteMessage, getMessages, item, sendMessage])

@ -225,7 +230,56 @@ export const ThreadContent = memo(
          <Fragment>
            {item.role === 'user' && (
              <div className="w-full">
                {/* Render attachments above the message bubble */}
                {/* Render text content in the message bubble */}
                {cleanPrompt && (
                  <div className="flex justify-end w-full h-full text-start break-words whitespace-normal">
                    <div className="bg-main-view-fg/4 relative text-main-view-fg p-2 rounded-md inline-block max-w-[80%] ">
                      <div className="select-text">
                        <RenderMarkdown
                          content={cleanPrompt}
                          components={linkComponents}
                          isUser
                        />
                      </div>
                    </div>
                  </div>
                )}

                {/* Render document file attachments (extracted from message text) - below text */}
                {attachedFiles.length > 0 && (
                  <div className="flex justify-end w-full mt-2 mb-2">
                    <div className="flex flex-wrap gap-2 max-w-[80%] justify-end">
                      {attachedFiles.map((file, index) => (
                        <div
                          key={file.id || index}
                          className="flex items-center gap-2 px-3 py-2 bg-main-view-fg/5 rounded-md border border-main-view-fg/10 text-xs"
                        >
                          <svg
                            className="w-4 h-4 text-main-view-fg/50"
                            fill="none"
                            viewBox="0 0 24 24"
                            stroke="currentColor"
                          >
                            <path
                              strokeLinecap="round"
                              strokeLinejoin="round"
                              strokeWidth={2}
                              d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"
                            />
                          </svg>
                          <span className="text-main-view-fg">{file.name}</span>
                          {file.type && (
                            <span className="text-main-view-fg/40 text-[10px]">
                              .{file.type}
                            </span>
                          )}
                        </div>
                      ))}
                    </div>
                  </div>
                )}

                {/* Render image attachments - below files */}
                {item.content?.some(
                  (c) => (c.type === 'image_url' && c.image_url?.url) || false
                ) && (
@ -258,33 +312,9 @@ export const ThreadContent = memo(
                  </div>
                )}

                {/* Render text content in the message bubble */}
                {item.content?.some((c) => c.type === 'text' && c.text?.value) && (
                  <div className="flex justify-end w-full h-full text-start break-words whitespace-normal">
                    <div className="bg-main-view-fg/4 relative text-main-view-fg p-2 rounded-md inline-block max-w-[80%] ">
                      <div className="select-text">
                        {item.content
                          ?.filter((c) => c.type === 'text' && c.text?.value)
                          .map((contentPart, index) => (
                            <div key={index}>
                              <RenderMarkdown
                                content={contentPart.text!.value}
                                components={linkComponents}
                                isUser
                              />
                            </div>
                          ))}
                      </div>
                    </div>
                  </div>
                )}

                <div className="flex items-center justify-end gap-2 text-main-view-fg/60 text-xs mt-2">
                  <EditMessageDialog
                    message={
                      item.content?.find((c) => c.type === 'text')?.text?.value ||
                      ''
                    }
                    message={cleanPrompt || ''}
                    imageUrls={
                      item.content
                        ?.filter((c) => c.type === 'image_url' && c.image_url?.url)
@ -37,6 +37,8 @@ import {
import { useAssistant } from './useAssistant'
import { useShallow } from 'zustand/shallow'
import { TEMPORARY_CHAT_QUERY_ID, TEMPORARY_CHAT_ID } from '@/constants/chat'
import { toast } from 'sonner'
import { Attachment } from '@/types/attachment'

export const useChat = () => {
  const [
@ -257,14 +259,12 @@ export const useChat = () => {
    async (
      message: string,
      troubleshooting = true,
      attachments?: Array<{
        name: string
        type: string
        size: number
        base64: string
        dataUrl: string
      }>,
      projectId?: string
      attachments?: Attachment[],
      projectId?: string,
      updateAttachmentProcessing?: (
        fileName: string,
        status: 'processing' | 'done' | 'error' | 'clear_docs' | 'clear_all'
      ) => void
    ) => {
      const activeThread = await getCurrentThread(projectId)
      const selectedProvider = useModelProvider.getState().selectedProvider
@ -272,14 +272,124 @@ export const useChat = () => {

      resetTokenSpeed()
      if (!activeThread || !activeProvider) return

      // Separate images and documents
      const images = attachments?.filter((a) => a.type === 'image') || []
      const documents = attachments?.filter((a) => a.type === 'document') || []

      // Process attachments BEFORE sending
      const processedAttachments: Attachment[] = []

      // 1) Images ingestion (placeholder/no-op for now)
      // Track attachment ingestion; all must succeed before sending

      if (images.length > 0) {
        for (const img of images) {
          try {
            // Skip if already processed (ingested in ChatInput when thread existed)
            if (img.processed && img.id) {
              processedAttachments.push(img)
              continue
            }

            if (updateAttachmentProcessing) {
              updateAttachmentProcessing(img.name, 'processing')
            }
            // Upload image, get id/URL
            const res = await serviceHub.uploads().ingestImage(activeThread.id, img)
            processedAttachments.push({
              ...img,
              id: res.id,
              processed: true,
              processing: false,
            })
            if (updateAttachmentProcessing) {
              updateAttachmentProcessing(img.name, 'done')
            }
          } catch (err) {
            console.error(`Failed to ingest image ${img.name}:`, err)
            if (updateAttachmentProcessing) {
              updateAttachmentProcessing(img.name, 'error')
            }
            const desc = err instanceof Error ? err.message : String(err)
            toast.error('Failed to ingest image attachment', { description: desc })
            return
          }
        }
      }

      if (documents.length > 0) {
        try {
          for (const doc of documents) {
            // Skip if already processed (ingested in ChatInput when thread existed)
            if (doc.processed && doc.id) {
              processedAttachments.push(doc)
              continue
            }

            // Update UI to show spinner on this file
            if (updateAttachmentProcessing) {
              updateAttachmentProcessing(doc.name, 'processing')
            }

            try {
              const res = await serviceHub
                .uploads()
                .ingestFileAttachment(activeThread.id, doc)

              // Add processed document with ID
              processedAttachments.push({
                ...doc,
                id: res.id,
                size: res.size ?? doc.size,
                chunkCount: res.chunkCount ?? doc.chunkCount,
                processing: false,
                processed: true,
              })

              // Update UI to show done state
              if (updateAttachmentProcessing) {
                updateAttachmentProcessing(doc.name, 'done')
              }
            } catch (err) {
              console.error(`Failed to ingest ${doc.name}:`, err)
              if (updateAttachmentProcessing) {
                updateAttachmentProcessing(doc.name, 'error')
              }
              throw err // Re-throw to handle in outer catch
            }
          }
        } catch (err) {
          console.error('Failed to ingest documents:', err)
          const desc = err instanceof Error ? err.message : String(err)
          toast.error('Failed to index attachments', { description: desc })
          // Don't continue with message send if ingestion failed
          return
        }
      }

      // All attachments prepared successfully

      const messages = getMessages(activeThread.id)
      const abortController = new AbortController()
      setAbortController(activeThread.id, abortController)
      updateStreamingContent(emptyThreadContent)
      updatePromptProgress(undefined)
      // Do not add new message on retry
      if (troubleshooting)
        addMessage(newUserThreadContent(activeThread.id, message, attachments))
      // All attachments (images + docs) ingested successfully.
      // Build the user content once; use it for both the outbound request
      // and persisting to the store so both are identical.
      if (updateAttachmentProcessing) {
        updateAttachmentProcessing('__CLEAR_ALL__' as any, 'clear_all')
      }
      const userContent = newUserThreadContent(
        activeThread.id,
        message,
        processedAttachments
      )
      if (troubleshooting) {
        addMessage(userContent)
      }
      updateThreadTimestamp(activeThread.id)
      usePrompt.getState().setPrompt('')
      const selectedModel = useModelProvider.getState().selectedModel
@ -296,7 +406,8 @@ export const useChat = () => {
        ? renderInstructions(currentAssistant.instructions)
        : undefined
      )
      if (troubleshooting) builder.addUserMessage(message, attachments)
      // Using addUserMessage for legacy compatibility; it consumes the userContent built above.
      if (troubleshooting) builder.addUserMessage(userContent)

      let isCompleted = false
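Note: the commit does not show ChatInput's implementation of updateAttachmentProcessing; a minimal handler consistent with the status union could look like this (hypothetical sketch — setAttachments is an assumed React state setter over Attachment[]):

  const updateAttachmentProcessing = (
    fileName: string,
    status: 'processing' | 'done' | 'error' | 'clear_docs' | 'clear_all'
  ) => {
    if (status === 'clear_all') return setAttachments([])
    if (status === 'clear_docs')
      return setAttachments((prev) => prev.filter((a) => a.type !== 'document'))
    // Mark the named attachment according to the reported status
    setAttachments((prev) =>
      prev.map((a) =>
        a.name === fileName
          ? {
              ...a,
              processing: status === 'processing',
              processed: status === 'done',
              error: status === 'error' ? 'ingestion failed' : a.error,
            }
          : a
      )
    )
  }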
@ -137,7 +137,9 @@ describe('CompletionMessagesBuilder', () => {
    it('should add user message to messages array', () => {
      const builder = new CompletionMessagesBuilder([])

      builder.addUserMessage('Hello, how are you?')
      builder.addUserMessage(
        createMockThreadMessage('user', 'Hello, how are you?')
      )

      const result = builder.getMessages()
      expect(result).toHaveLength(1)
@ -150,8 +152,8 @@ describe('CompletionMessagesBuilder', () => {
    it('should not add consecutive user messages', () => {
      const builder = new CompletionMessagesBuilder([])

      builder.addUserMessage('First message')
      builder.addUserMessage('Second message')
      builder.addUserMessage(createMockThreadMessage('user', 'First message'))
      builder.addUserMessage(createMockThreadMessage('user', 'Second message'))

      const result = builder.getMessages()
      expect(result).toHaveLength(1)
@ -161,7 +163,7 @@ describe('CompletionMessagesBuilder', () => {
    it('should handle empty user message', () => {
      const builder = new CompletionMessagesBuilder([])

      builder.addUserMessage('')
      builder.addUserMessage(createMockThreadMessage('user', ''))

      const result = builder.getMessages()
      expect(result).toHaveLength(1)
@ -338,7 +340,7 @@ describe('CompletionMessagesBuilder', () => {
        'You are helpful'
      )

      builder.addUserMessage('How are you?')
      builder.addUserMessage(createMockThreadMessage('user', 'How are you?'))
      builder.addAssistantMessage('I am well, thank you!')
      builder.addToolMessage('Tool response', 'call_123')

@ -353,7 +355,7 @@ describe('CompletionMessagesBuilder', () => {
    it('should return the same array reference (not immutable)', () => {
      const builder = new CompletionMessagesBuilder([])

      builder.addUserMessage('Test message')
      builder.addUserMessage(createMockThreadMessage('user', 'Test message'))
      const result1 = builder.getMessages()

      builder.addAssistantMessage('Response')
@ -36,6 +36,8 @@ import { CompletionMessagesBuilder } from './messages'
import { ChatCompletionMessageToolCall } from 'openai/resources'
import { ExtensionManager } from './extension'
import { useAppState } from '@/hooks/useAppState'
import { injectFilesIntoPrompt } from './fileMetadata'
import { Attachment } from '@/types/attachment'

export type ChatCompletionResponse =
  | chatCompletion
@ -54,38 +56,48 @@ export type ChatCompletionResponse =
export const newUserThreadContent = (
  threadId: string,
  content: string,
  attachments?: Array<{
    name: string
    type: string
    size: number
    base64: string
    dataUrl: string
  }>
  attachments?: Attachment[]
): ThreadMessage => {
  // Separate images and documents
  const images = attachments?.filter((a) => a.type === 'image') || []
  const documents = attachments?.filter((a) => a.type === 'document') || []

  // Inject document metadata into the text content (id, name, fileType only - no path)
  const docMetadata = documents
    .filter((doc) => doc.id) // Only include processed documents
    .map((doc) => ({
      id: doc.id!,
      name: doc.name,
      type: doc.fileType,
      size: typeof doc.size === 'number' ? doc.size : undefined,
      chunkCount: typeof doc.chunkCount === 'number' ? doc.chunkCount : undefined,
    }))

  const textWithFiles =
    docMetadata.length > 0 ? injectFilesIntoPrompt(content, docMetadata) : content

  const contentParts = [
    {
      type: ContentType.Text,
      text: {
        value: content,
        value: textWithFiles,
        annotations: [],
      },
    },
  ]

  // Add attachments to content array
  if (attachments) {
    attachments.forEach((attachment) => {
      if (attachment.type.startsWith('image/')) {
  // Add image attachments to content array
  images.forEach((img) => {
    if (img.base64 && img.mimeType) {
      contentParts.push({
        type: ContentType.Image,
        image_url: {
          url: `data:${attachment.type};base64,${attachment.base64}`,
          url: `data:${img.mimeType};base64,${img.base64}`,
          detail: 'auto',
        },
      } as any)
    }
  })
  }

  return {
    type: 'text',
web-app/src/lib/fileMetadata.ts (new file, 96 lines)
@ -0,0 +1,96 @@
/**
 * Utility functions for embedding and extracting file metadata from user prompts
 */

export interface FileMetadata {
  id: string
  name: string
  type?: string
  size?: number
  chunkCount?: number
}

const FILE_METADATA_START = '[ATTACHED_FILES]'
const FILE_METADATA_END = '[/ATTACHED_FILES]'

/**
 * Inject file metadata into user prompt at the end
 * @param prompt - The user's message
 * @param files - Array of file metadata
 * @returns Prompt with embedded file metadata
 */
export function injectFilesIntoPrompt(
  prompt: string,
  files: FileMetadata[]
): string {
  if (!files || files.length === 0) return prompt

  const fileLines = files
    .map((file) => {
      const parts = [`file_id: ${file.id}`, `name: ${file.name}`]
      if (file.type) parts.push(`type: ${file.type}`)
      if (typeof file.size === 'number') parts.push(`size: ${file.size}`)
      if (typeof file.chunkCount === 'number') parts.push(`chunks: ${file.chunkCount}`)
      return `- ${parts.join(', ')}`
    })
    .join('\n')

  const fileBlock = `\n\n${FILE_METADATA_START}\n${fileLines}\n${FILE_METADATA_END}`

  return prompt + fileBlock
}

/**
 * Extract file metadata from user prompt
 * @param prompt - The prompt potentially containing file metadata
 * @returns Object containing extracted files and clean prompt
 */
export function extractFilesFromPrompt(prompt: string): {
  files: FileMetadata[]
  cleanPrompt: string
} {
  if (!prompt.includes(FILE_METADATA_START)) {
    return { files: [], cleanPrompt: prompt }
  }

  const startIndex = prompt.indexOf(FILE_METADATA_START)
  const endIndex = prompt.indexOf(FILE_METADATA_END)

  if (startIndex === -1 || endIndex === -1 || endIndex <= startIndex) {
    return { files: [], cleanPrompt: prompt }
  }

  // Extract the file metadata block
  const fileBlock = prompt.substring(
    startIndex + FILE_METADATA_START.length,
    endIndex
  )

  // Parse file metadata (flexible key:value parser)
  const files: FileMetadata[] = []
  const lines = fileBlock.trim().split('\n')
  for (const line of lines) {
    const trimmed = line.replace(/^\s*-\s*/, '').trim()
    const parts = trimmed.split(',')
    const map: Record<string, string> = {}
    for (const part of parts) {
      const [k, ...rest] = part.split(':')
      if (!k || rest.length === 0) continue
      map[k.trim()] = rest.join(':').trim()
    }
    const id = map['file_id']
    const name = map['name']
    if (!id || !name) continue
    const type = map['type']
    const size = map['size'] ? Number(map['size']) : undefined
    const chunkCount = map['chunks'] ? Number(map['chunks']) : undefined
    files.push({
      id,
      name,
      ...(type && { type }),
      ...(typeof size === 'number' && !Number.isNaN(size) ? { size } : {}),
      ...(typeof chunkCount === 'number' && !Number.isNaN(chunkCount) ? { chunkCount } : {}),
    })
  }

  // Extract clean prompt (everything before [ATTACHED_FILES])
  const cleanPrompt = prompt
    .substring(0, startIndex)
    .trim()

  return { files, cleanPrompt }
}
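Note: a round trip through these helpers, for reference (illustrative values, not part of the commit — the file_id is made up):

  const prompt = injectFilesIntoPrompt('Summarize this report', [
    { id: '01HZEXAMPLE', name: 'report.pdf', type: 'pdf', size: 52433, chunkCount: 12 },
  ])
  // prompt now ends with:
  // [ATTACHED_FILES]
  // - file_id: 01HZEXAMPLE, name: report.pdf, type: pdf, size: 52433, chunks: 12
  // [/ATTACHED_FILES]

  const { files, cleanPrompt } = extractFilesFromPrompt(prompt)
  // cleanPrompt === 'Summarize this report'
  // files[0] === { id: '01HZEXAMPLE', name: 'report.pdf', type: 'pdf', size: 52433, chunkCount: 12 }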
@ -3,6 +3,7 @@ import { ChatCompletionMessageParam } from 'token.js'
import { ChatCompletionMessageToolCall } from 'openai/resources'
import { ThreadMessage } from '@janhq/core'
import { removeReasoningContent } from '@/utils/reasoning'
// Attachments are now handled upstream in newUserThreadContent

/**
 * @fileoverview Helper functions for creating chat completion request.
@ -21,106 +22,62 @@ export class CompletionMessagesBuilder {
    this.messages.push(
      ...messages
        .filter((e) => !e.metadata?.error)
        .map<ChatCompletionMessageParam>((msg) => {
          if (msg.role === 'assistant') {
            return {
              role: msg.role,
              content: removeReasoningContent(
                msg.content[0]?.text?.value || '.'
              ),
            } as ChatCompletionMessageParam
          } else {
            // For user messages, handle multimodal content
            if (msg.content.length > 1) {
              // Multiple content parts (text + images + files)

              const content = msg.content.map((contentPart) => {
                if (contentPart.type === 'text') {
                  return {
                    type: 'text',
                    text: contentPart.text?.value || '',
                  }
                } else if (contentPart.type === 'image_url') {
                  return {
                    type: 'image_url',
                    image_url: {
                      url: contentPart.image_url?.url || '',
                      detail: contentPart.image_url?.detail || 'auto',
                    },
                  }
                } else {
                  return contentPart
                }
              })
              return {
                role: msg.role,
                content,
              } as ChatCompletionMessageParam
            } else {
              // Single text content
              return {
                role: msg.role,
                content: msg.content[0]?.text?.value || '.',
              } as ChatCompletionMessageParam
            }
          }
        })
        .map<ChatCompletionMessageParam>((msg) => this.toCompletionParamFromThread(msg))
    )
  }

  // Normalize a ThreadMessage into a ChatCompletionMessageParam for Token.js
  private toCompletionParamFromThread(msg: ThreadMessage): ChatCompletionMessageParam {
    if (msg.role === 'assistant') {
      return {
        role: 'assistant',
        content: removeReasoningContent(msg.content?.[0]?.text?.value || '.'),
      } as ChatCompletionMessageParam
    }

    // System messages are uncommon here; normalize to plain text
    if (msg.role === 'system') {
      return {
        role: 'system',
        content: msg.content?.[0]?.text?.value || '.',
      } as ChatCompletionMessageParam
    }

    // User messages: handle multimodal content
    if (Array.isArray(msg.content) && msg.content.length > 1) {
      const content = msg.content.map((part: any) => {
        if (part.type === 'text') {
          return { type: 'text', text: part.text?.value ?? '' }
        }
        if (part.type === 'image_url') {
          return {
            type: 'image_url',
            image_url: { url: part.image_url?.url || '', detail: part.image_url?.detail || 'auto' },
          }
        }
        return part
      })
      return { role: 'user', content } as any
    }
    // Single text part
    const text = msg?.content?.[0]?.text?.value ?? '.'
    return { role: 'user', content: text }
  }

  /**
   * Add a user message to the messages array.
   * @param content - The content of the user message.
   * @param attachments - Optional attachments for the message.
   * Add a user message to the messages array from a parsed ThreadMessage.
   * Upstream code should construct the message via newUserThreadContent
   * and pass it here to avoid duplicated logic.
   */
  addUserMessage(
    content: string,
    attachments?: Array<{
      name: string
      type: string
      size: number
      base64: string
      dataUrl: string
    }>
  ) {
  addUserMessage(message: ThreadMessage) {
    if (message.role !== 'user') {
      throw new Error('addUserMessage expects a user ThreadMessage')
    }
    // Ensure no consecutive user messages
    if (this.messages[this.messages.length - 1]?.role === 'user') {
      this.messages.pop()
    }

    // Handle multimodal content with attachments
    if (attachments && attachments.length > 0) {
      const messageContent: any[] = [
        {
          type: 'text',
          text: content,
        },
      ]

      // Add attachments (images and PDFs)
      attachments.forEach((attachment) => {
        if (attachment.type.startsWith('image/')) {
          messageContent.push({
            type: 'image_url',
            image_url: {
              url: `data:${attachment.type};base64,${attachment.base64}`,
              detail: 'auto',
            },
          })
        }
      })

      this.messages.push({
        role: 'user',
        content: messageContent,
      } as any)
    } else {
      // Text-only message
      this.messages.push({
        role: 'user',
        content: content,
      })
    }
    this.messages.push(this.toCompletionParamFromThread(message))
  }

  /**
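Note: with this refactor the call pattern becomes the sketch below (assuming processedAttachments and the prior ThreadMessage[] are in scope, as in useChat above):

  const userContent = newUserThreadContent(activeThread.id, message, processedAttachments)
  const builder = new CompletionMessagesBuilder(existingMessages)
  builder.addUserMessage(userContent) // throws unless userContent.role === 'user'
  const payload = builder.getMessages() // ChatCompletionMessageParam[] for the completion request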
@ -29,6 +29,8 @@ import { DefaultDeepLinkService } from './deeplink/default'
import { DefaultProjectsService } from './projects/default'
import { DefaultRAGService } from './rag/default'
import type { RAGService } from './rag/types'
import { DefaultUploadsService } from './uploads/default'
import type { UploadsService } from './uploads/types'

// Import service types
import type { ThemeService } from './theme/types'
@ -73,6 +75,7 @@ export interface ServiceHub {
  deeplink(): DeepLinkService
  projects(): ProjectsService
  rag(): RAGService
  uploads(): UploadsService
}

class PlatformServiceHub implements ServiceHub {
@ -96,6 +99,7 @@ class PlatformServiceHub implements ServiceHub {
  private deepLinkService: DeepLinkService = new DefaultDeepLinkService()
  private projectsService: ProjectsService = new DefaultProjectsService()
  private ragService: RAGService = new DefaultRAGService()
  private uploadsService: UploadsService = new DefaultUploadsService()
  private initialized = false

  /**
@ -352,6 +356,11 @@ class PlatformServiceHub implements ServiceHub {
    this.ensureInitialized()
    return this.ragService
  }

  uploads(): UploadsService {
    this.ensureInitialized()
    return this.uploadsService
  }
}

export async function initializeServiceHub(): Promise<ServiceHub> {
web-app/src/services/uploads/default.ts (new file, 32 lines)
@ -0,0 +1,32 @@
import type { UploadsService, UploadResult } from './types'
import type { Attachment } from '@/types/attachment'
import { ulid } from 'ulidx'
import { ExtensionManager } from '@/lib/extension'
import { ExtensionTypeEnum, type RAGExtension, type IngestAttachmentsResult } from '@janhq/core'

export class DefaultUploadsService implements UploadsService {
  async ingestImage(_threadId: string, attachment: Attachment): Promise<UploadResult> {
    if (attachment.type !== 'image') throw new Error('ingestImage: attachment is not image')
    // Placeholder upload flow; swap for real API call when backend is ready
    await new Promise((r) => setTimeout(r, 100))
    return { id: ulid() }
  }

  async ingestFileAttachment(threadId: string, attachment: Attachment): Promise<UploadResult> {
    if (attachment.type !== 'document') throw new Error('ingestFileAttachment: attachment is not document')
    const ext = ExtensionManager.getInstance().get<RAGExtension>(ExtensionTypeEnum.RAG)
    if (!ext?.ingestAttachments) throw new Error('RAG extension not available')
    const res: IngestAttachmentsResult = await ext.ingestAttachments(threadId, [
      { path: attachment.path!, name: attachment.name, type: attachment.fileType, size: attachment.size },
    ])
    const files = res.files
    if (Array.isArray(files) && files[0]?.id) {
      return {
        id: files[0].id,
        size: typeof files[0].size === 'number' ? Number(files[0].size) : undefined,
        chunkCount: typeof files[0].chunk_count === 'number' ? Number(files[0].chunk_count) : undefined,
      }
    }
    throw new Error('Failed to resolve ingested attachment id')
  }
}
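Note: this is the service useChat resolves via serviceHub.uploads(); condensed from the hook above, a document attachment is ingested as:

  const res = await serviceHub.uploads().ingestFileAttachment(activeThread.id, doc)
  // res.id identifies the ingested file; res.chunkCount is the number of
  // vector chunks the RAG extension inserted for it.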
web-app/src/services/uploads/types.ts (new file, 16 lines)
@ -0,0 +1,16 @@
import type { Attachment } from '@/types/attachment'

export type UploadResult = {
  id: string
  url?: string
  size?: number
  chunkCount?: number
}

export interface UploadsService {
  // Ingest an image attachment (placeholder upload)
  ingestImage(threadId: string, attachment: Attachment): Promise<UploadResult>

  // Ingest a document attachment in the context of a thread
  ingestFileAttachment(threadId: string, attachment: Attachment): Promise<UploadResult>
}
web-app/src/types/attachment.ts (new file, 57 lines)
@ -0,0 +1,57 @@
/**
 * Unified attachment type for both images and documents
 */
export type Attachment = {
  name: string
  type: 'image' | 'document'

  // Common fields
  size?: number
  chunkCount?: number
  processing?: boolean
  processed?: boolean
  error?: string

  // For images (before upload)
  base64?: string
  dataUrl?: string
  mimeType?: string

  // For documents (local files)
  path?: string
  fileType?: string // e.g., 'pdf', 'docx'

  // After processing (images uploaded, documents ingested)
  id?: string
}

/**
 * Helper to create image attachment
 */
export function createImageAttachment(data: {
  name: string
  base64: string
  dataUrl: string
  mimeType: string
  size: number
}): Attachment {
  return {
    ...data,
    type: 'image',
  }
}

/**
 * Helper to create document attachment
 */
export function createDocumentAttachment(data: {
  name: string
  path: string
  fileType?: string
  size?: number
}): Attachment {
  return {
    ...data,
    type: 'document',
  }
}
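Note: both helpers produce the same Attachment shape, so ChatInput can keep one mixed list (illustrative values only):

  const img = createImageAttachment({
    name: 'photo.png',
    mimeType: 'image/png',
    size: 2048,
    base64: '<base64 payload>',
    dataUrl: 'data:image/png;base64,<base64 payload>',
  })
  const doc = createDocumentAttachment({
    name: 'report.pdf',
    path: '/tmp/report.pdf',
    fileType: 'pdf',
  })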