fix: upload document mid-thread does not work (#2504)
parent 934ea00b0d
commit 7857a6e75e
@@ -1,5 +1,5 @@
 import { getJanDataFolderPath, normalizeFilePath } from '@janhq/core/node'
-import { retrieval } from './tools/retrieval'
+import { retrieval } from './retrieval'
 import path from 'path'

 export function toolRetrievalUpdateTextSplitter(

@@ -5,7 +5,7 @@ import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
 import { HNSWLib } from 'langchain/vectorstores/hnswlib'

 import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
-import { readEmbeddingEngine } from '../../engine'
+import { readEmbeddingEngine } from './engine'

 export class Retrieval {
   public chunkSize: number = 100

@@ -18,7 +18,7 @@ export class RetrievalTool extends InferenceTool {
     tool?: AssistantTool
   ): Promise<MessageRequest> {
     if (!data.model || !data.messages) {
-      return Promise.resolve(data)
+      return Promise.resolve(this.normalize(data))
     }

     const latestMessage = data.messages[data.messages.length - 1]
@@ -48,7 +48,7 @@ export class RetrievalTool extends InferenceTool {
     ) {
       // No document ingested, reroute the result to inference engine

-      return Promise.resolve(data)
+      return Promise.resolve(this.normalize(data))
     }
     // 2. Load agent on thread changed
     if (this.retrievalThreadId !== data.threadId) {
@@ -87,8 +87,14 @@ export class RetrievalTool extends InferenceTool {
         .replace('{QUESTION}', prompt)
     }

-    // Filter out all the messages that are not text
-    data.messages = data.messages.map((message) => {
+    // 4. Reroute the result to inference engine
+    return Promise.resolve(this.normalize(data))
+  }
+
+  // Filter out all the messages that are not text
+  // TODO: Remove it until engines can handle multiple content types
+  normalize(request: MessageRequest): MessageRequest {
+    request.messages = request.messages?.map((message) => {
       if (
         message.content &&
         typeof message.content !== 'string' &&
@@ -101,8 +107,6 @@ export class RetrievalTool extends InferenceTool {
       }
       return message
     })
-
-    // 4. Reroute the result to inference engine
-    return Promise.resolve(data)
+    return request
   }
 }
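
The new normalize helper is only partly visible above: the diff context cuts off inside the if condition and its body. As a rough, self-contained sketch of the technique the comments describe (keeping only the text part of a multi-part content array before the request is handed back to the inference engine), using simplified stand-in types rather than the real @janhq/core ones and an assumed body for the elided condition:

// Sketch only: stand-in types; the real ones live in @janhq/core, and the
// exact condition/body are elided by the diff context above.
type ContentPart = { type: 'text' | 'image_url' | 'doc_url'; text?: string }
type SketchMessage = { role: string; content?: string | ContentPart[] }
type SketchRequest = { messages?: SketchMessage[] }

// Flatten any multi-part message content down to its text part so engines
// that only accept string content can still process the request.
function normalize(request: SketchRequest): SketchRequest {
  request.messages = request.messages?.map((message) => {
    if (
      message.content &&
      typeof message.content !== 'string' &&
      Array.isArray(message.content) // assumed remainder of the elided check
    ) {
      const textPart = message.content.find((part) => part.type === 'text')
      message.content = textPart?.text ?? ''
    }
    return message
  })
  return request
}

In the diff itself this now runs on every return path of process(), including the early returns, which is presumably what lets a document uploaded mid-thread reach the engine as plain text instead of failing.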

@@ -1,6 +1,6 @@
 {
   "name": "@janhq/model-extension",
-  "version": "1.0.27",
+  "version": "1.0.28",
   "description": "Model Management Extension provides model exploration and seamless downloads",
   "main": "dist/index.js",
   "module": "dist/module.js",

@@ -10,7 +10,9 @@
   "version": "1.0",
   "description": "Groq Llama 2 70b with supercharged speed!",
   "format": "api",
-  "settings": {},
+  "settings": {
+    "text_model": false
+  },
   "parameters": {
     "max_tokens": 4096,
     "temperature": 0.7,

@@ -10,7 +10,9 @@
   "version": "1.0",
   "description": "Groq Mixtral 8x7b Instruct is Mixtral with supercharged speed!",
   "format": "api",
-  "settings": {},
+  "settings": {
+    "text_model": false
+  },
   "parameters": {
     "max_tokens": 4096,
     "temperature": 0.7,
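
Both Groq manifests change the same way: an explicit "text_model": false entry is added to a previously empty settings object. How the flag is consumed is not shown in this diff; purely as an illustration (the ModelManifest shape, the isTextModel helper, and the default-to-true convention below are assumptions, not Jan's API), a consumer could read it safely with optional chaining:

// Hypothetical shape for illustration; only the fields touched above are modeled.
interface ModelManifest {
  settings?: { text_model?: boolean }
}

// Assumed convention: treat the flag as true unless the manifest explicitly
// sets "text_model": false, as these Groq entries now do.
function isTextModel(model: ModelManifest): boolean {
  return model.settings?.text_model !== false
}

const groqEntry: ModelManifest = { settings: { text_model: false } }
console.log(isTextModel(groqEntry)) // false
console.log(isTextModel({ settings: {} })) // true (flag absent)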

@@ -91,7 +91,7 @@ export class MessageRequestBuilder {
         },
       ] as ChatCompletionMessageContent,
     }
-    this.messages = [message, ...this.messages]
+    this.messages = [...this.messages, message]
     return this
   }

@@ -113,7 +113,7 @@ export class MessageRequestBuilder {
       ] as ChatCompletionMessageContent,
     }

-    this.messages = [message, ...this.messages]
+    this.messages = [...this.messages, message]
     return this
   }

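The two MessageRequestBuilder hunks are the same one-line fix: the freshly built doc/image message is now appended to this.messages rather than prepended, so an attachment added mid-thread lands after the existing history instead of in front of it. A standalone sketch of the difference (plain object types, not the real builder class):

// Standalone illustration of prepend vs. append; not the real builder.
type Msg = { role: string; content: string }

const history: Msg[] = [
  { role: 'system', content: 'You are a helpful assistant.' },
  { role: 'user', content: 'Earlier question' },
]
const docMessage: Msg = { role: 'user', content: '<doc uploaded mid-thread>' }

// Before: the new message jumps in front of the whole history.
const prepended = [docMessage, ...history]

// After: chronological order is preserved, matching how the thread was built.
const appended = [...history, docMessage]

console.log(prepended.map((m) => m.content))
// [ '<doc uploaded mid-thread>', 'You are a helpful assistant.', 'Earlier question' ]
console.log(appended.map((m) => m.content))
// [ 'You are a helpful assistant.', 'Earlier question', '<doc uploaded mid-thread>' ]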