diff --git a/core/src/browser/extensions/engines/AIEngine.ts b/core/src/browser/extensions/engines/AIEngine.ts
index 2d1bdb3c2..6616208ad 100644
--- a/core/src/browser/extensions/engines/AIEngine.ts
+++ b/core/src/browser/extensions/engines/AIEngine.ts
@@ -40,12 +40,13 @@ export abstract class AIEngine extends BaseExtension {
   * Stops the model.
   */
  async unloadModel(model?: Model): Promise<void> {
-    if (model?.engine && model.engine.toString() !== this.provider) return Promise.resolve()
+    if (model?.engine && model.engine.toString() !== this.provider)
+      return Promise.resolve()
     events.emit(ModelEvent.OnModelStopped, model ?? {})
     return Promise.resolve()
   }
 
-  /*
+  /**
    * Inference request
    */
   inference(data: MessageRequest) {}
diff --git a/core/src/browser/extensions/engines/OAIEngine.ts b/core/src/browser/extensions/engines/OAIEngine.ts
index 61032357c..50940a97b 100644
--- a/core/src/browser/extensions/engines/OAIEngine.ts
+++ b/core/src/browser/extensions/engines/OAIEngine.ts
@@ -76,7 +76,7 @@ export abstract class OAIEngine extends AIEngine {
     const timestamp = Date.now() / 1000
     const message: ThreadMessage = {
       id: ulid(),
-      thread_id: data.threadId,
+      thread_id: data.thread?.id ?? data.threadId,
       type: data.type,
       assistant_id: data.assistantId,
       role: ChatCompletionRole.Assistant,
@@ -104,6 +104,7 @@ export abstract class OAIEngine extends AIEngine {
       messages: data.messages ?? [],
       model: model.id,
       stream: true,
+      tools: data.tools,
       ...model.parameters,
     }
     if (this.transformPayload) {
diff --git a/core/src/browser/index.ts b/core/src/browser/index.ts
index a6ce187ca..5912d8c3b 100644
--- a/core/src/browser/index.ts
+++ b/core/src/browser/index.ts
@@ -28,12 +28,6 @@ export * from './extension'
  */
 export * from './extensions'
 
-/**
- * Export all base tools.
- * @module
- */
-export * from './tools'
-
 /**
  * Export all base models.
  * @module
diff --git a/core/src/browser/tools/index.test.ts b/core/src/browser/tools/index.test.ts
deleted file mode 100644
index 8a24d3bb6..000000000
--- a/core/src/browser/tools/index.test.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-it('should not throw any errors when imported', () => {
-  expect(() => require('./index')).not.toThrow();
-})
diff --git a/core/src/browser/tools/index.ts b/core/src/browser/tools/index.ts
deleted file mode 100644
index 24cd12780..000000000
--- a/core/src/browser/tools/index.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export * from './manager'
-export * from './tool'
diff --git a/core/src/browser/tools/manager.ts b/core/src/browser/tools/manager.ts
deleted file mode 100644
index b323ad7ce..000000000
--- a/core/src/browser/tools/manager.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-import { AssistantTool, MessageRequest } from '../../types'
-import { InferenceTool } from './tool'
-
-/**
- * Manages the registration and retrieval of inference tools.
- */
-export class ToolManager {
-  public tools = new Map<string, InferenceTool>()
-
-  /**
-   * Registers a tool.
-   * @param tool - The tool to register.
-   */
-  register<T extends InferenceTool>(tool: T) {
-    this.tools.set(tool.name, tool)
-  }
-
-  /**
-   * Retrieves a tool by its name.
-   * @param name - The name of the tool to retrieve.
-   * @returns The tool, if found.
-   */
-  get<T extends InferenceTool>(name: string): T | undefined {
-    return this.tools.get(name) as T | undefined
-  }
-
-  /*
-   ** Process the message request with the tools.
-   */
-  process(request: MessageRequest, tools: AssistantTool[]): Promise<MessageRequest> {
-    return tools.reduce((prevPromise, currentTool) => {
-      return prevPromise.then((prevResult) => {
-        return currentTool.enabled
-          ? this.get(currentTool.type)?.process(prevResult, currentTool) ??
-            Promise.resolve(prevResult)
-          : Promise.resolve(prevResult)
-      })
-    }, Promise.resolve(request))
-  }
-
-  /**
-   * The instance of the tool manager.
-   */
-  static instance(): ToolManager {
-    return (window.core?.toolManager as ToolManager) ?? new ToolManager()
-  }
-}
diff --git a/core/src/browser/tools/tool.test.ts b/core/src/browser/tools/tool.test.ts
deleted file mode 100644
index dcb478478..000000000
--- a/core/src/browser/tools/tool.test.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-import { ToolManager } from '../../browser/tools/manager'
-import { InferenceTool } from '../../browser/tools/tool'
-import { AssistantTool, MessageRequest } from '../../types'
-
-class MockInferenceTool implements InferenceTool {
-  name = 'mockTool'
-  process(request: MessageRequest, tool: AssistantTool): Promise<MessageRequest> {
-    return Promise.resolve(request)
-  }
-}
-
-it('should register a tool', () => {
-  const manager = new ToolManager()
-  const tool = new MockInferenceTool()
-  manager.register(tool)
-  expect(manager.get(tool.name)).toBe(tool)
-})
-
-it('should retrieve a tool by its name', () => {
-  const manager = new ToolManager()
-  const tool = new MockInferenceTool()
-  manager.register(tool)
-  const retrievedTool = manager.get(tool.name)
-  expect(retrievedTool).toBe(tool)
-})
-
-it('should return undefined for a non-existent tool', () => {
-  const manager = new ToolManager()
-  const retrievedTool = manager.get('nonExistentTool')
-  expect(retrievedTool).toBeUndefined()
-})
-
-it('should process the message request with enabled tools', async () => {
-  const manager = new ToolManager()
-  const tool = new MockInferenceTool()
-  manager.register(tool)
-
-  const request: MessageRequest = { message: 'test' } as any
-  const tools: AssistantTool[] = [{ type: 'mockTool', enabled: true }] as any
-
-  const result = await manager.process(request, tools)
-  expect(result).toBe(request)
-})
-
-it('should skip processing for disabled tools', async () => {
-  const manager = new ToolManager()
-  const tool = new MockInferenceTool()
-  manager.register(tool)
-
-  const request: MessageRequest = { message: 'test' } as any
-  const tools: AssistantTool[] = [{ type: 'mockTool', enabled: false }] as any
-
-  const result = await manager.process(request, tools)
-  expect(result).toBe(request)
-})
-
-it('should throw an error when process is called without implementation', () => {
-  class TestTool extends InferenceTool {
-    name = 'testTool'
-  }
-  const tool = new TestTool()
-  expect(() => tool.process({} as MessageRequest)).toThrowError()
-})
diff --git a/core/src/browser/tools/tool.ts b/core/src/browser/tools/tool.ts
deleted file mode 100644
index 0fd342933..000000000
--- a/core/src/browser/tools/tool.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-import { AssistantTool, MessageRequest } from '../../types'
-
-/**
- * Represents a base inference tool.
- */
-export abstract class InferenceTool {
-  abstract name: string
-  /*
-   ** Process a message request and return the processed message request.
-   */
-  abstract process(request: MessageRequest, tool?: AssistantTool): Promise<MessageRequest>
-}
diff --git a/core/src/types/message/messageEntity.ts b/core/src/types/message/messageEntity.ts
index edd253a57..280ce75a3 100644
--- a/core/src/types/message/messageEntity.ts
+++ b/core/src/types/message/messageEntity.ts
@@ -43,6 +43,9 @@ export type ThreadMessage = {
  * @data_transfer_object
  */
 export type MessageRequest = {
+  /**
+   * The id of the message request.
+   */
   id?: string
 
   /**
@@ -71,6 +74,11 @@
   // TODO: deprecate threadId field
   thread?: Thread
 
+  /**
+   * ChatCompletion tools
+   */
+  tools?: MessageTool[]
+
   /** Engine name to process */
   engine?: string
 
@@ -78,6 +86,24 @@
   type?: string
 }
 
+/**
+ * ChatCompletion Tool parameters
+ */
+export type MessageTool = {
+  type: string
+  function: MessageFunction
+}
+
+/**
+ * ChatCompletion Tool's function parameters
+ */
+export type MessageFunction = {
+  name: string
+  description?: string
+  parameters?: Record
+  strict?: boolean
+}
+
 /**
  * The status of the message.
  * @data_transfer_object
diff --git a/extensions/assistant-extension/package.json b/extensions/assistant-extension/package.json
index 08ccb3b3d..4761aa900 100644
--- a/extensions/assistant-extension/package.json
+++ b/extensions/assistant-extension/package.json
@@ -8,17 +8,10 @@
   "author": "Jan ",
   "license": "AGPL-3.0",
   "scripts": {
-    "clean:modules": "rimraf node_modules/pdf-parse/test && cd node_modules/pdf-parse/lib/pdf.js && rimraf v1.9.426 v1.10.88 v2.0.550",
-    "build-universal-hnswlib": "[ \"$IS_TEST\" = \"true\" ] && echo \"Skip universal build\" || (cd node_modules/hnswlib-node && arch -x86_64 npx node-gyp rebuild --arch=x64 && mv build/Release/addon.node ./addon-amd64.node && node-gyp rebuild --arch=arm64 && mv build/Release/addon.node ./addon-arm64.node && lipo -create -output build/Release/addon.node ./addon-arm64.node ./addon-amd64.node && rm ./addon-arm64.node && rm ./addon-amd64.node)",
-    "build": "yarn clean:modules && rolldown -c rolldown.config.mjs",
-    "build:publish:linux": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install",
-    "build:publish:darwin": "rimraf *.tgz --glob || true && yarn build-universal-hnswlib && yarn build && ../../.github/scripts/auto-sign.sh && npm pack && cpx *.tgz ../../pre-install",
-    "build:publish:win32": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install",
-    "build:publish": "run-script-os",
-    "build:dev": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install"
+    "build": "rolldown -c rolldown.config.mjs",
+    "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install"
   },
   "devDependencies": {
-    "@types/pdf-parse": "^1.1.4",
     "cpx": "^1.5.0",
     "rimraf": "^3.0.2",
     "rolldown": "1.0.0-beta.1",
@@ -27,11 +20,6 @@
   },
   "dependencies": {
     "@janhq/core": "../../core/package.tgz",
-    "@langchain/community": "0.0.13",
-    "hnswlib-node": "^1.4.2",
-    "langchain": "^0.0.214",
-    "node-gyp": "^11.0.0",
-    "pdf-parse": "^1.1.1",
     "ts-loader": "^9.5.0"
   },
   "files": [
@@ -40,8 +28,7 @@
     "README.md"
   ],
   "bundleDependencies": [
-    "@janhq/core",
-    "hnswlib-node"
+    "@janhq/core"
   ],
   "installConfig": {
     "hoistingLimits": "workspaces"
diff --git a/extensions/assistant-extension/rolldown.config.mjs b/extensions/assistant-extension/rolldown.config.mjs
index e549ea7d9..436de93a8 100644
--- a/extensions/assistant-extension/rolldown.config.mjs
+++ b/extensions/assistant-extension/rolldown.config.mjs
@@ -13,22 +13,5 @@ export default defineConfig([
       NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
       VERSION: JSON.stringify(pkgJson.version),
     },
-  },
-  {
-    input: 'src/node/index.ts',
-    external: ['@janhq/core/node', 'path', 'hnswlib-node'],
-    output: {
-      format: 'cjs',
-      file: 'dist/node/index.js',
-      sourcemap: false,
-      inlineDynamicImports: true,
-    },
-    resolve: {
-      extensions: ['.js', '.ts'],
-    },
-    define: {
-      CORTEX_API_URL:
-        JSON.stringify(`http://127.0.0.1:${process.env.CORTEX_API_PORT ?? "39291"}`),
-    },
-    platform: 'node',
-  },
+  }
 ])
diff --git a/extensions/assistant-extension/src/index.ts b/extensions/assistant-extension/src/index.ts
index 0f9b52808..bb253bd7f 100644
--- a/extensions/assistant-extension/src/index.ts
+++ b/extensions/assistant-extension/src/index.ts
@@ -1,12 +1,7 @@
-import { Assistant, AssistantExtension, ToolManager } from '@janhq/core'
-import { RetrievalTool } from './tools/retrieval'
+import { Assistant, AssistantExtension } from '@janhq/core'
 
 export default class JanAssistantExtension extends AssistantExtension {
-
-  async onLoad() {
-    // Register the retrieval tool
-    ToolManager.instance().register(new RetrievalTool())
-  }
+  async onLoad() {}
 
   /**
    * Called when the extension is unloaded.
diff --git a/extensions/assistant-extension/src/node/index.ts b/extensions/assistant-extension/src/node/index.ts
deleted file mode 100644
index 731890b34..000000000
--- a/extensions/assistant-extension/src/node/index.ts
+++ /dev/null
@@ -1,45 +0,0 @@
-import { getJanDataFolderPath } from '@janhq/core/node'
-import { retrieval } from './retrieval'
-import path from 'path'
-
-export function toolRetrievalUpdateTextSplitter(
-  chunkSize: number,
-  chunkOverlap: number
-) {
-  retrieval.updateTextSplitter(chunkSize, chunkOverlap)
-}
-export async function toolRetrievalIngestNewDocument(
-  thread: string,
-  file: string,
-  model: string,
-  engine: string,
-  useTimeWeighted: boolean
-) {
-  const threadPath = path.join(getJanDataFolderPath(), 'threads', thread)
-  const filePath = path.join(getJanDataFolderPath(), 'files', file)
-  retrieval.updateEmbeddingEngine(model, engine)
-  return retrieval
-    .ingestAgentKnowledge(filePath, `${threadPath}/memory`, useTimeWeighted)
-    .catch((err) => {
-      console.error(err)
-    })
-}
-
-export async function toolRetrievalLoadThreadMemory(threadId: string) {
-  return retrieval
-    .loadRetrievalAgent(
-      path.join(getJanDataFolderPath(), 'threads', threadId, 'memory')
-    )
-    .catch((err) => {
-      console.error(err)
-    })
-}
-
-export async function toolRetrievalQueryResult(
-  query: string,
-  useTimeWeighted: boolean = false
-) {
-  return retrieval.generateResult(query, useTimeWeighted).catch((err) => {
-    console.error(err)
-  })
-}
diff --git a/extensions/assistant-extension/src/node/retrieval.ts b/extensions/assistant-extension/src/node/retrieval.ts
deleted file mode 100644
index 0e80fd2d7..000000000
--- a/extensions/assistant-extension/src/node/retrieval.ts
+++ /dev/null
@@ -1,121 +0,0 @@
-import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'
-import { formatDocumentsAsString } from 'langchain/util/document'
-import { PDFLoader } from 'langchain/document_loaders/fs/pdf'
-
-import { TimeWeightedVectorStoreRetriever } from 'langchain/retrievers/time_weighted'
-import { MemoryVectorStore } from 'langchain/vectorstores/memory'
-
-import { HNSWLib } from 'langchain/vectorstores/hnswlib'
-
-import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
-
-export class Retrieval {
-  public chunkSize: number = 100
-  public chunkOverlap?: number = 0
-  private retriever: any
-
-  private embeddingModel?: OpenAIEmbeddings = undefined
-  private textSplitter?: RecursiveCharacterTextSplitter
-
-  // to support time-weighted retrieval
-  private timeWeightedVectorStore: MemoryVectorStore
-  private timeWeightedretriever: any | TimeWeightedVectorStoreRetriever
-
-  constructor(chunkSize: number = 4000, chunkOverlap: number = 200) {
-    this.updateTextSplitter(chunkSize, chunkOverlap)
-    this.initialize()
-  }
-
-  private async initialize() {
-    const apiKey = await window.core?.api.appToken()
-
-    // declare time-weighted retriever and storage
-    this.timeWeightedVectorStore = new MemoryVectorStore(
-      new OpenAIEmbeddings(
-        { openAIApiKey: apiKey },
-        { basePath: `${CORTEX_API_URL}/v1` }
-      )
-    )
-    this.timeWeightedretriever = new TimeWeightedVectorStoreRetriever({
-      vectorStore: this.timeWeightedVectorStore,
-      memoryStream: [],
-      searchKwargs: 2,
-    })
-  }
-
-  public updateTextSplitter(chunkSize: number, chunkOverlap: number): void {
-    this.chunkSize = chunkSize
-    this.chunkOverlap = chunkOverlap
-    this.textSplitter = new RecursiveCharacterTextSplitter({
-      chunkSize: chunkSize,
-      chunkOverlap: chunkOverlap,
-    })
-  }
-
-  public async updateEmbeddingEngine(model: string, engine: string) {
-    const apiKey = await window.core?.api.appToken()
-    this.embeddingModel = new OpenAIEmbeddings(
-      { openAIApiKey: apiKey, model },
-      // TODO: Raw settings
-      { basePath: `${CORTEX_API_URL}/v1` }
-    )
-
-    // update time-weighted embedding model
-    this.timeWeightedVectorStore.embeddings = this.embeddingModel
-  }
-
-  public ingestAgentKnowledge = async (
-    filePath: string,
-    memoryPath: string,
-    useTimeWeighted: boolean
-  ): Promise<void> => {
-    const loader = new PDFLoader(filePath, {
-      splitPages: true,
-    })
-    if (!this.embeddingModel) return Promise.reject()
-    const doc = await loader.load()
-    const docs = await this.textSplitter!.splitDocuments(doc)
-    const vectorStore = await HNSWLib.fromDocuments(docs, this.embeddingModel)
-
-    // add documents with metadata by using the time-weighted retriever in order to support time-weighted retrieval
-    if (useTimeWeighted && this.timeWeightedretriever) {
-      await (
-        this.timeWeightedretriever as TimeWeightedVectorStoreRetriever
-      ).addDocuments(docs)
-    }
-    return vectorStore.save(memoryPath)
-  }
-
-  public loadRetrievalAgent = async (memoryPath: string): Promise<void> => {
-    if (!this.embeddingModel) return Promise.reject()
-    const vectorStore = await HNSWLib.load(memoryPath, this.embeddingModel)
-    this.retriever = vectorStore.asRetriever(2)
-    return Promise.resolve()
-  }
-
-  public generateResult = async (
-    query: string,
-    useTimeWeighted: boolean
-  ): Promise<string> => {
-    if (useTimeWeighted) {
-      if (!this.timeWeightedretriever) {
-        return Promise.resolve(' ')
-      }
-      // use invoke because getRelevantDocuments is deprecated
-      const relevantDocs = await this.timeWeightedretriever.invoke(query)
-      const serializedDoc = formatDocumentsAsString(relevantDocs)
-      return Promise.resolve(serializedDoc)
-    }
-
-    if (!this.retriever) {
-      return Promise.resolve(' ')
-    }
-
-    // should use invoke(query) because getRelevantDocuments is deprecated
-    const relevantDocs = await this.retriever.getRelevantDocuments(query)
-    const serializedDoc = formatDocumentsAsString(relevantDocs)
-    return Promise.resolve(serializedDoc)
-  }
-}
-
-export const retrieval = new Retrieval()
diff --git a/extensions/assistant-extension/src/tools/retrieval.ts b/extensions/assistant-extension/src/tools/retrieval.ts
deleted file mode 100644
index b1a0c3cba..000000000
--- a/extensions/assistant-extension/src/tools/retrieval.ts
+++ /dev/null
@@ -1,118 +0,0 @@
-import {
-  AssistantTool,
-  executeOnMain,
-  fs,
-  InferenceTool,
-  joinPath,
-  MessageRequest,
-} from '@janhq/core'
-
-export class RetrievalTool extends InferenceTool {
-  private _threadDir = 'file://threads'
-  private retrievalThreadId: string | undefined = undefined
-
-  name: string = 'retrieval'
-
-  async process(
-    data: MessageRequest,
-    tool?: AssistantTool
-  ): Promise<MessageRequest> {
-    if (!data.model || !data.messages) {
-      return Promise.resolve(data)
-    }
-
-    const latestMessage = data.messages[data.messages.length - 1]
-
-    // 1. Ingest the document if needed
-    if (
-      latestMessage &&
-      latestMessage.content &&
-      typeof latestMessage.content !== 'string' &&
-      latestMessage.content.length > 1
-    ) {
-      const docFile = latestMessage.content[1]?.doc_url?.url
-      if (docFile) {
-        await executeOnMain(
-          NODE,
-          'toolRetrievalIngestNewDocument',
-          data.thread?.id,
-          docFile,
-          data.model?.id,
-          data.model?.engine,
-          tool?.useTimeWeightedRetriever ?? false
-        )
-      } else {
-        return Promise.resolve(data)
-      }
-    } else if (
-      // Check whether we need to ingest document or not
-      // Otherwise wrong context will be sent
-      !(await fs.existsSync(
-        await joinPath([this._threadDir, data.threadId, 'memory'])
-      ))
-    ) {
-      // No document ingested, reroute the result to inference engine
-
-      return Promise.resolve(data)
-    }
-    // 2. Load agent on thread changed
-    if (this.retrievalThreadId !== data.threadId) {
-      await executeOnMain(NODE, 'toolRetrievalLoadThreadMemory', data.threadId)
-
-      this.retrievalThreadId = data.threadId
-
-      // Update the text splitter
-      await executeOnMain(
-        NODE,
-        'toolRetrievalUpdateTextSplitter',
-        tool?.settings?.chunk_size ?? 4000,
-        tool?.settings?.chunk_overlap ?? 200
-      )
-    }
-
-    // 3. Using the retrieval template with the result and query
-    if (latestMessage.content) {
-      const prompt =
-        typeof latestMessage.content === 'string'
-          ? latestMessage.content
-          : latestMessage.content[0].text
-      // Retrieve the result
-      const retrievalResult = await executeOnMain(
-        NODE,
-        'toolRetrievalQueryResult',
-        prompt,
-        tool?.useTimeWeightedRetriever ?? false
-      )
-      console.debug('toolRetrievalQueryResult', retrievalResult)
-
-      // Update message content
-      if (retrievalResult)
-        data.messages[data.messages.length - 1].content =
-          tool?.settings?.retrieval_template
-            ?.replace('{CONTEXT}', retrievalResult)
-            .replace('{QUESTION}', prompt)
-    }
-
-    // 4. Reroute the result to inference engine
-    return Promise.resolve(this.normalize(data))
-  }
-
-  // Filter out all the messages that are not text
-  // TODO: Remove it until engines can handle multiple content types
-  normalize(request: MessageRequest): MessageRequest {
-    request.messages = request.messages?.map((message) => {
-      if (
-        message.content &&
-        typeof message.content !== 'string' &&
-        (message.content.length ?? 0) > 0
-      ) {
-        return {
-          ...message,
-          content: [message.content[0]],
-        }
-      }
-      return message
-    })
-    return request
-  }
-}
diff --git a/package.json b/package.json
index ea58ead59..2b8b2de15 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,6 @@
     "dev:electron": "yarn copy:assets && yarn workspace jan dev",
     "dev:web:standalone": "concurrently \"yarn workspace @janhq/web dev\" \"wait-on http://localhost:3000 && rsync -av --prune-empty-dirs --include '*/' --include 'dist/***' --include 'package.json' --include 'tsconfig.json' --exclude '*' ./extensions/ web/.next/static/extensions/\"",
     "dev:web": "yarn workspace @janhq/web dev",
-    "dev:web:tauri": "IS_TAURI=true yarn workspace @janhq/web dev",
     "dev:server": "yarn workspace @janhq/server dev",
     "dev": "concurrently -n \"NEXT,ELECTRON\" -c \"yellow,blue\" --kill-others \"yarn dev:web\" \"yarn dev:electron\"",
     "install:cortex:linux:darwin": "cd src-tauri/binaries && ./download.sh",
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index 312392eb5..5ef4e7a4e 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -21,9 +21,7 @@ tauri-build = { version = "2.0.2", features = [] }
 serde_json = "1.0"
 serde = { version = "1.0", features = ["derive"] }
 log = "0.4"
-tauri = { version = "2.1.0", features = [
-    "protocol-asset",
-    'macos-private-api',
+tauri = { version = "2.1.0", features = [ "protocol-asset", "macos-private-api",
     "test",
 ] }
 tauri-plugin-log = "2.0.0-rc"
@@ -36,10 +34,12 @@
 tauri-plugin-store = "2"
 hyper = { version = "0.14", features = ["server"] }
 reqwest = { version = "0.11", features = ["json"] }
 tokio = { version = "1", features = ["full"] }
-tracing = "0.1.41"
 rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = [
     "client",
     "transport-sse",
     "transport-child-process",
     "tower",
 ] }
+
+[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
+tauri-plugin-updater = "2"
diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json
index ca95964ee..0544b1312 100644
--- a/src-tauri/capabilities/default.json
+++ b/src-tauri/capabilities/default.json
@@ -2,18 +2,15 @@
   "$schema": "../gen/schemas/desktop-schema.json",
   "identifier": "default",
   "description": "enables the default permissions",
-  "windows": [
-    "main"
-  ],
+  "windows": ["main"],
   "remote": {
-    "urls": [
-      "http://*"
-    ]
+    "urls": ["http://*"]
   },
   "permissions": [
     "core:default",
     "shell:allow-spawn",
     "shell:allow-open",
+    "log:default",
     {
       "identifier": "http:default",
       "allow": [
@@ -55,4 +52,4 @@
   },
   "store:default"
   ]
-}
\ No newline at end of file
+}
diff --git a/src-tauri/src/core/cmd.rs b/src-tauri/src/core/cmd.rs
index 62f53bf56..0fe706a1a 100644
--- a/src-tauri/src/core/cmd.rs
+++ b/src-tauri/src/core/cmd.rs
@@ -35,7 +35,7 @@ pub fn get_app_configurations(app_handle: tauri::AppHandle) -> Ap
     let default_data_folder = default_data_folder_path(app_handle.clone());
     if !configuration_file.exists() {
-        println!(
+        log::info!(
            "App config not found, creating default config at {:?}",
            configuration_file
        );
@@ -46,7 +46,7 @@
         &configuration_file,
         serde_json::to_string(&app_default_configuration).unwrap(),
     ) {
-        eprintln!("Failed to create default config: {}", err);
+        log::error!("Failed to create default config: {}", err);
     }
 
     return app_default_configuration;
@@ -56,7 +56,7 @@
     Ok(content) => match serde_json::from_str::<AppConfiguration>(&content) {
         Ok(app_configurations) => app_configurations,
         Err(err) => {
-            eprintln!(
+            log::error!(
                 "Failed to parse app config, returning default config instead. Error: {}",
                 err
             );
@@ -64,7 +64,7 @@
         }
     },
     Err(err) => {
-        eprintln!(
+        log::error!(
             "Failed to read app config, returning default config instead. Error: {}",
             err
         );
@@ -79,7 +79,7 @@ pub fn update_app_configuration(
     configuration: AppConfiguration,
 ) -> Result<(), String> {
     let configuration_file = get_configuration_file_path(app_handle);
-    println!(
+    log::info!(
         "update_app_configuration, configuration_file: {:?}",
         configuration_file
     );
@@ -136,7 +136,7 @@ pub fn read_theme(app_handle: tauri::AppHandle, theme_name: String) -> Result(app_handle: tauri::AppHandle) -> PathBuf {
     let app_path = app_handle.path().app_data_dir().unwrap_or_else(|err| {
-        eprintln!(
+        log::error!(
             "Failed to get app data directory: {}. Using home directory instead.",
             err
         );
@@ -215,7 +215,7 @@ pub fn open_file_explorer(path: String) {
 #[tauri::command]
 pub fn install_extensions(app: AppHandle) {
     if let Err(err) = setup::install_extensions(app, true) {
-        eprintln!("Failed to install extensions: {}", err);
+        log::error!("Failed to install extensions: {}", err);
     }
 }
 
@@ -223,7 +223,7 @@ pub fn install_extensions(app: AppHandle) {
 pub fn get_active_extensions(app: AppHandle) -> Vec {
     let mut path = get_jan_extensions_path(app);
     path.push("extensions.json");
-    println!("get jan extensions, path: {:?}", path);
+    log::info!("get jan extensions, path: {:?}", path);
     let contents = fs::read_to_string(path);
     let contents: Vec = match contents {
diff --git a/src-tauri/src/core/fs.rs b/src-tauri/src/core/fs.rs
index f0ffb9a71..9e77a812c 100644
--- a/src-tauri/src/core/fs.rs
+++ b/src-tauri/src/core/fs.rs
@@ -73,7 +73,7 @@ pub fn readdir_sync(
     }
     let path = resolve_path(app_handle, &args[0]);
-    println!("Reading directory: {:?}", path);
+    log::error!("Reading directory: {:?}", path);
     let entries = fs::read_dir(&path).map_err(|e| e.to_string())?;
     let paths: Vec = entries
         .filter_map(|entry| entry.ok())
diff --git a/src-tauri/src/core/mcp.rs b/src-tauri/src/core/mcp.rs
index c016effd7..b73b91f3a 100644
--- a/src-tauri/src/core/mcp.rs
+++ b/src-tauri/src/core/mcp.rs
@@ -17,7 +17,7 @@ pub async fn run_mcp_commands(
     app_path: String,
     servers_state: Arc>>>,
 ) -> Result<(), String> {
-    println!(
+    log::info!(
         "Load MCP configs from {}",
         app_path.clone() + "/mcp_config.json"
     );
@@ -29,17 +29,20 @@
     .map_err(|e| format!("Failed to parse config: {}", e))?;
 
     if let Some(server_map) = mcp_servers.get("mcpServers").and_then(Value::as_object) {
-        println!("MCP Servers: {server_map:#?}");
-
+        log::info!("MCP Servers: {server_map:#?}");
+
         for (name, config) in server_map {
             if let Some((command, args)) = extract_command_args(config) {
                 let mut cmd = Command::new(command);
-                args.iter().filter_map(Value::as_str).for_each(|arg| { cmd.arg(arg); });
-
-                let service = ().serve(TokioChildProcess::new(&mut cmd).map_err(|e| e.to_string())?)
-                    .await
-                    .map_err(|e| e.to_string())?;
-
+                args.iter().filter_map(Value::as_str).for_each(|arg| {
+                    cmd.arg(arg);
+                });
+
+                let service =
+                    ().serve(TokioChildProcess::new(&mut cmd).map_err(|e| e.to_string())?)
+                        .await
+                        .map_err(|e| e.to_string())?;
+
                 servers_state.lock().await.insert(name.clone(), service);
             }
         }
@@ -50,7 +53,7 @@
     for (_, service) in servers_map.iter() {
         // Initialize
         let _server_info = service.peer_info();
-        println!("Connected to server: {_server_info:#?}");
+        log::info!("Connected to server: {_server_info:#?}");
     }
     Ok(())
 }
diff --git a/src-tauri/src/core/mod.rs b/src-tauri/src/core/mod.rs
index baa8c2834..e4f0ee6c4 100644
--- a/src-tauri/src/core/mod.rs
+++ b/src-tauri/src/core/mod.rs
@@ -1,6 +1,6 @@
 pub mod cmd;
 pub mod fs;
+pub mod mcp;
+pub mod server;
 pub mod setup;
 pub mod state;
-pub mod server;
-pub mod mcp;
\ No newline at end of file
diff --git a/src-tauri/src/core/server.rs b/src-tauri/src/core/server.rs
index 359da13da..042ecf135 100644
--- a/src-tauri/src/core/server.rs
+++ b/src-tauri/src/core/server.rs
@@ -6,7 +6,6 @@ use std::net::SocketAddr;
 use std::sync::LazyLock;
 use tokio::sync::Mutex;
 use tokio::task::JoinHandle;
-use tracing::{debug, error, info};
 
 /// Server handle type for managing the proxy server lifecycle
 type ServerHandle = JoinHandle>>;
@@ -24,7 +23,7 @@ struct ProxyConfig {
 /// Removes a prefix from a path, ensuring proper formatting
 fn remove_prefix(path: &str, prefix: &str) -> String {
-    debug!("Processing path: {}, removing prefix: {}", path, prefix);
+    log::debug!("Processing path: {}, removing prefix: {}", path, prefix);
 
     if !prefix.is_empty() && path.starts_with(prefix) {
         let result = path[prefix.len()..].to_string();
@@ -42,7 +41,6 @@
 fn get_destination_path(original_path: &str, prefix: &str) -> String {
     let removed_prefix_path = remove_prefix(original_path, prefix);
-    println!("Removed prefix path: {}", removed_prefix_path);
 
     // Special paths don't need the /v1 prefix
     if !original_path.contains(prefix)
         || removed_prefix_path.contains("/healthz")
@@ -81,7 +79,7 @@ async fn proxy_request(
     // Build the outbound request
     let upstream_url = build_upstream_url(&config.upstream, &path);
-    debug!("Proxying request to: {}", upstream_url);
+    log::debug!("Proxying request to: {}", upstream_url);
 
     let mut outbound_req = client.request(req.method().clone(), &upstream_url);
 
@@ -100,7 +98,7 @@
     match outbound_req.body(req.into_body()).send().await {
         Ok(response) => {
             let status = response.status();
-            debug!("Received response with status: {}", status);
+            log::debug!("Received response with status: {}", status);
 
             let mut builder = Response::builder().status(status);
 
@@ -113,7 +111,7 @@
             match response.bytes().await {
                 Ok(bytes) => Ok(builder.body(Body::from(bytes)).unwrap()),
                 Err(e) => {
-                    error!("Failed to read response body: {}", e);
+                    log::error!("Failed to read response body: {}", e);
                     Ok(Response::builder()
                         .status(StatusCode::INTERNAL_SERVER_ERROR)
                         .body(Body::from("Error reading upstream response"))
@@ -122,7 +120,7 @@
             }
         }
         Err(e) => {
-            error!("Proxy request failed: {}", e);
+            log::error!("Proxy request failed: {}", e);
             Ok(Response::builder()
                 .status(StatusCode::BAD_GATEWAY)
                 .body(Body::from(format!("Upstream error: {}", e)))
@@ -175,12 +173,12 @@ pub async fn start_server(
     // Create and start the server
     let server = Server::bind(&addr).serve(make_svc);
-    info!("Proxy server started on http://{}", addr);
+    log::info!("Proxy server started on http://{}", addr);
 
     // Spawn server task
     let server_handle = tokio::spawn(async move {
         if let Err(e) = server.await {
-            error!("Server error: {}", e);
+            log::error!("Server error: {}", e);
             return Err(Box::new(e) as Box);
         }
         Ok(())
     });
@@ -196,9 +194,9 @@ pub async fn stop_server() -> Result<(), Box Result<(), Stri
     // Attempt to remove extensions folder
     if extensions_path.exists() {
         fs::remove_dir_all(&extensions_path).unwrap_or_else(|_| {
-            println!("Failed to remove existing extensions folder, it may not exist.");
+            log::info!("Failed to remove existing extensions folder, it may not exist.");
         });
     }
 
@@ -66,7 +67,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri
         let path = entry.path();
 
         if path.extension().map_or(false, |ext| ext == "tgz") {
-            println!("Installing extension from {:?}", path);
+            log::info!("Installing extension from {:?}", path);
             let tar_gz = File::open(&path).map_err(|e| e.to_string())?;
             let gz_decoder = GzDecoder::new(tar_gz);
             let mut archive = Archive::new(gz_decoder);
@@ -132,7 +133,7 @@ pub fn install_extensions(app: tauri::AppHandle, force: bool) -> Result<(), Stri
             extensions_list.push(new_extension);
 
-            println!("Installed extension to {:?}", extension_dir);
+            log::info!("Installed extension to {:?}", extension_dir);
         }
     }
     fs::write(
@@ -186,7 +187,7 @@ pub fn setup_mcp(app: &App) {
     let servers = state.mcp_servers.clone();
     tauri::async_runtime::spawn(async move {
        if let Err(e) = run_mcp_commands(app_path_str, servers).await {
-            eprintln!("Failed to run mcp commands: {}", e);
+            log::error!("Failed to run mcp commands: {}", e);
        }
     });
 }
@@ -252,7 +253,7 @@ pub fn setup_sidecar(app: &App) -> Result<(), String> {
         while let Some(event) = rx.recv().await {
             if let CommandEvent::Stdout(line_bytes) = event {
                 let line = String::from_utf8_lossy(&line_bytes);
-                println!("Outputs: {:?}", line)
+                log::info!("Outputs: {:?}", line)
             }
         }
     });
@@ -268,7 +269,7 @@ pub fn setup_sidecar(app: &App) -> Result<(), String> {
 fn copy_dir_all(src: PathBuf, dst: PathBuf) -> Result<(), String> {
     fs::create_dir_all(&dst).map_err(|e| e.to_string())?;
-    println!("Copying from {:?} to {:?}", src, dst);
+    log::info!("Copying from {:?} to {:?}", src, dst);
 
     for entry in fs::read_dir(src).map_err(|e| e.to_string())? {
         let entry = entry.map_err(|e| e.to_string())?;
         let ty = entry.file_type().map_err(|e| e.to_string())?;
@@ -293,10 +294,10 @@ pub fn setup_engine_binaries(app: &App) -> Result<(), String> {
         .join("resources");
 
     if let Err(e) = copy_dir_all(binaries_dir, app_data_dir.clone()) {
-        eprintln!("Failed to copy binaries: {}", e);
+        log::error!("Failed to copy binaries: {}", e);
     }
     if let Err(e) = copy_dir_all(themes_dir, app_data_dir.clone()) {
-        eprintln!("Failed to copy themes: {}", e);
+        log::error!("Failed to copy themes: {}", e);
     }
     Ok(())
 }
diff --git a/src-tauri/src/core/state.rs b/src-tauri/src/core/state.rs
index 93d770bc2..925030085 100644
--- a/src-tauri/src/core/state.rs
+++ b/src-tauri/src/core/state.rs
@@ -7,7 +7,7 @@ use tokio::sync::Mutex;
 #[derive(Default)]
 pub struct AppState {
     pub app_token: Option,
-    pub mcp_servers: Arc>>>
+    pub mcp_servers: Arc>>>,
 }
 pub fn generate_app_token() -> String {
     rand::thread_rng()
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 85578d680..40cd83f57 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -1,5 +1,6 @@
 mod core;
 use core::{
+    cmd::get_jan_data_folder_path,
     setup::{self, setup_engine_binaries, setup_mcp, setup_sidecar},
     state::{generate_app_token, AppState},
 };
@@ -11,8 +12,8 @@ use tokio::sync::Mutex;
 #[cfg_attr(mobile, tauri::mobile_entry_point)]
 pub fn run() {
     tauri::Builder::default()
-        .plugin(tauri_plugin_store::Builder::new().build())
         .plugin(tauri_plugin_http::init())
+        .plugin(tauri_plugin_store::Builder::new().build())
         .plugin(tauri_plugin_shell::init())
         .invoke_handler(tauri::generate_handler![
             // FS commands - Deperecate soon
@@ -47,25 +48,25 @@ pub fn run() {
             mcp_servers: Arc::new(Mutex::new(HashMap::new())),
         })
         .setup(|app| {
-            if cfg!(debug_assertions) {
-                app.handle().plugin(
-                    tauri_plugin_log::Builder::default()
-                        .level(log::LevelFilter::Info)
-                        .build(),
-                )?;
-            }
-
+            app.handle().plugin(
+                tauri_plugin_log::Builder::default()
+                    .targets([if cfg!(debug_assertions) {
+                        tauri_plugin_log::Target::new(tauri_plugin_log::TargetKind::Stdout)
+                    } else {
+                        tauri_plugin_log::Target::new(tauri_plugin_log::TargetKind::Folder {
+                            path: get_jan_data_folder_path(app.handle().clone()).join("logs"),
+                            file_name: Some("app".to_string()),
+                        })
+                    }])
+                    .build(),
+            )?;
             // Install extensions
             if let Err(e) = setup::install_extensions(app.handle().clone(), false) {
-                eprintln!("Failed to install extensions: {}", e);
+                log::error!("Failed to install extensions: {}", e);
             }
-
             setup_mcp(app);
-
             setup_sidecar(app).expect("Failed to setup sidecar");
-
             setup_engine_binaries(app).expect("Failed to setup engine binaries");
-
             Ok(())
         })
         .on_window_event(|window, event| match event {
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index d59949f8b..51e65133a 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -1,13 +1,13 @@
 {
-  "$schema": "../node_modules/@tauri-apps/cli/config.schema.json",
+  "$schema": "https://schema.tauri.app/config/2",
   "productName": "Jan",
   "version": "0.1.0",
   "identifier": "jan.ai",
   "build": {
     "frontendDist": "../web/out",
     "devUrl": "http://localhost:3000",
-    "beforeDevCommand": "yarn dev:web:tauri",
-    "beforeBuildCommand": "yarn build:web"
+    "beforeDevCommand": "IS_TAURI=true yarn dev:web",
+    "beforeBuildCommand": "IS_TAURI=true yarn build:web"
   },
   "app": {
     "macOSPrivateApi": true,
@@ -27,9 +27,10 @@
       "csp": {
         "default-src": "'self' customprotocol: asset: http://localhost:* http://127.0.0.1:* ws://localhost:* ws://127.0.0.1:*",
         "connect-src": "ipc: http://ipc.localhost",
-        "font-src": ["https://fonts.gstatic.com"],
+        "font-src": ["https://fonts.gstatic.com blob: data:"],
         "img-src": "'self' asset: http://asset.localhost blob: data:",
-        "style-src": "'unsafe-inline' 'self' https://fonts.googleapis.com"
+        "style-src": "'unsafe-inline' 'self' https://fonts.googleapis.com",
+        "script-src": "'self' asset: $APPDATA/**.*"
       },
       "assetProtocol": {
         "enable": true,
@@ -40,9 +41,18 @@
       }
     }
   },
+  "plugins": {
+    "updater": {
+      "pubkey": "",
+      "endpoints": [
+        "https://github.com/menloresearch/jan/releases/latest/download/latest.json"
+      ]
+    }
+  },
   "bundle": {
     "active": true,
     "targets": "all",
+    "createUpdaterArtifacts": true,
     "icon": [
       "icons/32x32.png",
       "icons/128x128.png",
diff --git a/web/containers/ModalAppUpdaterChangelog/index.tsx b/web/containers/ModalAppUpdaterChangelog/index.tsx
index 705623a90..fa519780c 100644
--- a/web/containers/ModalAppUpdaterChangelog/index.tsx
+++ b/web/containers/ModalAppUpdaterChangelog/index.tsx
@@ -1,7 +1,8 @@
-import React, { useEffect, useState } from 'react'
+import React, { useEffect, useRef, useState } from 'react'
 
 import { Button, Modal } from '@janhq/joi'
 
+import { check, Update } from '@tauri-apps/plugin-updater'
 import { useAtom } from 'jotai'
 
 import { useGetLatestRelease } from '@/hooks/useGetLatestRelease'
@@ -16,6 +17,7 @@ const ModalAppUpdaterChangelog = () => {
   const [appUpdateAvailable, setAppUpdateAvailable] = useAtom(
     appUpdateAvailableAtom
   )
+  const updaterRef = useRef<Update | null>(null)
 
   const [open, setOpen] = useState(appUpdateAvailable)
 
@@ -26,6 +28,17 @@ const ModalAppUpdaterChangelog = () => {
   const beta = VERSION.includes('beta')
   const nightly = VERSION.includes('-')
 
+  const checkForUpdate = async () => {
+    const update = await check()
+    if (update) {
+      setAppUpdateAvailable(true)
+      updaterRef.current = update
+    }
+  }
+  useEffect(() => {
+    checkForUpdate()
+  }, [])
+
   const { release } = useGetLatestRelease(beta ? true : false)
 
   return (
@@ -73,8 +86,8 @@ const ModalAppUpdaterChangelog = () => {