{
  "name": "@janhq/inference-triton-trt-llm-extension",
  "productName": "Triton-TRT-LLM Inference Engine",
  "version": "1.0.0",
  "description": "This extension enables Nvidia's TensorRT-LLM as an inference engine option",
  "main": "dist/index.js",
  "module": "dist/module.js",
  "author": "Jan",
  "license": "AGPL-3.0",
  "scripts": {
    "build": "tsc -b . && webpack --config webpack.config.js",
    "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install"
  },
  "exports": {
    ".": "./dist/index.js",
    "./main": "./dist/module.js"
  },
  "devDependencies": {
    "cpx": "^1.5.0",
    "rimraf": "^3.0.2",
    "ts-loader": "^9.5.0",
    "typescript": "^5.7.2",
    "webpack": "^5.88.2",
    "webpack-cli": "^5.1.4"
  },
  "dependencies": {
    "@janhq/core": "../../core/package.tgz",
    "fetch-retry": "^5.0.6",
    "rxjs": "^7.8.1",
    "ulidx": "^2.3.0"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "files": [
    "dist/*",
    "package.json",
    "README.md"
  ],
  "bundleDependencies": [
    "fetch-retry"
  ],
  "installConfig": {
    "hoistingLimits": "workspaces"
  },
  "packageManager": "yarn@4.5.3"
}