{
  "name": "@janhq/llamacpp-extension",
  "productName": "llama.cpp Inference Engine",
  "version": "1.0.0",
  "description": "This extension enables llama.cpp chat completion API calls",
  "main": "dist/index.js",
  "module": "dist/module.js",
  "engine": "llama.cpp",
  "author": "Jan",
  "license": "AGPL-3.0",
  "scripts": {
    "build": "rolldown -c rolldown.config.mjs",
    "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install",
    "test": "vitest",
    "test:ui": "vitest --ui",
    "test:run": "vitest run",
    "test:coverage": "vitest run --coverage"
  },
  "devDependencies": {
    "@vitest/ui": "^3.2.4",
    "cpx": "^1.5.0",
    "jsdom": "^26.1.0",
    "rimraf": "^3.0.2",
    "rolldown": "1.0.0-beta.1",
    "ts-loader": "^9.5.0",
    "typescript": "^5.7.2",
    "vitest": "^3.2.4"
  },
  "dependencies": {
    "@janhq/core": "../../core/package.tgz",
    "@tauri-apps/api": "^2.5.0",
    "@tauri-apps/plugin-log": "^2.6.0",
    "fetch-retry": "^5.0.6",
    "ulidx": "^2.3.0"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "files": [
    "dist/*",
    "package.json"
  ],
  "bundleDependencies": [
    "fetch-retry"
  ],
  "installConfig": {
    "hoistingLimits": "workspaces"
  },
  "packageManager": "yarn@4.5.3"
}