Akarshan Biswas 1eaec5e4f6
Fix: engine unable to find dlls when running on Windows (#5863)
* Fix: Windows llamacpp not picking up dlls from lib repo

* Fix lib path on Windows

* Add debug info about lib_path

* Normalize lib_path for Windows

* fix Windows lib path normalization

* fix: missing cuda dll files on windows

* throw backend setup errors to UI

* Fix format

* Update extensions/llamacpp-extension/src/index.ts

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>

* feat: add logger to llamacpp-extension

* fix: platform check

---------

Co-authored-by: Louis <louis@jan.ai>
Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
2025-07-22 20:05:24 +05:30
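The bullets above mention normalizing lib_path on Windows and surfacing backend setup errors in the UI. As a rough illustration only, the sketch below shows what a Windows-specific library-path normalization step could look like in the extension's TypeScript code; the function name and exact behavior are assumptions, not the implementation from this commit.

```typescript
// Hypothetical sketch of Windows lib_path normalization (names are illustrative,
// not the extension's real code).
function normalizeLibPath(libPath: string, platform: string = process.platform): string {
  if (platform !== 'win32') return libPath
  // Windows DLL lookup is sensitive to separators: convert forward slashes
  // to backslashes and drop any trailing separator.
  const normalized = libPath.replace(/\//g, '\\')
  return normalized.endsWith('\\') ? normalized.slice(0, -1) : normalized
}

// Example: 'C:/Jan/extensions/llamacpp/lib/' -> 'C:\Jan\extensions\llamacpp\lib'
console.log(normalizeLibPath('C:/Jan/extensions/llamacpp/lib/', 'win32'))
```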

{
  "name": "@janhq/llamacpp-extension",
  "productName": "llama.cpp Inference Engine",
  "version": "1.0.0",
  "description": "This extension enables llama.cpp chat completion API calls",
  "main": "dist/index.js",
  "module": "dist/module.js",
  "engine": "llama.cpp",
  "author": "Jan <service@jan.ai>",
  "license": "AGPL-3.0",
  "scripts": {
    "build": "rolldown -c rolldown.config.mjs",
    "build:publish": "rimraf *.tgz --glob || true && yarn build && npm pack && cpx *.tgz ../../pre-install",
    "test": "vitest",
    "test:ui": "vitest --ui",
    "test:run": "vitest run",
    "test:coverage": "vitest run --coverage"
  },
  "devDependencies": {
    "@vitest/ui": "^3.2.4",
    "cpx": "^1.5.0",
    "jsdom": "^26.1.0",
    "rimraf": "^3.0.2",
    "rolldown": "1.0.0-beta.1",
    "ts-loader": "^9.5.0",
    "typescript": "^5.7.2",
    "vitest": "^3.2.4"
  },
  "dependencies": {
    "@janhq/core": "../../core/package.tgz",
    "@tauri-apps/api": "^2.5.0",
    "@tauri-apps/plugin-log": "^2.6.0",
    "fetch-retry": "^5.0.6",
    "ulidx": "^2.3.0"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "files": [
    "dist/*",
    "package.json"
  ],
  "bundleDependencies": [
    "fetch-retry"
  ],
  "installConfig": {
    "hoistingLimits": "workspaces"
  },
  "packageManager": "yarn@4.5.3"
}
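
One of the commit bullets adds a logger to the extension, and the dependency list above includes @tauri-apps/plugin-log. A minimal sketch of how that plugin's logging calls could be used to record the resolved lib_path and report backend setup failures follows; the wrapper function names are assumptions, not the extension's actual exports.

```typescript
// Minimal sketch using @tauri-apps/plugin-log (listed in "dependencies" above).
// The wrapper names below are assumptions, not the extension's actual exports.
import { info, error } from '@tauri-apps/plugin-log'

// Record the resolved lib_path so missing-DLL issues can be debugged from the app log.
export async function logLibPath(libPath: string): Promise<void> {
  await info(`llamacpp-extension: lib_path resolved to ${libPath}`)
}

// Log backend setup failures before rethrowing so the UI layer can display them.
export async function reportBackendSetupError(err: unknown): Promise<never> {
  await error(`llamacpp-extension: backend setup failed: ${String(err)}`)
  throw err
}
```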