diff --git a/extensions/engine-management-extension/rolldown.config.mjs b/extensions/engine-management-extension/rolldown.config.mjs
index 98a5445cf..7cb0a8648 100644
--- a/extensions/engine-management-extension/rolldown.config.mjs
+++ b/extensions/engine-management-extension/rolldown.config.mjs
@@ -15,7 +15,7 @@ export default defineConfig([
         `http://127.0.0.1:${process.env.CORTEX_API_PORT ?? '39291'}`
       ),
       PLATFORM: JSON.stringify(process.platform),
-      CORTEX_ENGINE_VERSION: JSON.stringify('b5509'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('b5833'),
       DEFAULT_REMOTE_ENGINES: JSON.stringify(engines),
       DEFAULT_REMOTE_MODELS: JSON.stringify(models),
       DEFAULT_REQUEST_PAYLOAD_TRANSFORM: JSON.stringify(
@@ -38,7 +38,7 @@ export default defineConfig([
       file: 'dist/node/index.cjs.js',
     },
     define: {
-      CORTEX_ENGINE_VERSION: JSON.stringify('b5509'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('b5833'),
     },
   },
 ])
diff --git a/extensions/inference-cortex-extension/download.bat b/extensions/inference-cortex-extension/download.bat
index fe2df6645..701f030f4 100644
--- a/extensions/inference-cortex-extension/download.bat
+++ b/extensions/inference-cortex-extension/download.bat
@@ -2,7 +2,7 @@
 set BIN_PATH=./bin
 set SHARED_PATH=./../../electron/shared
 set /p CORTEX_VERSION=<./bin/version.txt
-set ENGINE_VERSION=b5509
+set ENGINE_VERSION=b5833
 
 @REM Download llama.cpp binaries
 set DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/%ENGINE_VERSION%/llama-%ENGINE_VERSION%-bin-win
diff --git a/extensions/inference-cortex-extension/download.sh b/extensions/inference-cortex-extension/download.sh
index 834c3315b..186bdbe04 100755
--- a/extensions/inference-cortex-extension/download.sh
+++ b/extensions/inference-cortex-extension/download.sh
@@ -2,7 +2,7 @@
 
 # Read CORTEX_VERSION
 CORTEX_VERSION=$(cat ./bin/version.txt)
-ENGINE_VERSION=b5509
+ENGINE_VERSION=b5833
 CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download"
 ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin
 CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}
diff --git a/src-tauri/binaries/download.sh b/src-tauri/binaries/download.sh
index 8ce0041f0..6d11d77a9 100755
--- a/src-tauri/binaries/download.sh
+++ b/src-tauri/binaries/download.sh
@@ -15,7 +15,7 @@ download() {
 
 # Read CORTEX_VERSION
 CORTEX_VERSION=1.0.14
-ENGINE_VERSION=b5509
+ENGINE_VERSION=b5833
 CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download"
 ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin
 CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}
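This diff bumps the pinned llama.cpp engine build from b5509 to b5833 everywhere the tag is hard-coded: the rolldown build config and the three download scripts that derive release URLs from ENGINE_VERSION. A quick pre-flight check for a bump like this is to confirm the new tag actually has published release assets. The snippet below is a minimal sketch of such a check, not part of the change itself; it assumes curl and jq are available, and only the repository URL and the b5833 tag are taken from the scripts above.

#!/bin/bash
set -euo pipefail

# Sketch only: list the assets published under the new engine tag so the
# llama-<tag>-bin-* archives referenced by download.bat and download.sh can be
# checked by eye before the version bump lands.
ENGINE_VERSION=b5833
RELEASE_API="https://api.github.com/repos/menloresearch/llama.cpp/releases/tags/${ENGINE_VERSION}"

# -f makes curl fail on HTTP errors (e.g. a tag with no release yet), which
# aborts the script thanks to pipefail; otherwise jq prints each asset name.
curl -sf "${RELEASE_API}" | jq -r '.assets[].name'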