feat: bump version of llama.cpp - b5833
commit 46c95ebb97
parent ccab7f9119
@@ -15,7 +15,7 @@ export default defineConfig([
         `http://127.0.0.1:${process.env.CORTEX_API_PORT ?? '39291'}`
       ),
       PLATFORM: JSON.stringify(process.platform),
-      CORTEX_ENGINE_VERSION: JSON.stringify('b5509'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('b5833'),
       DEFAULT_REMOTE_ENGINES: JSON.stringify(engines),
       DEFAULT_REMOTE_MODELS: JSON.stringify(models),
       DEFAULT_REQUEST_PAYLOAD_TRANSFORM: JSON.stringify(
@@ -38,7 +38,7 @@ export default defineConfig([
       file: 'dist/node/index.cjs.js',
     },
     define: {
-      CORTEX_ENGINE_VERSION: JSON.stringify('b5509'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('b5833'),
     },
   },
 ])
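For context, the config above injects CORTEX_ENGINE_VERSION through the bundler's define option, so the string is substituted at build time rather than read at runtime. Below is a minimal TypeScript sketch of how such a constant is typically consumed; the helper name is hypothetical and not part of this commit.

// Sketch only: the bundler's define option replaces every occurrence of
// CORTEX_ENGINE_VERSION in the source with the JSON-encoded literal,
// so after this commit the call below compiles down to: return 'b5833'
declare const CORTEX_ENGINE_VERSION: string

// Hypothetical helper (illustration only, not from this commit)
export function engineVersion(): string {
  return CORTEX_ENGINE_VERSION
}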
@@ -2,7 +2,7 @@
 set BIN_PATH=./bin
 set SHARED_PATH=./../../electron/shared
 set /p CORTEX_VERSION=<./bin/version.txt
-set ENGINE_VERSION=b5509
+set ENGINE_VERSION=b5833
 
 @REM Download llama.cpp binaries
 set DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/%ENGINE_VERSION%/llama-%ENGINE_VERSION%-bin-win
@@ -2,7 +2,7 @@
 
 # Read CORTEX_VERSION
 CORTEX_VERSION=$(cat ./bin/version.txt)
-ENGINE_VERSION=b5509
+ENGINE_VERSION=b5833
 CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download"
 ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin
 CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}
@@ -15,7 +15,7 @@ download() {
 
 # Read CORTEX_VERSION
 CORTEX_VERSION=1.0.14
-ENGINE_VERSION=b5509
+ENGINE_VERSION=b5833
 CORTEX_RELEASE_URL="https://github.com/menloresearch/cortex.cpp/releases/download"
 ENGINE_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin
 CUDA_DOWNLOAD_URL=https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}
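For reference, a minimal TypeScript sketch of how the bumped version flows into the release URLs built by the download scripts above; the variable names mirror the scripts, and the asset suffixes appended later are omitted because they are not shown in this diff.

// Sketch only: mirrors the URL patterns from the download scripts above.
const ENGINE_VERSION = 'b5833'

const engineDownloadUrl =
  `https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}/llama-${ENGINE_VERSION}-bin`
const cudaDownloadUrl =
  `https://github.com/menloresearch/llama.cpp/releases/download/${ENGINE_VERSION}`

// Resolves to:
// https://github.com/menloresearch/llama.cpp/releases/download/b5833/llama-b5833-bin
// https://github.com/menloresearch/llama.cpp/releases/download/b5833
console.log(engineDownloadUrl)
console.log(cudaDownloadUrl)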