diff --git a/extensions/engine-management-extension/resources/cohere.json b/extensions/engine-management-extension/resources/cohere.json
index c6df45898..02f1cc625 100644
--- a/extensions/engine-management-extension/resources/cohere.json
+++ b/extensions/engine-management-extension/resources/cohere.json
@@ -10,7 +10,7 @@
     "transform_req": {
       "chat_completions": {
         "url": "https://api.cohere.ai/v1/chat",
-        "template": "{ {% for key, value in input_request %} {% if key == \"messages\" %} {% if input_request.messages.0.role == \"system\" %} \"preamble\": {{ tojson(input_request.messages.0.content) }}, {% if length(input_request.messages) > 2 %} \"chatHistory\": [{% for message in input_request.messages %} {% if not loop.is_first and not loop.is_last %} {\"role\": {% if message.role == \"user\" %} \"USER\" {% else %} \"CHATBOT\" {% endif %}, \"content\": \"{{ message.content }}\" } {% if loop.index < length(input_request.messages) - 2 %},{% endif %} {% endif %} {% endfor %}], {% endif %} \"message\": \"{{ last(input_request.messages).content }}\" {% else %} {% if length(input_request.messages) > 2 %} \"chatHistory\": [{% for message in input_request.messages %} {% if not loop.is_last %} { \"role\": {% if message.role == \"user\" %} \"USER\" {% else %} \"CHATBOT\" {% endif %}, \"content\": \"{{ message.content }}\" } {% if loop.index < length(input_request.messages) - 2 %},{% endif %} {% endif %} {% endfor %}],{% endif %}\"message\": \"{{ last(input_request.messages).content }}\" {% endif %}{% if not loop.is_last %},{% endif %} {% else if key == \"system\" or key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"max_tokens\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stop\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" %} \"{{ key }}\": {{ tojson(value) }} {% if not loop.is_last %},{% endif %} {% endif %} {% endfor %} }"
+        "template": "{ {% for key, value in input_request %} {% if key == \"messages\" %} {% if input_request.messages.0.role == \"system\" %} \"preamble\": {{ tojson(input_request.messages.0.content) }}, {% if length(input_request.messages) > 2 %} \"chatHistory\": [{% for message in input_request.messages %} {% if not loop.is_first and not loop.is_last %} {\"role\": {% if message.role == \"user\" %} \"USER\" {% else %} \"CHATBOT\" {% endif %}, \"content\": {{ tojson(message.content) }} } {% if loop.index < length(input_request.messages) - 2 %},{% endif %} {% endif %} {% endfor %}], {% endif %} \"message\": {{ tojson(last(input_request.messages).content) }} {% else %} {% if length(input_request.messages) > 2 %} \"chatHistory\": [{% for message in input_request.messages %} {% if not loop.is_last %} { \"role\": {% if message.role == \"user\" %} \"USER\" {% else %} \"CHATBOT\" {% endif %}, \"content\": {{ tojson(message.content) }} } {% if loop.index < length(input_request.messages) - 2 %},{% endif %} {% endif %} {% endfor %}],{% endif %}\"message\": {{ tojson(last(input_request.messages).content) }} {% endif %}{% if not loop.is_last %},{% endif %} {% else if key == \"system\" or key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"max_tokens\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stop\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" %} \"{{ key }}\": {{ tojson(value) }} {% if not loop.is_last %},{% endif %} {% endif %} {% endfor %} }"
       }
     },
     "transform_resp": {
diff --git a/extensions/engine-management-extension/rolldown.config.mjs b/extensions/engine-management-extension/rolldown.config.mjs
index 8f15b148d..02b84b363 100644
--- a/extensions/engine-management-extension/rolldown.config.mjs
+++ b/extensions/engine-management-extension/rolldown.config.mjs
@@ -15,7 +15,7 @@ export default defineConfig([
         `http://127.0.0.1:${process.env.CORTEX_API_PORT ?? '39291'}`
       ),
       PLATFORM: JSON.stringify(process.platform),
-      CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.54'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.55'),
       DEFAULT_REMOTE_ENGINES: JSON.stringify(engines),
       DEFAULT_REMOTE_MODELS: JSON.stringify(models),
       DEFAULT_REQUEST_PAYLOAD_TRANSFORM: JSON.stringify(
@@ -38,7 +38,7 @@ export default defineConfig([
       file: 'dist/node/index.cjs.js',
     },
     define: {
-      CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.54'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.55'),
     },
   },
 ])
diff --git a/extensions/inference-cortex-extension/download.bat b/extensions/inference-cortex-extension/download.bat
index 0646cff07..045bb3cf3 100644
--- a/extensions/inference-cortex-extension/download.bat
+++ b/extensions/inference-cortex-extension/download.bat
@@ -2,7 +2,7 @@
 set BIN_PATH=./bin
 set SHARED_PATH=./../../electron/shared
 set /p CORTEX_VERSION=<./bin/version.txt
-set ENGINE_VERSION=0.1.54
+set ENGINE_VERSION=0.1.55
 
 @REM Download cortex.llamacpp binaries
 set DOWNLOAD_URL=https://github.com/janhq/cortex.llamacpp/releases/download/v%ENGINE_VERSION%/cortex.llamacpp-%ENGINE_VERSION%-windows-amd64
diff --git a/extensions/inference-cortex-extension/download.sh b/extensions/inference-cortex-extension/download.sh
index f5fefb341..c88dffc72 100755
--- a/extensions/inference-cortex-extension/download.sh
+++ b/extensions/inference-cortex-extension/download.sh
@@ -2,7 +2,7 @@
 
 # Read CORTEX_VERSION
 CORTEX_VERSION=$(cat ./bin/version.txt)
-ENGINE_VERSION=0.1.54
+ENGINE_VERSION=0.1.55
 CORTEX_RELEASE_URL="https://github.com/janhq/cortex.cpp/releases/download"
 ENGINE_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}/cortex.llamacpp-${ENGINE_VERSION}"
 CUDA_DOWNLOAD_URL="https://github.com/janhq/cortex.llamacpp/releases/download/v${ENGINE_VERSION}"
diff --git a/extensions/inference-cortex-extension/rolldown.config.mjs b/extensions/inference-cortex-extension/rolldown.config.mjs
index 5b5fed296..3bad30b58 100644
--- a/extensions/inference-cortex-extension/rolldown.config.mjs
+++ b/extensions/inference-cortex-extension/rolldown.config.mjs
@@ -19,7 +19,7 @@ export default defineConfig([
       CORTEX_SOCKET_URL: JSON.stringify(
         `ws://127.0.0.1:${process.env.CORTEX_API_PORT ?? '39291'}`
       ),
-      CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.54'),
+      CORTEX_ENGINE_VERSION: JSON.stringify('v0.1.55'),
     },
   },
   {