fix: hide placeholder python engine and unsupported openai o1 system role (#4455)
parent 231c7116ea
commit 1964a9cf4d
@@ -10,7 +10,7 @@
   "transform_req": {
     "chat_completions": {
       "url": "https://api.openai.com/v1/chat/completions",
-      "template": "{ {% set first = true %}{% for key, value in input_request %}{% if key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"messages\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" or (not \"o1\" in input_request.model and (key == \"max_tokens\" or key == \"stop\")) %} {% if key == \"max_tokens\" and \"o1\" in input_request.model %} \"max_completion_tokens\": {{ tojson(value) }} {% else %} {% if not first %},{% endif %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endif %} {% endfor %} }"
+      "template": "{ {% set first = true %} {% for key, value in input_request %} {% if key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"messages\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" or key == \"max_tokens\" or ((input_request.model == \"o1\" or input_request.model == \"o1-preview\" or input_request.model == \"o1-mini\") and (key == \"stop\")) %} {% if not first %} , {% endif %} {% if key == \"messages\" and (input_request.model == \"o1\" or input_request.model == \"o1-preview\" or input_request.model == \"o1-mini\") and input_request.messages.0.role == \"system\" %} \"messages\": [{% for message in input_request.messages %} {% if not loop.is_first %} { \"role\": \"{{ message.role }}\", \"content\": \"{{ message.content }}\" } {% if not loop.is_last %} , {% endif %} {% endif %} {% endfor %}] {% else if key == \"max_tokens\" and (input_request.model == \"o1\" or input_request.model == \"o1-preview\" or input_request.model == \"o1-mini\") %} \"max_completion_tokens\": {{ tojson(value) }} {% else %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endif %} {% endfor %} }"
     }
   },
   "transform_resp": {
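
The template change above rewrites chat-completion requests for the o1 family before they are forwarded to the OpenAI endpoint. The following is a minimal TypeScript sketch of that rewrite, not code from the repository (the interfaces, constant, and function name are illustrative; the shipped transform is the inja template shown in the diff): a leading "system" message is dropped, since o1, o1-preview, and o1-mini reject the system role, and "max_tokens" is sent as "max_completion_tokens".

// Hypothetical sketch of the o1 request rewrite; names are illustrative only.
interface ChatMessage {
  role: 'system' | 'user' | 'assistant'
  content: string
}

interface ChatRequest {
  model: string
  messages: ChatMessage[]
  max_tokens?: number
  [key: string]: unknown
}

const O1_MODELS = ['o1', 'o1-preview', 'o1-mini']

function transformForO1(req: ChatRequest): Record<string, unknown> {
  // Non-o1 models pass through unchanged.
  if (!O1_MODELS.includes(req.model)) return req

  const { max_tokens, messages, ...rest } = req
  return {
    ...rest,
    // o1 models do not accept the "system" role, so skip a leading system message.
    messages: messages[0]?.role === 'system' ? messages.slice(1) : messages,
    // o1 models expect "max_completion_tokens" instead of "max_tokens".
    ...(max_tokens !== undefined ? { max_completion_tokens: max_tokens } : {}),
  }
}
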
@@ -23,7 +23,11 @@ const Engines = () => {
         </h6>
         {engines &&
           Object.entries(engines).map(([key]) => {
-            if (!isLocalEngine(engines, key as InferenceEngine)) return
+            if (
+              !isLocalEngine(engines, key as InferenceEngine) ||
+              !engines[key as InferenceEngine].length
+            )
+              return
             return (
               <LocalEngineItems engine={key as InferenceEngine} key={key} />
             )
@@ -40,7 +44,11 @@ const Engines = () => {
         </div>
         {engines &&
           Object.entries(engines).map(([key, values]) => {
-            if (isLocalEngine(engines, key as InferenceEngine)) return
+            if (
+              isLocalEngine(engines, key as InferenceEngine) ||
+              !values.length
+            )
+              return
             return (
               <RemoteEngineItems
                 engine={key as InferenceEngine}
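
Both Engines lists now apply the same rule, sketched below as a small standalone TypeScript helper (the helper and its parameters are illustrative, not code from the repository): an engine is rendered only when it belongs to the section (local vs. remote) and has at least one registered variant, which is what hides a placeholder entry such as the python engine with an empty list.

// Illustrative helper capturing the shared visibility rule; not from the repo.
type EngineList = Record<string, unknown[]>

function visibleEngines(
  engines: EngineList,
  isLocal: (key: string) => boolean,
  wantLocal: boolean
): string[] {
  return Object.entries(engines)
    // Keep an engine only when it matches the requested section and is non-empty.
    .filter(([key, values]) => isLocal(key) === wantLocal && values.length > 0)
    .map(([key]) => key)
}

For example, visibleEngines(engines, isLocalCheck, true) would return only the local engines that actually have variants, mirroring the guard added in both render loops above.
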
@@ -88,7 +88,9 @@ const SettingLeftPanel = () => {
         {engines &&
           Object.entries(engines)
             .filter(
-              ([key]) => !showSettingActiveLocalEngine.includes(key)
+              ([key]) =>
+                !showSettingActiveLocalEngine.includes(key) &&
+                engines[key as InferenceEngine].length > 0
             )
             .map(([key]) => {
               if (!isLocalEngine(engines, key as InferenceEngine)) return
@@ -119,7 +121,9 @@ const SettingLeftPanel = () => {
         {engines &&
           Object.entries(engines)
             .filter(
-              ([key]) => !showSettingActiveRemoteEngine.includes(key)
+              ([key]) =>
+                !showSettingActiveRemoteEngine.includes(key) &&
+                engines[key as InferenceEngine].length > 0
             )
             .map(([key]) => {
               if (isLocalEngine(engines, key as InferenceEngine)) return
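
The settings navigation gets the same non-empty check. A rough sketch of the tightened filter, assuming showSettingActiveLocalEngine / showSettingActiveRemoteEngine are lists of engine keys already shown as active entries (the helper name below is made up for illustration):

// Hypothetical sketch of the settings-menu filter; an engine gets a menu
// entry only when it is not already in the active list and has at least one
// variant installed.
function settingsMenuEngines(
  engines: Record<string, unknown[]>,
  activeList: string[]
): string[] {
  return Object.entries(engines)
    .filter(([key]) => !activeList.includes(key) && engines[key].length > 0)
    .map(([key]) => key)
}
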