fix: add fallback as default endpoint for inference engine (#2669)
Co-authored-by: James <james@jan.ai>
parent 69f73b86c4
commit 8917be5ef3
@@ -102,7 +102,7 @@ export abstract class OAIEngine extends AIEngine {
           events.emit(MessageEvent.OnMessageUpdate, message)
         },
         error: async (err: any) => {
-          console.error(`Inference error:`, err)
+          console.error(`Inference error:`, JSON.stringify(err))
           if (this.isCancelled || message.content.length) {
             message.status = MessageStatus.Stopped
             events.emit(MessageEvent.OnMessageUpdate, message)
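One note on the logging change above: JSON.stringify prints the full structure of plain error payloads (such as JSON error bodies returned by remote APIs), but for Error instances it serializes only enumerable own properties, so message and stack may be dropped. A minimal sketch of the difference, using a hypothetical payload shape that is not taken from the Jan codebase:

// Sketch: how JSON.stringify renders different error shapes (payload shape is hypothetical).
const apiError = { code: 401, error: { message: 'Invalid API key' } }
console.error(`Inference error:`, JSON.stringify(apiError))
// -> Inference error: {"code":401,"error":{"message":"Invalid API key"}}

const thrown = new Error('connection refused')
console.error(`Inference error:`, JSON.stringify(thrown))
// -> Inference error: {} (message and stack are non-enumerable on Error instances)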
@@ -5,7 +5,7 @@
     "description": "The endpoint to use for chat completions. See the [Groq documentation](https://console.groq.com/docs/openai) for more information.",
     "controllerType": "input",
     "controllerProps": {
-      "placeholder": "Chat Completions Endpoint",
+      "placeholder": "https://api.groq.com/openai/v1/chat/completions",
       "value": "https://api.groq.com/openai/v1/chat/completions"
     }
   },
@@ -6,7 +6,7 @@
  * @module inference-groq-extension/src/index
  */

-import { RemoteOAIEngine } from '@janhq/core'
+import { RemoteOAIEngine, SettingComponentProps } from '@janhq/core'

 declare const SETTINGS: Array<any>
 declare const MODELS: Array<any>
@@ -43,7 +43,17 @@ export default class JanInferenceGroqExtension extends RemoteOAIEngine {
     if (key === Settings.apiKey) {
       this.apiKey = value as string
     } else if (key === Settings.chatCompletionsEndPoint) {
-      this.inferenceUrl = value as string
+      if (typeof value !== 'string') return
+
+      if (value.trim().length === 0) {
+        SETTINGS.forEach((setting) => {
+          if (setting.key === Settings.chatCompletionsEndPoint) {
+            this.inferenceUrl = setting.controllerProps.value as string
+          }
+        })
+      } else {
+        this.inferenceUrl = value
+      }
     }
   }
 }
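The same fallback applies to the Mistral, OpenAI, and Triton TRT-LLM extensions below: if the endpoint setting comes back as an empty string, the engine falls back to the default value bundled in the extension's settings.json instead of leaving inferenceUrl blank. A standalone sketch of that resolution logic, assuming a pared-down setting shape and a hypothetical resolveEndpoint helper (neither is part of the Jan codebase); the localhost URL in the usage lines is also made up:

// Sketch only: EndpointSetting and resolveEndpoint are illustrative, not Jan APIs.
type EndpointSetting = { key: string; controllerProps: { value: string } }

const SETTINGS: EndpointSetting[] = [
  {
    key: 'chat-completions-endpoint', // assumed key; the real one comes from the Settings enum
    controllerProps: { value: 'https://api.groq.com/openai/v1/chat/completions' },
  },
]

function resolveEndpoint(key: string, value: unknown): string | undefined {
  if (typeof value !== 'string') return undefined
  if (value.trim().length === 0) {
    // Empty input: fall back to the default declared in settings.json.
    return SETTINGS.find((s) => s.key === key)?.controllerProps.value
  }
  return value
}

// Usage: blank input resolves to the bundled default, anything else is kept as-is.
resolveEndpoint('chat-completions-endpoint', '   ')
// -> 'https://api.groq.com/openai/v1/chat/completions'
resolveEndpoint('chat-completions-endpoint', 'http://localhost:1337/v1/chat/completions')
// -> 'http://localhost:1337/v1/chat/completions'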
@@ -5,7 +5,7 @@
     "description": "The endpoint to use for chat completions. See the [Mistral API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.",
     "controllerType": "input",
     "controllerProps": {
-      "placeholder": "Chat Completions Endpoint",
+      "placeholder": "https://api.mistral.ai/v1/chat/completions",
       "value": "https://api.mistral.ai/v1/chat/completions"
     }
   },
@@ -42,7 +42,17 @@ export default class JanInferenceMistralExtension extends RemoteOAIEngine {
     if (key === Settings.apiKey) {
       this.apiKey = value as string
     } else if (key === Settings.chatCompletionsEndPoint) {
-      this.inferenceUrl = value as string
+      if (typeof value !== 'string') return
+
+      if (value.trim().length === 0) {
+        SETTINGS.forEach((setting) => {
+          if (setting.key === Settings.chatCompletionsEndPoint) {
+            this.inferenceUrl = setting.controllerProps.value as string
+          }
+        })
+      } else {
+        this.inferenceUrl = value
+      }
     }
   }
 }
@@ -5,7 +5,7 @@
     "description": "The endpoint to use for chat completions. See the [OpenAI API documentation](https://platform.openai.com/docs/api-reference/chat/create) for more information.",
     "controllerType": "input",
     "controllerProps": {
-      "placeholder": "Chat Completions Endpoint",
+      "placeholder": "https://api.openai.com/v1/chat/completions",
       "value": "https://api.openai.com/v1/chat/completions"
     }
   },
@@ -6,7 +6,7 @@
  * @module inference-openai-extension/src/index
  */

-import { RemoteOAIEngine } from '@janhq/core'
+import { RemoteOAIEngine, SettingComponentProps } from '@janhq/core'

 declare const SETTINGS: Array<any>
 declare const MODELS: Array<any>
@@ -43,7 +43,17 @@ export default class JanInferenceOpenAIExtension extends RemoteOAIEngine {
     if (key === Settings.apiKey) {
       this.apiKey = value as string
     } else if (key === Settings.chatCompletionsEndPoint) {
-      this.inferenceUrl = value as string
+      if (typeof value !== 'string') return
+
+      if (value.trim().length === 0) {
+        SETTINGS.forEach((setting) => {
+          if (setting.key === Settings.chatCompletionsEndPoint) {
+            this.inferenceUrl = setting.controllerProps.value as string
+          }
+        })
+      } else {
+        this.inferenceUrl = value
+      }
     }
   }
 }
@@ -5,7 +5,7 @@
     "description": "The endpoint to use for chat completions.",
     "controllerType": "input",
     "controllerProps": {
-      "placeholder": "Chat Completions Endpoint",
+      "placeholder": "http://localhost:8000/v2/models/tensorrt_llm_bls/generate",
       "value": "http://localhost:8000/v2/models/tensorrt_llm_bls/generate"
     }
   },
@@ -6,7 +6,7 @@
  * @module inference-nvidia-triton-trt-llm-extension/src/index
  */

-import { RemoteOAIEngine } from '@janhq/core'
+import { RemoteOAIEngine, SettingComponentProps } from '@janhq/core'

 declare const SETTINGS: Array<any>
 enum Settings {
@@ -43,7 +43,17 @@ export default class JanInferenceTritonTrtLLMExtension extends RemoteOAIEngine {
     if (key === Settings.apiKey) {
       this.apiKey = value as string
     } else if (key === Settings.chatCompletionsEndPoint) {
-      this.inferenceUrl = value as string
+      if (typeof value !== 'string') return
+
+      if (value.trim().length === 0) {
+        SETTINGS.forEach((setting) => {
+          if (setting.key === Settings.chatCompletionsEndPoint) {
+            this.inferenceUrl = setting.controllerProps.value as string
+          }
+        })
+      } else {
+        this.inferenceUrl = value
+      }
     }
   }
 }