* feat: add extension settings Signed-off-by: James <james@jan.ai> --------- Signed-off-by: James <james@jan.ai> Co-authored-by: James <james@jan.ai> Co-authored-by: Louis <louis@jan.ai>
50 lines
1.6 KiB
TypeScript
/**
 * @file This file exports a class that implements the InferenceExtension interface from the @janhq/core package.
 * The class provides methods for initializing and stopping a model, and for making inference requests.
 * It also subscribes to events emitted by the @janhq/core package and handles new message requests.
 * @version 1.0.0
 * @module inference-nvidia-triton-trt-llm-extension/src/index
 */
|
|
|
|
import { RemoteOAIEngine } from '@janhq/core'
|
|
|
|
declare const SETTINGS: Array<any>
|
|
enum Settings {
|
|
apiKey = 'tritonllm-api-key',
|
|
chatCompletionsEndPoint = 'chat-completions-endpoint',
|
|
}
|
|
/**
|
|
* A class that implements the InferenceExtension interface from the @janhq/core package.
|
|
* The class provides methods for initializing and stopping a model, and for making inference requests.
|
|
* It also subscribes to events emitted by the @janhq/core package and handles new message requests.
|
|
*/
|
|
export default class JanInferenceTritonTrtLLMExtension extends RemoteOAIEngine {
|
|
inferenceUrl: string = ''
|
|
provider: string = 'triton_trtllm'
|
|
|
|
/**
|
|
* Subscribes to events emitted by the @janhq/core package.
|
|
*/
|
|
async onLoad() {
|
|
super.onLoad()
|
|
|
|
// Register Settings
|
|
this.registerSettings(SETTINGS)
|
|
|
|
// Retrieve API Key Setting
|
|
this.apiKey = await this.getSetting<string>(Settings.apiKey, '')
|
|
this.inferenceUrl = await this.getSetting<string>(
|
|
Settings.chatCompletionsEndPoint,
|
|
''
|
|
)
|
|
}
|
|
|
|
onSettingUpdate<T>(key: string, value: T): void {
|
|
if (key === Settings.apiKey) {
|
|
this.apiKey = value as string
|
|
} else if (key === Settings.chatCompletionsEndPoint) {
|
|
this.inferenceUrl = value as string
|
|
}
|
|
}
|
|
}
|