diff --git a/extensions/inference-nitro-extension/src/helpers/sse.ts b/extensions/inference-nitro-extension/src/helpers/sse.ts
index 6edad302c..d9d8712dd 100644
--- a/extensions/inference-nitro-extension/src/helpers/sse.ts
+++ b/extensions/inference-nitro-extension/src/helpers/sse.ts
@@ -16,7 +16,6 @@ export function requestInference(
       messages: recentMessages,
       model: model.id,
       stream: true,
-      // TODO: Model parameters spreading
       // ...model.parameters,
     });
     fetch(INFERENCE_URL, {
diff --git a/extensions/inference-openai-extension/src/index.ts b/extensions/inference-openai-extension/src/index.ts
index 8a7955746..7e3e6e71e 100644
--- a/extensions/inference-openai-extension/src/index.ts
+++ b/extensions/inference-openai-extension/src/index.ts
@@ -37,7 +37,7 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
   private static _currentModel: OpenAIModel;
   private static _engineSettings: EngineSettings = {
-    full_url: "https://api.openai.com/v1/chat/completion",
+    full_url: "https://api.openai.com/v1/chat/completions",
     api_key: "sk-",
   };