fix: prevent malicious redirections & security risk (add rel="noopener" to target="_blank" links; replace hard-coded inference URL with OPENAI_DOMAIN constant)

This commit is contained in:
Louis 2023-12-21 14:10:33 +07:00
parent aeab8edcca
commit 8d5fce9165
No known key found for this signature in database
GPG Key ID: 44FA9F4D33C37DE2
6 changed files with 6 additions and 9 deletions

View File

@ -71,6 +71,7 @@ export default function AnnoncementBanner() {
key={i} key={i}
href={social.href} href={social.href}
target="_blank" target="_blank"
rel="noopener"
> >
{social.icon} {social.icon}
</a> </a>

View File

@ -109,6 +109,7 @@ export default function Footer() {
key={i} key={i}
href={social.href} href={social.href}
target="_blank" target="_blank"
rel="noopener"
> >
{social.icon} {social.icon}
</a> </a>

View File

@ -1,6 +1,5 @@
import { Model } from "@janhq/core";
declare const MODULE: string; declare const MODULE: string;
declare const OPENAI_DOMAIN: string;
declare interface EngineSettings { declare interface EngineSettings {
full_url?: string; full_url?: string;

View File

@ -1,5 +1,4 @@
import { Observable } from "rxjs"; import { Observable } from "rxjs";
import { EngineSettings, OpenAIModel } from "../@types/global";
/** /**
* Sends a request to the inference server to generate a response based on the recent messages. * Sends a request to the inference server to generate a response based on the recent messages.
@ -16,7 +15,7 @@ export function requestInference(
): Observable<string> { ): Observable<string> {
return new Observable((subscriber) => { return new Observable((subscriber) => {
let model_id: string = model.id; let model_id: string = model.id;
if (engine.full_url.includes("openai.azure.com")) { if (engine.full_url.includes(OPENAI_DOMAIN)) {
model_id = engine.full_url.split("/")[5]; model_id = engine.full_url.split("/")[5];
} }
const requestBody = JSON.stringify({ const requestBody = JSON.stringify({

View File

@ -22,7 +22,6 @@ import { InferenceExtension } from "@janhq/core";
import { requestInference } from "./helpers/sse"; import { requestInference } from "./helpers/sse";
import { ulid } from "ulid"; import { ulid } from "ulid";
import { join } from "path"; import { join } from "path";
import { EngineSettings, OpenAIModel } from "./@types/global";
/** /**
* A class that implements the InferenceExtension interface from the @janhq/core package. * A class that implements the InferenceExtension interface from the @janhq/core package.
@ -108,6 +107,7 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
*/ */
async inference(data: MessageRequest): Promise<ThreadMessage> { async inference(data: MessageRequest): Promise<ThreadMessage> {
const timestamp = Date.now(); const timestamp = Date.now();
const message: ThreadMessage = { const message: ThreadMessage = {
thread_id: data.threadId, thread_id: data.threadId,
created: timestamp, created: timestamp,

View File

@ -18,10 +18,7 @@ module.exports = {
plugins: [ plugins: [
new webpack.DefinePlugin({ new webpack.DefinePlugin({
MODULE: JSON.stringify(`${packageJson.name}/${packageJson.module}`), MODULE: JSON.stringify(`${packageJson.name}/${packageJson.module}`),
INFERENCE_URL: JSON.stringify( OPENAI_DOMAIN: JSON.stringify("openai.azure.com"),
process.env.INFERENCE_URL ||
"http://127.0.0.1:3928/inferences/llamacpp/chat_completion"
),
}), }),
], ],
output: { output: {