chore: resolve comment

This commit is contained in:
Louis 2023-09-30 12:51:41 +07:00 committed by Louis
parent dad3d04403
commit 1a3c53c576
3 changed files with 20 additions and 22 deletions

View File

@ -17,9 +17,11 @@ const dispose = async () =>
.then((res) => resolve(res));
}
});
// Extension-point handler: returns the endpoint used for chat-completion
// inference requests against the local llama server.
const inferenceUrl = () => {
  return "http://localhost:8080/llama/chat_completion";
};
// Register all the above functions and objects with the relevant extension points
export function init({ register }) {
  // Each entry registers a handler under an extension-point name; here the
  // registration name and key are identical.
  const registrations = [
    ["initModel", initModel],
    ["inferenceUrl", inferenceUrl],
    ["dispose", dispose],
  ];
  for (const [name, handler] of registrations) {
    register(name, name, handler);
  }
}

View File

@ -11,7 +11,7 @@ import {
import { useAtom, useAtomValue, useSetAtom } from "jotai";
import { selectAtom } from "jotai/utils";
import { DataService } from "../../shared/coreService";
import { DataService, InfereceService } from "../../shared/coreService";
import {
MessageSenderType,
RawMessage,
@ -52,9 +52,7 @@ export default function useSendChatMessage() {
addNewMessage(newChatMessage);
const recentMessages = [
...chatMessagesHistory.sort(
(a, b) => parseInt(a.id) - parseInt(b.id)
),
...chatMessagesHistory.sort((a, b) => parseInt(a.id) - parseInt(b.id)),
newChatMessage,
]
.slice(-10)
@ -67,23 +65,21 @@ export default function useSendChatMessage() {
: "assistant",
};
});
const response = await fetch(
"http://localhost:8080/llama/chat_completion",
{
method: "POST",
headers: {
"Content-Type": "application/json",
Accept: "text/event-stream",
"Access-Control-Allow-Origi": "*",
},
body: JSON.stringify({
messages: recentMessages,
stream: true,
model: "gpt-3.5-turbo",
max_tokens: 500,
}),
}
);
const url = await executeSerial(InfereceService.INFERENCE_URL);
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
Accept: "text/event-stream",
"Access-Control-Allow-Origi": "*",
},
body: JSON.stringify({
messages: recentMessages,
stream: true,
model: "gpt-3.5-turbo",
max_tokens: 500,
}),
});
const stream = response.body;
const decoder = new TextDecoder("utf-8");

View File

@ -28,7 +28,7 @@ export enum ModelService {
}
// Extension-point names exposed by the inference service.
// FIXME: "Inferece" is a misspelling of "Inference". Callers (e.g. the
// useSendChatMessage hook) import the enum under this name, so renaming
// requires a coordinated change across every import site.
export enum InfereceService {
  PROMPT = "prompt",
  // Resolves the URL of the chat-completion endpoint (see the inferenceUrl
  // handler registered by the llama plugin).
  INFERENCE_URL = "inferenceUrl",
  INIT_MODEL = "initModel",
}