fixup from refactoring
parent bbbf4779df
commit 0e9a8a27e5
@@ -235,25 +235,7 @@ export default class llamacpp_extension
   async chat(
     opts: chatOptions
   ): Promise<chatCompletion | AsyncIterable<chatCompletionChunk>> {
-    const sessionInfo = this.activeSessions.get(opts.sessionId)
-    if (!sessionInfo) {
-      throw new Error(
-        `No active session found for sessionId: ${opts.sessionId}`
-      )
-    }
-
-    // For streaming responses
-    if (opts.stream) {
-      return this.streamChat(opts)
-    }
-
-    // For non-streaming responses
-    try {
-      return await invoke<chatCompletion>('plugin:llamacpp|chat', { opts })
-    } catch (error) {
-      console.error('Error during chat completion:', error)
-      throw new Error(`Chat completion failed: ${error}`)
-    }
+    throw new Error("method not implemented yet")
   }

   async delete(opts: deleteOptions): Promise<deleteResult> {
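For reference, a minimal sketch (not part of the commit) of how a caller might consume the union return type that chat() declares: an AsyncIterable of chunks when opts.stream is true, otherwise a single completion. The *Sketch types and their fields below are assumptions for illustration only, not the repo's actual chatCompletion / chatCompletionChunk definitions.

// Minimal stand-ins for the completion and chunk shapes; the fields here
// are assumptions, not taken from this diff.
interface ChatCompletionSketch {
  choices: { message: { content: string } }[]
}

interface ChatCompletionChunkSketch {
  choices: { delta: { content?: string } }[]
}

type ChatResultSketch =
  | ChatCompletionSketch
  | AsyncIterable<ChatCompletionChunkSketch>

// Handles both branches of the union: accumulate streamed deltas when given
// an async iterable, otherwise print the single completion's content.
async function printChatResult(result: ChatResultSketch): Promise<void> {
  if (Symbol.asyncIterator in result) {
    let text = ''
    for await (const chunk of result as AsyncIterable<ChatCompletionChunkSketch>) {
      text += chunk.choices[0]?.delta?.content ?? ''
    }
    console.log(text)
  } else {
    console.log((result as ChatCompletionSketch).choices[0]?.message?.content)
  }
}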
@@ -89,7 +89,7 @@ export interface modelInfo {
 export interface listOptions {
   providerId: string; // To specify which provider if a central manager calls this
 }
-export type listResult = ModelInfo[];
+export type listResult = modelInfo[];

 // 2. /pull
 export interface pullOptions {
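A minimal self-contained sketch of how the corrected alias lines up with the lower-case modelInfo interface named in the hunk's context line. The modelInfo fields and the list() body are assumptions for illustration; only the listOptions and listResult declarations come from the diff.

// Sketch only: stand-in for the repo's modelInfo interface; the fields are assumed.
interface modelInfo {
  id: string
  name: string
}

interface listOptions {
  providerId: string // To specify which provider if a central manager calls this
}

// The corrected alias: the element type matches the declared interface name.
type listResult = modelInfo[]

// Hypothetical provider-side list() typed against the alias.
async function list(opts: listOptions): Promise<listResult> {
  console.log(`listing models for provider ${opts.providerId}`)
  return [{ id: 'llama3-8b-q4', name: 'Llama 3 8B (Q4)' }]
}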