fix: app shows wrong toast on stopping inference (#2460)
parent 5edc24d8e0
commit b8cee875b1
@@ -242,6 +242,7 @@ export default class TensorRTLLMExtension extends LocalOAIEngine {
   }
 
   override stopInference() {
+    if (!this.loadedModel) return
     showToast(
       'Unable to Stop Inference',
       'The model does not support stopping inference.'
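For illustration, here is a minimal, self-contained TypeScript sketch of the early-return guard this fix adds: with no loaded model, stopInference() now returns before showing the "Unable to Stop Inference" toast. The class and the showToast stand-in below are invented for the example; the real extension lives in the Jan codebase and uses its own imports.

```typescript
// Stand-in for the app's toast helper, for demonstration only.
function showToast(title: string, description: string): void {
  console.log(`[toast] ${title}: ${description}`)
}

type Model = { id: string }

// Hypothetical, simplified stand-in for the TensorRT-LLM extension class.
class TensorRTLLMExtensionSketch {
  loadedModel?: Model

  stopInference(): void {
    // The fix: bail out when no model is loaded, so the toast
    // is only shown when stopping inference is actually attempted
    // on a loaded model that does not support it.
    if (!this.loadedModel) return
    showToast(
      'Unable to Stop Inference',
      'The model does not support stopping inference.'
    )
  }
}

// Usage: no toast while nothing is loaded, toast once a model is loaded.
const ext = new TensorRTLLMExtensionSketch()
ext.stopInference()                // no toast
ext.loadedModel = { id: 'demo' }
ext.stopInference()                // shows the toast
```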