fix: failed provider models list due to broken cortex import (#5983)
This commit is contained in:
parent 0aaaca05a4
commit 7a3d9d765c
@@ -866,6 +866,7 @@ export default class llamacpp_extension extends AIEngine {
    const files = await fs.readdirSync(currentDir)
    for (const child of files) {
      try {
        const childPath = await joinPath([currentDir, child])
        const stat = await fs.fileStat(childPath)
        if (
@@ -926,6 +927,9 @@ export default class llamacpp_extension extends AIEngine {
            continue
          }
        }
      } catch (error) {
        console.error(`Error migrating model ${child}:`, error)
      }
    }

    // otherwise, look into subdirectories
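Taken together, the two hunks above show the shape of the model-migration scan: each directory entry is stat'ed, candidate model files are migrated inside a per-entry try/catch so one broken model cannot abort the listing, and the scan otherwise recurses into subdirectories. Below is a minimal standalone sketch of that pattern, using Node's fs/promises and path in place of Jan's async wrappers (fs.readdirSync, fs.fileStat, joinPath); migrateModel and the .gguf filter are hypothetical stand-ins, not the extension's actual code.

// Standalone sketch of the migration scan shown in the hunks above.
import { readdir, stat } from 'node:fs/promises'
import { join } from 'node:path'

async function migrateModel(modelPath: string): Promise<void> {
  // Placeholder for the extension's real migration step.
  console.log(`migrating ${modelPath}`)
}

async function scanDir(currentDir: string): Promise<void> {
  const files = await readdir(currentDir)
  for (const child of files) {
    try {
      const childPath = join(currentDir, child)
      const info = await stat(childPath)
      if (info.isFile() && childPath.endsWith('.gguf')) {
        await migrateModel(childPath)
        continue
      }
      // otherwise, look into subdirectories
      if (info.isDirectory()) {
        await scanDir(childPath)
      }
    } catch (error) {
      // Log and keep going: one broken entry should not abort the scan.
      console.error(`Error migrating model ${child}:`, error)
    }
  }
}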
@@ -1093,9 +1097,7 @@ export default class llamacpp_extension extends AIEngine {
       attempts++
     }

-    throw new Error(
-      'Failed to find an available port for the model to load'
-    )
+    throw new Error('Failed to find an available port for the model to load')
   }

   private async sleep(ms: number): Promise<void> {
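The last hunk only reflows the throw at the end of the port-selection loop onto a single line; behavior is unchanged. For context, here is a minimal sketch of such an attempts-bounded port probe ending in that throw, with the sleep helper implemented the usual setTimeout way its visible signature suggests; isPortAvailable, the starting port, and the attempt limit are assumptions, not the extension's actual values.

// Sketch of an attempts-bounded port probe ending in the throw shown above.
import net from 'node:net'

// Assumed helper: try to bind the port locally; success means it is free.
function isPortAvailable(port: number): Promise<boolean> {
  return new Promise((resolve) => {
    const server = net.createServer()
    server.once('error', () => resolve(false))
    server.once('listening', () => server.close(() => resolve(true)))
    server.listen(port, '127.0.0.1')
  })
}

// Matches the visible signature; the standard setTimeout-based implementation.
function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms))
}

async function findAvailablePort(start: number, maxAttempts = 100): Promise<number> {
  let attempts = 0
  while (attempts < maxAttempts) {
    const port = start + attempts
    if (await isPortAvailable(port)) return port
    await sleep(10) // brief pause between probes
    attempts++
  }
  throw new Error('Failed to find an available port for the model to load')
}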