feat: add relative path support for model loading
Implemented an `isAbsolutePath` helper to correctly identify POSIX, Windows drive-letter, and UNC absolute paths. Updated `planModelLoad` to automatically resolve relative model and mmproj paths against the Jan data folder, making it easier for users to supply non-absolute paths. Also made minor formatting refinements for readability.
parent 8f67f29317
commit 7c41408a1a
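For context, here is a minimal sketch of the path classification the commit message describes. It mirrors the three checks added in `isAbsolutePath` below; the standalone function name and the example paths are illustrative and not part of the diff.

```typescript
// Illustrative stand-in for the isAbsolutePath() checks added in this commit.
function looksAbsolute(p: string): boolean {
  const norm = p.replace(/\\/g, '/') // treat back-slashes like forward-slashes
  return (
    norm.startsWith('/') ||       // POSIX absolute: /home/user/model.gguf
    /^[a-zA-Z]:/.test(norm) ||    // Windows drive letter: C:\models\model.gguf
    /^\/\/[^/]+/.test(norm)       // UNC share: \\server\share\model.gguf
  )
}

console.log(looksAbsolute('/opt/models/model.gguf'))        // true  (POSIX)
console.log(looksAbsolute('C:\\models\\model.gguf'))        // true  (drive letter)
console.log(looksAbsolute('\\\\server\\share\\model.gguf')) // true  (UNC)
console.log(looksAbsolute('models/llama.gguf'))             // false (relative)
```

Anything that fails all three checks is treated as relative and resolved against the Jan data folder in `planModelLoad`.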
@@ -328,7 +328,8 @@ export default class llamacpp_extension extends AIEngine {
         await this.determineBestBackend(version_backends)
       }
     } else {
-      bestAvailableBackendString = await this.determineBestBackend(version_backends)
+      bestAvailableBackendString =
+        await this.determineBestBackend(version_backends)
     }
 
     let settings = structuredClone(SETTINGS)
@@ -2047,11 +2048,25 @@ export default class llamacpp_extension extends AIEngine {
     return { layerSize: modelSize / totalLayers, totalLayers }
   }
 
+  private isAbsolutePath(p: string): boolean {
+    // Normalize back-slashes to forward-slashes first.
+    const norm = p.replace(/\\/g, '/')
+    return (
+      norm.startsWith('/') || // POSIX absolute
+      /^[a-zA-Z]:/.test(norm) || // Drive-letter Windows (C: or D:)
+      /^\/\/[^/]+/.test(norm) // UNC path //server/share
+    )
+  }
+
   async planModelLoad(
     path: string,
     mmprojPath?: string,
     requestedCtx?: number
   ): Promise<ModelPlan> {
+    if (!this.isAbsolutePath(path))
+      path = await joinPath([await getJanDataFolderPath(), path])
+    if (mmprojPath && !this.isAbsolutePath(mmprojPath))
+      mmprojPath = await joinPath([await getJanDataFolderPath(), mmprojPath])
     const modelSize = await this.getModelSize(path)
     const memoryInfo = await this.getTotalSystemMemory()
     const gguf = await readGgufMetadata(path)
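A self-contained sketch of the resolution step this hunk adds to `planModelLoad`. The `getJanDataFolderPath` and `joinPath` calls are stubbed here so the snippet runs on its own; the real implementations come from the Jan extension runtime, and the folder path shown is made up.

```typescript
// Stubs standing in for the Jan runtime helpers used in the diff above.
const getJanDataFolderPath = async (): Promise<string> => '/home/user/jan' // hypothetical location
const joinPath = async (parts: string[]): Promise<string> => parts.join('/')

// Mirrors the new guard: relative paths are joined with the Jan data folder,
// absolute paths pass through untouched.
async function resolveIfRelative(
  p: string,
  isAbsolute: (s: string) => boolean
): Promise<string> {
  return isAbsolute(p) ? p : joinPath([await getJanDataFolderPath(), p])
}

resolveIfRelative('models/llama.gguf', (s) => s.startsWith('/')).then(console.log)
// -> /home/user/jan/models/llama.gguf
```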
@@ -2217,8 +2232,7 @@ export default class llamacpp_extension extends AIEngine {
     // Calculate available system RAM for KV cache
     const cpuLayers = totalLayers - gpuLayers
     const modelCPUSize = cpuLayers * layerSize
-    const mmprojCPUSize =
-      mmprojSize > 0 && !offloadMmproj ? mmprojSize : 0
+    const mmprojCPUSize = mmprojSize > 0 && !offloadMmproj ? mmprojSize : 0
     const systemRAMUsed = modelCPUSize + mmprojCPUSize
     const availableSystemRAMForKVCache = Math.max(
       0,
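To make the budgeting in this hunk concrete, here is a small worked sketch. The `cpuLayers` through `systemRAMUsed` lines mirror the diff; the layer counts, sizes, and the assumption that the elided second argument of `Math.max` subtracts `systemRAMUsed` from the usable system RAM are illustrative guesses, not taken from the source.

```typescript
// Illustrative numbers: a 48-layer, ~12 GiB model with 20 layers left on the CPU
// and the multimodal projector offloaded to the GPU.
const totalLayers = 48
const gpuLayers = 28
const layerSize = (12 * 1024 ** 3) / totalLayers // bytes per layer
const mmprojSize = 600 * 1024 ** 2               // 600 MiB projector
const offloadMmproj = true

// These four lines mirror the hunk above.
const cpuLayers = totalLayers - gpuLayers
const modelCPUSize = cpuLayers * layerSize
const mmprojCPUSize = mmprojSize > 0 && !offloadMmproj ? mmprojSize : 0
const systemRAMUsed = modelCPUSize + mmprojCPUSize

// Assumption: the truncated Math.max(...) subtracts systemRAMUsed from usable RAM.
const usableSystemRAM = 32 * 1024 ** 3 // hypothetical 32 GiB machine
const availableSystemRAMForKVCache = Math.max(0, usableSystemRAM - systemRAMUsed)
console.log({ cpuLayers, systemRAMUsed, availableSystemRAMForKVCache })
```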