fix: include lm_head and embedding layers in totalLayers count (#6415)
The original calculation used only the `block_count` from the model metadata, which excludes the final LM head and the embedding layer. This underestimated the total number of layers and consequently overestimated `layerSize` (the model size divided by too few layers). Adding `+2` accounts for these two missing layers, ensuring accurate per-layer size metrics.
parent d856651380
commit 5ff7935d91
```diff
@@ -2042,7 +2042,7 @@ export default class llamacpp_extension extends AIEngine {
   ): Promise<{ layerSize: number; totalLayers: number }> {
     const modelSize = await this.getModelSize(path)
     const arch = meta['general.architecture']
-    const totalLayers = Number(meta[`${arch}.block_count`])
+    const totalLayers = Number(meta[`${arch}.block_count`]) + 2 // 1 for lm_head layer and 1 for embedding layer
     if (!totalLayers) throw new Error('Invalid metadata: block_count not found')
     return { layerSize: modelSize / totalLayers, totalLayers }
   }
```
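For context, here is a standalone sketch of the corrected calculation. It is not the extension's actual code: `estimateLayerSize`, the `GGUFMeta` type, and the metadata and size values below are hypothetical, and the real method obtains the size via `this.getModelSize(path)`. It just illustrates that, for a 32-block model, `totalLayers` becomes 34 instead of 32, so the estimated per-layer size shrinks slightly.

```ts
// Illustrative sketch only; helpers and values are made up for this example.
type GGUFMeta = Record<string, string | number>

function estimateLayerSize(
  modelSizeBytes: number,
  meta: GGUFMeta
): { layerSize: number; totalLayers: number } {
  const arch = meta['general.architecture']
  // block_count covers only the transformer blocks; +2 adds lm_head and embedding.
  const totalLayers = Number(meta[`${arch}.block_count`]) + 2
  if (!totalLayers) throw new Error('Invalid metadata: block_count not found')
  return { layerSize: modelSizeBytes / totalLayers, totalLayers }
}

// Hypothetical 7B-class model: 32 transformer blocks, ~4.1 GiB on disk.
const meta: GGUFMeta = {
  'general.architecture': 'llama',
  'llama.block_count': 32,
}
const { layerSize, totalLayers } = estimateLayerSize(4.1 * 1024 ** 3, meta)
// totalLayers = 34 (previously 32); layerSize is correspondingly smaller.
console.log(totalLayers, Math.round(layerSize / 1024 ** 2), 'MiB per layer')
```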