{
  "model_id": "test",
  "model_arch": "llama",
  "version": 20241202,
  "llm_config": {
    "embed_dim": 4096,
    "ffn_dim": 10240,
    "head_size": 128,
    "kv_dim": 256,
    "n_attn_heads": 32,
    "n_attn_kv_heads": 2,
    "n_ctx": 32768,
    "n_layers": 48,
    "norm_eps": 9.999999747378752e-06,
    "vocab_size": 128512,
    "rope_config": {
      "n_rope_ctx_orig": 32768,
      "rope_attn_factor": 1.0,
      "rope_dim": 128,
      "rope_freq_base": 50000000.0,
      "rope_freq_scale": 1.0,
      "rope_scale_type": "linear",
      "rope_type": 0
    }
  },
  "vision": {}
}