rwkv7-2.9B-world / config.json
{
  "_attn_implementation_autoset": true,
  "a_low_rank_dim": 96,
  "architectures": [
    "RWKV7ForCausalLM"
  ],
  "attn": null,
  "attn_mode": "chunk",
  "auto_map": {
    "AutoConfig": "modeling_rwkv7.RWKV7Config",
    "AutoModel": "modeling_rwkv7.RWKV7Model",
    "AutoModelForCausalLM": "modeling_rwkv7.RWKV7ForCausalLM"
  },
  "bos_token_id": 1,
  "decay_low_rank_dim": 96,
  "eos_token_id": 2,
  "fuse_cross_entropy": true,
  "fuse_norm": true,
  "gate_low_rank_dim": 320,
  "head_dim": 64,
  "hidden_act": "sqrelu",
  "hidden_ratio": 4.0,
  "hidden_size": 2560,
  "initializer_range": 0.02,
  "intermediate_size": 10240,
  "max_position_embeddings": 2048,
  "model_type": "rwkv7",
  "norm_bias": true,
  "norm_eps": 1e-05,
  "norm_first": true,
  "num_heads": null,
  "num_hidden_layers": 32,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.2",
  "use_cache": true,
  "v_low_rank_dim": 64,
  "vocab_size": 65536
}
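
A minimal loading sketch for this config, assuming the model is pulled from the Hugging Face Hub. The "auto_map" entries route the transformers Auto classes to custom code shipped with the repo (modeling_rwkv7.py), so trust_remote_code=True is required. The repo id below is an assumption based on the file path; substitute the actual Hub id.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "rwkv7-2.9B-world"  # assumed id taken from the file path; adjust to the real Hub repo
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.float32,  # matches "torch_dtype": "float32" in this config
    trust_remote_code=True,     # "auto_map" points to the custom modeling_rwkv7 classes
)

Note that "num_heads" is null here; with "head_dim": 64 and "hidden_size": 2560, the head count would typically be derived as 2560 / 64 = 40.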