Tags: Text Generation · Transformers · PyTorch · English · retnet · custom_code
RetNet-410m-XATL / config.json
syncdoth: Upload RetNetForCausalLM (commit 7110f83, verified)
{
"_name_or_path": "/nfs/checkpoints/RetNet-410m-bs1024-pile_dedup-copy_exp-skip_reten/hf-iter-050000-ckpt",
"activation_dropout": 0.0,
"activation_fn": "gelu",
"architectures": [
"RetNetForCausalLM"
],
"auto_map": {
"AutoConfig": "configuration_retnet.RetNetConfig",
"AutoModelForCausalLM": "modeling_retnet.RetNetForCausalLM"
},
"bos_token_id": 1,
"decoder_embed_dim": 1024,
"decoder_ffn_embed_dim": 4096,
"decoder_layers": 24,
"decoder_normalize_before": true,
"decoder_retention_heads": 4,
"decoder_value_embed_dim": 1024,
"deepnorm": false,
"drop_path_rate": 0.0,
"dropout": 0.0,
"eos_token_id": 2,
"forward_impl": "parallel",
"groupnorm_affine": false,
"initializer_range": 0.02,
"is_decoder": true,
"layernorm_embedding": false,
"layernorm_eps": 1e-05,
"max_position_embeddings": 2048,
"model_type": "retnet",
"no_scale_embedding": true,
"output_retentions": false,
"parallel_residual": true,
"recurrent_chunk_size": 512,
"rotary_percentage": 0.25,
"subln": false,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.31.0",
"use_bias": true,
"use_cache": false,
"use_ffn_rms_norm": false,
"use_glu": false,
"use_lm_decay": false,
"use_rms_norm": false,
"vocab_size": 50254,
"z_loss_coeff": 0.0
}
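A minimal loading sketch for this checkpoint. The "auto_map" entries above route AutoConfig and AutoModelForCausalLM to the repo's custom configuration_retnet.py and modeling_retnet.py, so trust_remote_code=True is required when loading. The repo id syncdoth/RetNet-410m-XATL is inferred from the page header, and the presence of a bundled tokenizer is an assumption, not something this config guarantees.

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "syncdoth/RetNet-410m-XATL"  # assumed repo id, inferred from the header

# auto_map dispatches to the custom RetNet classes shipped with the repo
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.model_type)  # expected: "retnet"

tokenizer = AutoTokenizer.from_pretrained(repo_id)  # assumes a tokenizer is bundled
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# "forward_impl": "parallel" in the config selects the parallel retention
# form; generation still runs, though "use_cache" is false here, so decoding
# recomputes the full prefix at each step.
inputs = tokenizer("Retention networks", return_tensors="pt")
out = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(out[0], skip_special_tokens=True))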