{ "_name_or_path": "LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct", "activation_function": "silu", "architectures": [ "ExaoneForSpeculativeCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_exaone.ExaoneConfig", "AutoModelForCausalLM": "modeling_exaone.ExaoneForCausalLM", "AutoModelForSequenceClassification": "modeling_exaone.ExaoneForSequenceClassification", "AutoModelForSpeculativeCausalLM": "modeling_speculative_exaone.ExaoneForSpeculativeCausalLM" }, "beagle_add_noise": true, "beagle_attention_offset": "random.randrange(0, 3)", "beagle_attention_wind": "5", "beagle_ckpt_path": null, "beagle_debug": false, "beagle_decoder_key_remap": {}, "beagle_dtype": "torch.bfloat16", "beagle_frozen_targets": [], "beagle_git_diff": "", "beagle_git_sha1": "unknown", "beagle_layer_path": "transformer.h", "beagle_lmhead_path": "lm_head", "beagle_model_path": "beagle/models/exaone3.5/EXAONE-3.5-7.8B-Instruct/", "beagle_norm_path": "transformer.ln_f", "beagle_output_dir": "output", "beagle_reuse_layer": null, "beagle_rotary_path": "transformer.rotary", "beagle_run_name": "temp_run", "beagle_seed": 42, "beagle_tokenizer_path": "LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct", "beagle_use_fc_eagle": false, "beagle_use_lower_layers": 1, "beagle_use_state_distill": false, "bos_token_id": 1, "embed_dropout": 0.0, "eos_token_id": 361, "head_dim": 128, "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "layer_norm_epsilon": 1e-05, "max_position_embeddings": 32768, "model_type": "exaone", "num_attention_heads": 32, "num_key_value_heads": 8, "num_layers": 32, "pad_token_id": 0, "rope_scaling": { "factor": 8.0, "high_freq_factor": 4.0, "low_freq_factor": 1.0, "original_max_position_embeddings": 8192, "rope_type": "llama3" }, "rope_theta": 1000000.0, "speculative_base_model": "LGAI-EXAONE/EXAONE-3.5-7.8B-Instruct", "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.47.1", "use_cache": true, "vocab_size": 102400 }