phoebeklett committed
Commit 9e038ac · verified · 1 parent: c6f0cbc

Update config.json

Files changed (1): config.json +19 -5
config.json CHANGED

@@ -1,14 +1,28 @@
 {
-  "_name_or_path": "mosaicml/mpt-30b",
+  "_name_or_path": "normalcomputing/extended-mind-mpt-30b",
   "architectures": [
     "ExtendedMptForCausalLM"
   ],
   "attn_config": {
-    "model_type": ""
+    "alibi": true,
+    "alibi_bias_max": 8,
+    "attn_impl": "torch",
+    "attn_pdrop": 0,
+    "attn_type": "multihead_attention",
+    "attn_uses_sequence_id": false,
+    "clip_qkv": null,
+    "mask_by_sim": true,
+    "memory_type": "manual",
+    "prefix_lm": false,
+    "qk_ln": false,
+    "sim_threshold": 0.25,
+    "softmax_scale": null,
+    "topk": 10,
+    "use_active_externalism": true
   },
   "auto_map": {
-    "AutoConfig": "mosaicml/mpt-30b--configuration_mpt.MPTConfig",
-    "AutoModelForCausalLM": "mosaicml/mpt-30b--modeling_mpt.MPTForCausalLM"
+    "AutoConfig": "configuration.ExtendedMptConfig",
+    "AutoModelForCausalLM": "modeling.ExtendedMptForCausalLM"
   },
   "d_model": 7168,
   "emb_pdrop": 0,
@@ -41,7 +55,7 @@
   "tokenizer_name": "EleutherAI/gpt-neox-20b",
   "torch_dtype": "float32",
   "transformers_version": "4.33.0",
-  "use_cache": false,
+  "use_cache": true,
   "use_external_mind": true,
   "use_external_mind_by_layer": [
     true,
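
The auto_map change is the load-bearing one: AutoConfig and AutoModelForCausalLM now resolve to configuration.ExtendedMptConfig and modeling.ExtendedMptForCausalLM shipped inside this repo, rather than to the upstream mosaicml/mpt-30b code, so consumers must opt in with trust_remote_code=True. A minimal loading sketch, with the repo id taken from _name_or_path and the tokenizer from tokenizer_name above:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# trust_remote_code=True lets AutoConfig/AutoModel follow the auto_map
# entries to the ExtendedMptConfig / ExtendedMptForCausalLM classes
# defined in this repository.
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")
model = AutoModelForCausalLM.from_pretrained(
    "normalcomputing/extended-mind-mpt-30b",
    trust_remote_code=True,
)
```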
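The new attn_config block also exposes the extended-mind retrieval knobs (topk retrieved memories, sim_threshold gated by mask_by_sim, memory_type, use_active_externalism). A hedged sketch of overriding them at load time: the key names are copied from the diff, but whether ExtendedMptConfig surfaces attn_config as a plain dict or as a sub-config object is an assumption, so both access styles are handled.

```python
from transformers import AutoConfig, AutoModelForCausalLM

repo = "normalcomputing/extended-mind-mpt-30b"
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)

# Key names come from the attn_config block above; access style
# depends on how ExtendedMptConfig stores the sub-config.
if isinstance(config.attn_config, dict):
    config.attn_config["topk"] = 5             # retrieve fewer external memories
    config.attn_config["sim_threshold"] = 0.5  # stricter similarity mask
else:
    config.attn_config.topk = 5
    config.attn_config.sim_threshold = 0.5

model = AutoModelForCausalLM.from_pretrained(
    repo, config=config, trust_remote_code=True
)
```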