robin committed
Commit 48d3bb2 · 1 Parent(s): 2e407d7

gate_softmax_opt_125m

all_results.json ADDED
@@ -0,0 +1 @@
+{"perplexity": 15.631376592783969, "model.decoder.final_layer_norm": 90.90610245198746, "model.decoder.layers.0": 0.49678784040383867, "model.decoder.layers.1": 0.593334630880645, "model.decoder.layers.2": 1.1138245103163247, "model.decoder.layers.3": 2.8117839761377925, "model.decoder.layers.4": 8.130523421942955, "model.decoder.layers.5": 10.14440406393139, "model.decoder.layers.6": 10.894935056277287, "model.decoder.layers.7": 11.510245200261068, "model.decoder.layers.8": 12.581329170657966, "model.decoder.layers.9": 13.552380464323608, "model.decoder.layers.10": 13.696492003791526, "model.decoder.layers.11": 9.754936087726774, "model.decoder.layers.0.fc2": 0.11429211551180996, "model.decoder.layers.1.fc2": 0.06009609463362533, "model.decoder.layers.2.fc2": 0.05160791494300161, "model.decoder.layers.3.fc2": 0.0887320077314915, "model.decoder.layers.4.fc2": 0.1890865288210982, "model.decoder.layers.5.fc2": 0.13179597502571908, "model.decoder.layers.6.fc2": 0.1416791833589281, "model.decoder.layers.7.fc2": 0.18651024183565232, "model.decoder.layers.8.fc2": 0.302436637681282, "model.decoder.layers.9.fc2": 0.4609715695530369, "model.decoder.layers.10.fc2": 0.4534011542236604, "model.decoder.layers.11.fc2": 0.33333622723160705, "model.decoder.layers.0.final_layer_norm": 0.449490335093469, "model.decoder.layers.1.final_layer_norm": 0.648771574373948, "model.decoder.layers.2.final_layer_norm": 0.5434296633260325, "model.decoder.layers.3.final_layer_norm": 0.4805789946764315, "model.decoder.layers.4.final_layer_norm": 0.5119164260776593, "model.decoder.layers.5.final_layer_norm": 0.5596385335124111, "model.decoder.layers.6.final_layer_norm": 0.6510642589710424, "model.decoder.layers.7.final_layer_norm": 0.7143761033978703, "model.decoder.layers.8.final_layer_norm": 0.8607857171125509, "model.decoder.layers.9.final_layer_norm": 1.0746930431931663, "model.decoder.layers.10.final_layer_norm": 1.550172495278017, "model.decoder.layers.11.final_layer_norm": 1.4932173631082393, "model.decoder.layers.0.self_attn.out_proj": 0.28898772995450306, "model.decoder.layers.1.self_attn.out_proj": 0.1585038379317901, "model.decoder.layers.2.self_attn.out_proj": 0.10328788361506085, "model.decoder.layers.3.self_attn.out_proj": 0.15999787821689312, "model.decoder.layers.4.self_attn.out_proj": 0.4930124938410698, "model.decoder.layers.5.self_attn.out_proj": 0.41577131615229307, "model.decoder.layers.6.self_attn.out_proj": 0.2142463416904791, "model.decoder.layers.7.self_attn.out_proj": 0.27058346324084487, "model.decoder.layers.8.self_attn.out_proj": 0.5626099088003353, "model.decoder.layers.9.self_attn.out_proj": 0.823693578499919, "model.decoder.layers.10.self_attn.out_proj": 1.8404120260948647, "model.decoder.layers.11.self_attn.out_proj": 2.9111764316583577, "model.decoder.layers.0.self_attn_layer_norm": 4.416136100694982, "model.decoder.layers.1.self_attn_layer_norm": 5.7459904858033175, "model.decoder.layers.2.self_attn_layer_norm": 8.006325798739718, "model.decoder.layers.3.self_attn_layer_norm": 12.151412253172635, "model.decoder.layers.4.self_attn_layer_norm": 9.717199247596174, "model.decoder.layers.5.self_attn_layer_norm": 7.846852869181936, "model.decoder.layers.6.self_attn_layer_norm": 7.219960030762602, "model.decoder.layers.7.self_attn_layer_norm": 7.449741968926134, "model.decoder.layers.8.self_attn_layer_norm": 6.745625629885806, "model.decoder.layers.9.self_attn_layer_norm": 6.793725093244424, "model.decoder.layers.10.self_attn_layer_norm": 6.281723306414063, "model.decoder.layers.11.self_attn_layer_norm": 6.453088283190931, "max_inf_norm": 90.90610245198746, "max_ffn_inf_norm": 0.4609715695530369, "max_layer_inf_norm": 13.696492003791526, "avg_kurtosis": 89.02969567403245, "max_kurtosis": 1233.9198350735521, "max_kurtosis_layers": 1233.9198350735521}
checkpoints/checkpoint_90000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f08e63cd611118e1c7f070445e686898c2a18433344b2a5d701367713f377546
+size 496331808
checkpoints/checkpoint_90000/optimizer.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:776d54933663402ea72e42d9afde4e66377af58cb6c5ce0e27294ed8ccc90249
+size 992954750
checkpoints/checkpoint_90000/random_states_0.pkl ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a16ffb9edac6f67aef0c53fbaf369ddc12ea61a9d89c43c949ff146a8d0742df
+size 14540
checkpoints/checkpoint_90000/scaler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:490afa1c7da65d52fef48cfe56fbb77a0666bccc41e608f4f980cb9eb3cd9ee7
+size 988
checkpoints/checkpoint_90000/scheduler.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:636e5b2da1779cdd21de6d198011b18758dcc537bd340cc00f0ccb8c49a0d8cc
+size 1064
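Every binary in this commit is stored as a Git LFS pointer: three lines giving the spec version, the SHA-256 `oid` of the real object, and its size in bytes. After `git lfs pull`, a blob can be checked against its pointer; a small sketch (the helper name is ours, paths follow this repo's layout):

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against its Git LFS pointer file."""
    fields = dict(line.split(" ", 1)
                  for line in Path(pointer_path).read_text().splitlines()
                  if " " in line)
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return (h.hexdigest() == fields["oid"].removeprefix("sha256:")
            and Path(blob_path).stat().st_size == int(fields["size"]))

# e.g. optimizer.bin above should hash to 776d5493... and be 992,954,750 bytes.
```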
config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_remove_final_layer_norm": false,
+  "activation_function": "relu",
+  "architectures": [
+    "OPTForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 2,
+  "do_layer_norm_before": true,
+  "dropout": 0.1,
+  "enable_bias": true,
+  "eos_token_id": 2,
+  "ffn_dim": 3072,
+  "hidden_size": 768,
+  "init_std": 0.006,
+  "layer_norm_elementwise_affine": true,
+  "layerdrop": 0.0,
+  "max_position_embeddings": 512,
+  "model_type": "opt",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "torch_dtype": "float32",
+  "transformers_version": "4.31.0",
+  "use_cache": true,
+  "vocab_size": 50272,
+  "word_embed_proj_dim": 768
+}
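The config describes an OPT-125m-sized decoder (12 layers, hidden size 768, 12 heads, ReLU FFN of width 3072), here with a 512-token position limit and `init_std` 0.006. A loading sketch, assuming the repo root is the working directory:

```python
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained(".")           # parses config.json
model = AutoModelForCausalLM.from_pretrained(".")  # loads pytorch_model.bin

assert config.model_type == "opt" and config.max_position_embeddings == 512
print(f"{sum(p.numel() for p in model.parameters()) / 1e6:.0f}M parameters")
```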
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
+  "transformers_version": "4.31.0"
+}
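generation_config.json only pins the special-token ids (bos/eos = 2, pad = 1); everything else falls back to the transformers 4.31.0 defaults (greedy decoding unless overridden). A minimal usage sketch — the prompt and sampling settings are illustrative, not from this repo:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
model = AutoModelForCausalLM.from_pretrained(".")

inputs = tok("The gated softmax", return_tensors="pt")
out = model.generate(**inputs, max_new_tokens=32, do_sample=True, top_p=0.9)
print(tok.decode(out[0], skip_special_tokens=True))
```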
merges.txt ADDED
The diff for this file is too large to render.
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6806d619956293ab81e663bcf0394b70949ee1941f2e0c0e3351c39e09397be1
+size 496440090
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
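As in the stock OPT tokenizer, bos_token, eos_token and unk_token all map to the same string `</s>`, and pad_token to `<pad>`, consistent with the ids (2 and 1) pinned in config.json. A quick sanity check, assuming the repo root is the working directory:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
# bos, eos and unk all resolve to "</s>"; pad is "<pad>".
print(tok.bos_token, tok.eos_token, tok.unk_token, tok.pad_token)
print(tok.bos_token_id, tok.eos_token_id, tok.unk_token_id, tok.pad_token_id)
```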
tokenizer.json ADDED
The diff for this file is too large to render.
tokenizer_config.json ADDED
@@ -0,0 +1,40 @@
+{
+  "add_bos_token": true,
+  "add_prefix_space": false,
+  "bos_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "clean_up_tokenization_spaces": true,
+  "eos_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "errors": "replace",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": {
+    "__type": "AddedToken",
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
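The tokenizer is the GPT-2 BPE (vocab.json + merges.txt) with `add_bos_token: true`, so every encoded sequence starts with `</s>` (id 2). `model_max_length` is transformers' "unset" sentinel (`int(1e30)`), not a real limit, so callers should truncate to the model's 512-position window themselves; a short sketch:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # reads tokenizer_config.json et al.

ids = tok("hello world").input_ids
assert ids[0] == tok.bos_token_id == 2   # add_bos_token prepends </s>

# model_max_length is only a sentinel here, so clamp to the model's
# max_position_embeddings (512) explicitly:
enc = tok("hello world " * 400, truncation=True, max_length=512)
assert len(enc.input_ids) <= 512
```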
vocab.json ADDED
The diff for this file is too large to render.