CryptAL committed
Commit 3bacee4 · verified · 1 Parent(s): 24162db

Upload Phi3ForCausalLM

config.json CHANGED
@@ -8,9 +8,9 @@
   "embd_pdrop": 0.0,
   "eos_token_id": 100257,
   "hidden_act": "silu",
-  "hidden_size": 3296,
+  "hidden_size": 2784,
   "initializer_range": 0.02,
-  "intermediate_size": 7300,
+  "intermediate_size": 6700,
   "max_position_embedding": 4096,
   "max_position_embeddings": 4096,
   "model_type": "phi3",
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:7adac6e8b9bc725ca7585c3fb181a220d5b1843661071738dd0c5eea3717ce87
-  size 4911184832
+  oid sha256:d0cce87bb7bc2be45e0cee964e14530f9f9c3284a983b338dadc6d7deaeba1f0
+  size 4968756344
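
Each shard entry is a Git LFS pointer, so a downloaded file can be checked against the oid and size recorded above. A minimal sketch using only the Python standard library (the local path is illustrative; the same check applies to the second shard below):

    import hashlib
    import os

    # Hash the downloaded shard in 1 MiB chunks and compare against the LFS pointer.
    path = "model-00001-of-00002.safetensors"
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert h.hexdigest() == "d0cce87bb7bc2be45e0cee964e14530f9f9c3284a983b338dadc6d7deaeba1f0"
    assert os.path.getsize(path) == 4968756344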
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:e674064d7eab456e63f718bf22abe5dff1c8a5d211bc3cb37b380fc5309e139c
-  size 3812222760
+  oid sha256:780d30939b5ce6870101d2437abedfeb79e5e4289585d908a11c8db97595876d
+  size 1713941080
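
The two new shard sizes sum to slightly more than the "total_size" recorded in the index below. That gap is expected: "total_size" counts only raw tensor bytes, while each .safetensors file also stores an 8-byte header-length prefix plus a JSON header. A quick arithmetic check (values taken from this commit):

    shard_sizes = [4_968_756_344, 1_713_941_080]  # new sizes of the two shards
    total_size = 6_682_674_624                    # new "total_size" in the index below

    print(sum(shard_sizes))               # 6682697424
    print(sum(shard_sizes) - total_size)  # 22800 bytes of per-file header overhead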
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
  {
    "metadata": {
-     "total_size": 8723384768
+     "total_size": 6682674624
    },
    "weight_map": {
      "lm_head.weight": "model-00002-of-00002.safetensors",
@@ -65,60 +65,60 @@
   "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
   "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.17.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-  "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.18.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+  "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.18.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
   "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.18.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-  "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.19.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.19.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
+  "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.19.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.19.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
   "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.2.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
   "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.2.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-  "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.20.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.20.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.21.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.21.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.22.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.22.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.23.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.23.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.24.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.24.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
+  "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.20.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.20.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.21.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.21.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.22.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.22.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.23.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.23.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.24.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+  "model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.24.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
   "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
   "model.layers.25.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
   "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-  "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-  "model.layers.25.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
+  "model.layers.25.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+  "model.layers.25.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
   "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
   "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
   "model.layers.26.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",