julian-schelb committed · 2ea51fa
Parent(s): 0eacf0a

Upload model

Changed files:
- adapter_config.json (+4 -4)
- adapter_model.bin (+2 -2)
adapter_config.json CHANGED
@@ -1,17 +1,17 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "roberta-
+  "base_model_name_or_path": "roberta-large",
   "encoder_dropout": 0.0,
   "encoder_hidden_size": 128,
   "encoder_num_layers": 2,
   "encoder_reparameterization_type": "MLP",
   "inference_mode": true,
-  "num_attention_heads":
-  "num_layers":
+  "num_attention_heads": 16,
+  "num_layers": 24,
   "num_transformer_submodules": 1,
   "num_virtual_tokens": 20,
   "peft_type": "P_TUNING",
   "revision": null,
   "task_type": "SEQ_CLS",
-  "token_dim":
+  "token_dim": 1024
 }
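The new values line up with roberta-large (24 layers, 16 attention heads, hidden size 1024); PEFT fills num_layers, num_attention_heads, and token_dim in from the base model's config rather than from user input. A minimal sketch of recreating the same P-tuning setup with the peft library, assuming a two-label classification head (num_labels=2 is illustrative and not taken from this repo):

from peft import PromptEncoderConfig, TaskType, get_peft_model
from transformers import AutoModelForSequenceClassification

# Mirrors the user-set fields of the adapter_config.json above;
# the model-derived fields (num_layers, num_attention_heads, token_dim)
# are inferred from the roberta-large config by get_peft_model.
peft_config = PromptEncoderConfig(
    task_type=TaskType.SEQ_CLS,
    num_virtual_tokens=20,
    encoder_reparameterization_type="MLP",
    encoder_hidden_size=128,
    encoder_num_layers=2,
    encoder_dropout=0.0,
)

# num_labels=2 is an assumption for illustration only.
base = AutoModelForSequenceClassification.from_pretrained("roberta-large", num_labels=2)
model = get_peft_model(base, peft_config)
model.print_trainable_parameters()  # only the prompt encoder and classifier head train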
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:4e07dd4af9b7daf67f32956e197f2beafa6b66a1cdf5271d8ceb301e39c9782a
+size 4290946
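adapter_model.bin is a Git LFS pointer; the ~4.3 MB payload holds only the adapter weights, not the roberta-large backbone. A hedged sketch of loading such an adapter from the Hub, where the repository id is a placeholder and not confirmed by this commit:

from peft import PeftConfig, PeftModel
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Hypothetical repo id; substitute the actual Hub path of this adapter.
adapter_repo = "julian-schelb/example-p-tuning-adapter"

peft_config = PeftConfig.from_pretrained(adapter_repo)  # reads adapter_config.json
base = AutoModelForSequenceClassification.from_pretrained(
    peft_config.base_model_name_or_path,  # "roberta-large" per the config above
    num_labels=2,                         # assumption for illustration
)
model = PeftModel.from_pretrained(base, adapter_repo)    # loads adapter_model.bin
tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)

inputs = tokenizer("An example sentence.", return_tensors="pt")
logits = model(**inputs).logits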