{
"_name_or_path": "microsoft/layoutlmv3-base",
"architectures": [
"LayoutLMv3ForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"coordinate_size": 128,
"eos_token_id": 2,
"has_relative_attention_bias": true,
"has_spatial_attention_bias": true,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "Temp P1",
"1": "Temp P10",
"2": "Temp P11",
"3": "Temp P12",
"4": "Temp P13",
"5": "Temp P14",
"6": "Temp P15",
"7": "Temp P16",
"8": "Temp P17",
"9": "Temp P18",
"10": "Temp P19",
"11": "Temp P2",
"12": "Temp P20",
"13": "Temp P21",
"14": "Temp P22",
"15": "Temp P23",
"16": "Temp P24",
"17": "Temp P25",
"18": "Temp P26",
"19": "Temp P27",
"20": "Temp P28",
"21": "Temp P29",
"22": "Temp P3",
"23": "Temp P30",
"24": "Temp P31",
"25": "Temp P32",
"26": "Temp P33",
"27": "Temp P34",
"28": "Temp P35",
"29": "Temp P36",
"30": "Temp P37",
"31": "Temp P38",
"32": "Temp P39",
"33": "Temp P4",
"34": "Temp P40",
"35": "Temp P41",
"36": "Temp P42",
"37": "Temp P43",
"38": "Temp P44",
"39": "Temp P45",
"40": "Temp P46",
"41": "Temp P47",
"42": "Temp P48",
"43": "Temp P49",
"44": "Temp P5",
"45": "Temp P50",
"46": "Temp P51",
"47": "Temp P52",
"48": "Temp P53",
"49": "Temp P54",
"50": "Temp P55",
"51": "Temp P56",
"52": "Temp P57",
"53": "Temp P58",
"54": "Temp P59",
"55": "Temp P6",
"56": "Temp P60",
"57": "Temp P61",
"58": "Temp P62",
"59": "Temp P63",
"60": "Temp P64",
"61": "Temp P65",
"62": "Temp P66",
"63": "Temp P67",
"64": "Temp P68",
"65": "Temp P69",
"66": "Temp P7",
"67": "Temp P70",
"68": "Temp P71",
"69": "Temp P72",
"70": "Temp P73",
"71": "Temp P74",
"72": "Temp P75",
"73": "Temp P76",
"74": "Temp P77",
"75": "Temp P78",
"76": "Temp P79",
"77": "Temp P8",
"78": "Temp P80",
"79": "Temp P81",
"80": "Temp P82",
"81": "Temp P83",
"82": "Temp P9"
},
"initializer_range": 0.02,
"input_size": 224,
"intermediate_size": 3072,
"label2id": {
"Temp P1": 0,
"Temp P10": 1,
"Temp P11": 2,
"Temp P12": 3,
"Temp P13": 4,
"Temp P14": 5,
"Temp P15": 6,
"Temp P16": 7,
"Temp P17": 8,
"Temp P18": 9,
"Temp P19": 10,
"Temp P2": 11,
"Temp P20": 12,
"Temp P21": 13,
"Temp P22": 14,
"Temp P23": 15,
"Temp P24": 16,
"Temp P25": 17,
"Temp P26": 18,
"Temp P27": 19,
"Temp P28": 20,
"Temp P29": 21,
"Temp P3": 22,
"Temp P30": 23,
"Temp P31": 24,
"Temp P32": 25,
"Temp P33": 26,
"Temp P34": 27,
"Temp P35": 28,
"Temp P36": 29,
"Temp P37": 30,
"Temp P38": 31,
"Temp P39": 32,
"Temp P4": 33,
"Temp P40": 34,
"Temp P41": 35,
"Temp P42": 36,
"Temp P43": 37,
"Temp P44": 38,
"Temp P45": 39,
"Temp P46": 40,
"Temp P47": 41,
"Temp P48": 42,
"Temp P49": 43,
"Temp P5": 44,
"Temp P50": 45,
"Temp P51": 46,
"Temp P52": 47,
"Temp P53": 48,
"Temp P54": 49,
"Temp P55": 50,
"Temp P56": 51,
"Temp P57": 52,
"Temp P58": 53,
"Temp P59": 54,
"Temp P6": 55,
"Temp P60": 56,
"Temp P61": 57,
"Temp P62": 58,
"Temp P63": 59,
"Temp P64": 60,
"Temp P65": 61,
"Temp P66": 62,
"Temp P67": 63,
"Temp P68": 64,
"Temp P69": 65,
"Temp P7": 66,
"Temp P70": 67,
"Temp P71": 68,
"Temp P72": 69,
"Temp P73": 70,
"Temp P74": 71,
"Temp P75": 72,
"Temp P76": 73,
"Temp P77": 74,
"Temp P78": 75,
"Temp P79": 76,
"Temp P8": 77,
"Temp P80": 78,
"Temp P81": 79,
"Temp P82": 80,
"Temp P83": 81,
"Temp P9": 82
},
"layer_norm_eps": 1e-05,
"max_2d_position_embeddings": 1024,
"max_position_embeddings": 514,
"max_rel_2d_pos": 256,
"max_rel_pos": 128,
"model_type": "layoutlmv3",
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"pad_token_id": 1,
"patch_size": 16,
"rel_2d_pos_bins": 64,
"rel_pos_bins": 32,
"second_input_size": 112,
"shape_size": 128,
"text_embed": true,
"torch_dtype": "float32",
"transformers_version": "4.28.1",
"type_vocab_size": 1,
"visual_embed": true,
"vocab_size": 50265
}
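
This config describes a LayoutLMv3ForSequenceClassification checkpoint fine-tuned over 83 document classes ("Temp P1" through "Temp P83"), initialized from microsoft/layoutlmv3-base. Below is a minimal sketch of how such a checkpoint could be loaded and run with the transformers library; the checkpoint directory, input image path, and the choice of built-in OCR are assumptions for illustration, not part of the original file.

# Minimal usage sketch (assumptions: local checkpoint dir, a page image, pytesseract installed for apply_ocr=True)
from PIL import Image
import torch
from transformers import AutoProcessor, AutoModelForSequenceClassification

checkpoint = "path/to/checkpoint"  # hypothetical directory containing this config.json and the model weights

# The processor pairs the base tokenizer/image processor; apply_ocr=True lets it extract words and boxes itself
processor = AutoProcessor.from_pretrained("microsoft/layoutlmv3-base", apply_ocr=True)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)  # resolves to LayoutLMv3ForSequenceClassification via "architectures"

image = Image.open("page.png").convert("RGB")  # hypothetical input page
encoding = processor(image, return_tensors="pt")

with torch.no_grad():
    logits = model(**encoding).logits  # shape (1, 83), one logit per "Temp P*" class

pred_id = logits.argmax(-1).item()
print(model.config.id2label[pred_id])  # e.g. "Temp P42"; keys are ints after from_pretrained

The 83 entries in id2label/label2id are ordered lexicographically by label name ("Temp P1", "Temp P10", "Temp P11", ...), which is why the numeric suffixes do not increase monotonically with the class index.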