Upload bge-m3-int8q.pt
d79d62d · verified
1.52 kB · initial commit
bge-m3-int8q.pt
Detected Pickle imports (605)
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_544.XLMRobertaLayer",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_1.Embedding",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_70.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_563.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_454.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_500.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_164.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_279.XLMRobertaOutput",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_517.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_155.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_171.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_218.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_525.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_298.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_359.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_83.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_7.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_374.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_271.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_43.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_315.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_128.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_152.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_311.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_428.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_193.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_538.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_282.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_562.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_278.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_154.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_367.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_241.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_373.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_406.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_52.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_336.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_405.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_29.XLMRobertaSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_143.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_510.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaSelfAttention",
- "__torch__.transformers.activations.___torch_mangle_105.GELUActivation",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_158.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_284.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_65.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_314.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_558.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_343.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_50.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_257.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_400.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_93.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_227.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_487.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_530.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_239.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_60.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_39.XLMRobertaOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_484.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_462.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_138.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_223.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_476.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_291.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_344.Linear",
- "__torch__.transformers.activations.___torch_mangle_297.GELUActivation",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_88.XLMRobertaLayer",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_437.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_35.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_215.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_119.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_285.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_20.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_486.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_107.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_333.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_281.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_348.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_492.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_407.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_498.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_277.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_511.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_516.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_318.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_309.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_425.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_536.Linear",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_84.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_330.Linear",
- "__torch__.transformers.activations.___torch_mangle_177.GELUActivation",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_205.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_129.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_429.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_455.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_61.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_419.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_19.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaModel",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_46.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_97.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_14.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_230.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_413.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_529.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_435.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_166.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_244.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_48.XLMRobertaSelfAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_508.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_523.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_27.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_259.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_17.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_404.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_390.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_322.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_302.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_567.XLMRobertaOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_102.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_59.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_110.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_382.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_346.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_415.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_473.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_45.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_209.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_231.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_140.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_383.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_146.Linear",
- "__torch__.torch.nn.modules.container.ModuleList",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_63.XLMRobertaOutput",
- "__torch__.transformers.activations.___torch_mangle_153.GELUActivation",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_186.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_53.XLMRobertaSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_316.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_28.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_286.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_356.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_9.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_269.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_519.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_307.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_319.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_210.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_549.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_204.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_104.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_280.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_337.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_165.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_450.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_242.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_194.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_375.XLMRobertaOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_16.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_341.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_30.XLMRobertaAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_555.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_369.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_73.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_542.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_313.LinearPackedParams",
- "__torch__.torch.nn.modules.sparse.___torch_mangle_0.Embedding",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_40.XLMRobertaLayer",
- "__torch__.transformers.activations.___torch_mangle_393.GELUActivation",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_548.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_518.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_41.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_290.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_445.LayerNorm",
- "__torch__.torch.nn.modules.dropout.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_439.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_216.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_126.XLMRobertaAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_494.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_543.XLMRobertaOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_232.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_213.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_534.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_524.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_540.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_11.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_149.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_275.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_212.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_402.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_132.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_433.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_470.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_477.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_296.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_51.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_440.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaEncoder",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaPooler",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_503.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_463.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_12.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_68.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_431.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_203.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_481.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_565.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_114.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_253.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_417.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_467.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_122.Linear",
- "__torch__.transformers.activations.___torch_mangle_441.GELUActivation",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_472.XLMRobertaLayer",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_364.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_397.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_176.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_478.Linear",
- "__torch__.transformers.activations.___torch_mangle_537.GELUActivation",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_408.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_2.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_502.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_234.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_342.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_202.XLMRobertaIntermediate",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_270.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_151.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_147.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_293.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_101.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_409.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_23.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_26.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_247.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_380.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_570.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_18.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_139.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_358.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_90.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_236.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_251.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_299.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_546.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_557.XLMRobertaSelfOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_71.Dropout",
- "__torch__.transformers.activations.___torch_mangle_273.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_381.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_170.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_352.XLMRobertaLayer",
- "torch.LongStorage",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_72.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_32.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_240.XLMRobertaSelfAttention",
- "__torch__.torch.nn.modules.activation.Tanh",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_386.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_66.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_258.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_252.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_559.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_566.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_80.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_108.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_157.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_556.Dropout",
- "collections.OrderedDict",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_120.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_505.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_507.LayerNorm",
- "__torch__.transformers.activations.___torch_mangle_249.GELUActivation",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_180.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_564.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_427.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_185.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_37.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_305.LinearPackedParams",
- "__torch__.transformers.activations.___torch_mangle_321.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_515.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_449.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_308.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_5.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_79.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_351.XLMRobertaOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_528.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_91.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_479.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_568.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_554.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_468.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_268.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_95.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_96.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_366.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_125.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_357.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_430.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_483.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_217.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_506.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_21.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_178.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_378.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_501.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_514.XLMRobertaIntermediate",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_144.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_552.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_224.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_67.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_411.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_168.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_183.XLMRobertaOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_527.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_137.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_385.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_551.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_130.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_410.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_287.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_499.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_75.LayerNorm",
- "__torch__.torch.nn.modules.normalization.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_188.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_395.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_539.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_3.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_77.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_424.XLMRobertaLayer",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_58.XLMRobertaIntermediate",
- "__torch__.transformers.activations.___torch_mangle_561.GELUActivation",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_422.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_391.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_403.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_493.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_535.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_47.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_423.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_142.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_532.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_34.XLMRobertaIntermediate",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_112.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_211.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_350.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_531.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_389.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_448.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_145.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_238.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_246.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_256.XLMRobertaLayer",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_541.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_89.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_226.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_464.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_320.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_49.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_69.LinearPackedParams",
- "__torch__.transformers.activations.GELUActivation",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_182.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_274.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_334.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_187.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_38.Dropout",
- "torch.FloatStorage",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_416.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_195.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_118.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_370.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_547.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_312.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_179.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_457.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_229.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_332.Linear",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_485.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_372.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_163.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_414.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_368.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_310.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_459.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_398.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_434.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_159.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_362.Linear",
- "__torch__.transformers.activations.___torch_mangle_489.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_121.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_214.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_377.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_245.XLMRobertaSelfOutput",
- "__torch__.transformers.activations.___torch_mangle_201.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_55.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_412.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_160.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_156.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_243.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_175.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_384.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_361.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_401.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_480.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_329.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_418.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_421.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_326.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_92.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_262.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_267.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_420.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaEmbeddings",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_222.XLMRobertaAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_349.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_438.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_266.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_6.Dropout",
- "__torch__.transformers.activations.___torch_mangle_57.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_189.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_13.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_64.XLMRobertaLayer",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_504.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_248.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_340.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_446.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_526.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaIntermediate",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_456.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_379.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_135.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_521.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_264.XLMRobertaSelfAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_148.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_354.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_167.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_458.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_328.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_426.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_184.XLMRobertaLayer",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_133.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_272.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_82.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_123.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_76.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_347.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_387.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_327.XLMRobertaOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_460.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_260.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_4.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_331.LinearPackedParams",
- "torch.QInt8Storage",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_301.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_360.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_250.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_335.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_306.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_136.XLMRobertaLayer",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_124.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_198.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_173.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_197.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_199.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_237.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_131.LinearPackedParams",
- "torch._utils._rebuild_qtensor",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_86.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_228.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_482.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_233.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_292.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_15.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_263.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_113.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_339.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_491.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_261.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_87.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_553.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_303.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_103.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_443.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_471.XLMRobertaOutput",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_206.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_355.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_94.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_219.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_172.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_162.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_192.XLMRobertaSelfAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_220.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_394.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_98.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_22.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_174.XLMRobertaAttention",
- "__torch__.transformers.activations.___torch_mangle_225.GELUActivation",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_300.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_25.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_109.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_116.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_10.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_100.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_191.Dropout",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_117.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_512.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_288.XLMRobertaSelfAttention",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_488.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_496.XLMRobertaLayer",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_365.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_56.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_363.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_447.XLMRobertaOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_520.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_353.LinearPackedParams",
- "__torch__.transformers.activations.___torch_mangle_513.GELUActivation",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_509.XLMRobertaSelfOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_490.XLMRobertaIntermediate",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_44.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_495.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_190.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_265.LinearPackedParams",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_85.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_338.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_141.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_533.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_323.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_74.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_432.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_255.XLMRobertaOutput",
- "__torch__.transformers.activations.___torch_mangle_81.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_451.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_444.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_550.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_324.Linear",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_42.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_207.XLMRobertaOutput",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_169.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_208.XLMRobertaLayer",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_436.Dropout",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_522.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_181.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_392.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_295.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_466.XLMRobertaIntermediate",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_99.LayerNorm",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_78.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_371.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_200.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_325.LayerNorm",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_452.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_388.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_399.XLMRobertaOutput",
- "torch.per_tensor_affine",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_36.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_453.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_376.XLMRobertaLayer",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_560.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_254.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_111.XLMRobertaOutput",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_150.XLMRobertaAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_196.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_317.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_161.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_569.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_8.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_469.LayerNorm",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_115.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_289.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_442.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_127.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_396.Linear",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_497.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_461.XLMRobertaSelfOutput",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_475.LinearPackedParams",
- "__torch__.transformers.activations.___torch_mangle_33.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_283.LinearPackedParams",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_62.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_134.Dropout",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_221.XLMRobertaSelfOutput",
- "__torch__.transformers.activations.___torch_mangle_345.GELUActivation",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_545.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_31.LinearPackedParams",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_276.Linear",
- "__torch__.torch.classes.quantized.LinearPackedParamsBase",
- "__torch__.transformers.activations.___torch_mangle_465.GELUActivation",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_294.XLMRobertaAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_106.XLMRobertaIntermediate",
- "__torch__.torch.ao.nn.quantized.dynamic.modules.linear.___torch_mangle_474.Linear",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_54.XLMRobertaAttention",
- "__torch__.torch.ao.nn.quantized.modules.linear.___torch_mangle_235.LinearPackedParams",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_24.XLMRobertaSelfAttention",
- "__torch__.transformers.models.xlm_roberta.modeling_xlm_roberta.___torch_mangle_304.XLMRobertaLayer",
- "torch._utils._rebuild_tensor_v2",
- "collections.OrderedDict",
- "torch.LongStorage",
- "torch.DoubleStorage",
- "torch._utils._rebuild_tensor_v2",
- "torch.jit._pickle.restore_type_tag",
- "torch.LongStorage",
- "collections.OrderedDict"
How to fix it?
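One hedged way to handle the warning, rather than an official recommendation: the imports can be checked without unpickling anything by disassembling the pickle streams inside the zip, and the file itself is a TorchScript archive, so it is loaded with `torch.jit.load` (which deserializes through the TorchScript unpickler rather than Python's general `pickle.load`). The file name, tokenizer name, and traced input signature below are assumptions:

```python
import zipfile
import pickletools

import torch
from transformers import AutoTokenizer

ARCHIVE = "bge-m3-int8q.pt"  # assumed local path to the downloaded file

# 1) Disassemble the pickle files inside the torch.jit.save zip.
#    pickletools.dis only decodes opcodes and never imports or executes anything;
#    its GLOBAL / STACK_GLOBAL lines correspond to the imports listed above.
with zipfile.ZipFile(ARCHIVE) as zf:
    for name in zf.namelist():
        if name.endswith(".pkl"):
            print(f"== {name} ==")
            pickletools.dis(zf.read(name))

# 2) Load the TorchScript module and run it once.
model = torch.jit.load(ARCHIVE, map_location="cpu")
model.eval()

tok = AutoTokenizer.from_pretrained("BAAI/bge-m3")  # assumed tokenizer
batch = tok(["a short test sentence"], return_tensors="pt")

with torch.no_grad():
    # assumed positional signature from tracing: (input_ids, attention_mask)
    last_hidden = model(batch["input_ids"], batch["attention_mask"])[0]

print(last_hidden.shape)  # (batch, seq_len, hidden_size)
```

Storing plain weights as safetensors, or exporting to ONNX, would avoid the pickle-based format (and hence this scan warning) entirely, at the cost of not shipping a self-contained TorchScript program.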
1.36 GB · Upload bge-m3-int8q.pt