{
"_name_or_path": "microsoft/layoutlmv3-base",
"architectures": [
"LayoutLMv3ForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"coordinate_size": 128,
"eos_token_id": 2,
"has_relative_attention_bias": true,
"has_spatial_attention_bias": true,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "page_1.png",
"1": "page_10.png",
"2": "page_100.png",
"3": "page_101.png",
"4": "page_102.png",
"5": "page_103.png",
"6": "page_104.png",
"7": "page_105.png",
"8": "page_106.png",
"9": "page_107.png",
"10": "page_108.png",
"11": "page_109.png",
"12": "page_11.png",
"13": "page_110.png",
"14": "page_111.png",
"15": "page_112.png",
"16": "page_113.png",
"17": "page_114.png",
"18": "page_115.png",
"19": "page_116.png",
"20": "page_117.png",
"21": "page_118.png",
"22": "page_119.png",
"23": "page_12.png",
"24": "page_120.png",
"25": "page_121.png",
"26": "page_122.png",
"27": "page_123.png",
"28": "page_124.png",
"29": "page_125.png",
"30": "page_126.png",
"31": "page_127.png",
"32": "page_128.png",
"33": "page_129.png",
"34": "page_13.png",
"35": "page_14.png",
"36": "page_15.png",
"37": "page_16.png",
"38": "page_17.png",
"39": "page_18.png",
"40": "page_19.png",
"41": "page_2.png",
"42": "page_20.png",
"43": "page_21.png",
"44": "page_22.png",
"45": "page_23.png",
"46": "page_24.png",
"47": "page_25.png",
"48": "page_26.png",
"49": "page_27.png",
"50": "page_28.png",
"51": "page_29.png",
"52": "page_3.png",
"53": "page_30.png",
"54": "page_31.png",
"55": "page_32.png",
"56": "page_33.png",
"57": "page_34.png",
"58": "page_35.png",
"59": "page_36.png",
"60": "page_37.png",
"61": "page_38.png",
"62": "page_39.png",
"63": "page_4.png",
"64": "page_40.png",
"65": "page_41.png",
"66": "page_42.png",
"67": "page_43.png",
"68": "page_44.png",
"69": "page_45.png",
"70": "page_46.png",
"71": "page_47.png",
"72": "page_48.png",
"73": "page_49.png",
"74": "page_5.png",
"75": "page_50.png",
"76": "page_51.png",
"77": "page_52.png",
"78": "page_53.png",
"79": "page_54.png",
"80": "page_55.png",
"81": "page_56.png",
"82": "page_57.png",
"83": "page_58.png",
"84": "page_59.png",
"85": "page_6.png",
"86": "page_60.png",
"87": "page_61.png",
"88": "page_62.png",
"89": "page_63.png",
"90": "page_64.png",
"91": "page_65.png",
"92": "page_66.png",
"93": "page_67.png",
"94": "page_68.png",
"95": "page_69.png",
"96": "page_7.png",
"97": "page_70.png",
"98": "page_71.png",
"99": "page_72.png",
"100": "page_73.png",
"101": "page_74.png",
"102": "page_75.png",
"103": "page_76.png",
"104": "page_77.png",
"105": "page_78.png",
"106": "page_79.png",
"107": "page_8.png",
"108": "page_80.png",
"109": "page_81.png",
"110": "page_82.png",
"111": "page_83.png",
"112": "page_84.png",
"113": "page_85.png",
"114": "page_86.png",
"115": "page_87.png",
"116": "page_88.png",
"117": "page_89.png",
"118": "page_9.png",
"119": "page_90.png",
"120": "page_91.png",
"121": "page_92.png",
"122": "page_93.png",
"123": "page_94.png",
"124": "page_95.png",
"125": "page_96.png",
"126": "page_97.png",
"127": "page_98.png",
"128": "page_99.png"
},
"initializer_range": 0.02,
"input_size": 224,
"intermediate_size": 3072,
"label2id": {
"page_1.png": 0,
"page_10.png": 1,
"page_100.png": 2,
"page_101.png": 3,
"page_102.png": 4,
"page_103.png": 5,
"page_104.png": 6,
"page_105.png": 7,
"page_106.png": 8,
"page_107.png": 9,
"page_108.png": 10,
"page_109.png": 11,
"page_11.png": 12,
"page_110.png": 13,
"page_111.png": 14,
"page_112.png": 15,
"page_113.png": 16,
"page_114.png": 17,
"page_115.png": 18,
"page_116.png": 19,
"page_117.png": 20,
"page_118.png": 21,
"page_119.png": 22,
"page_12.png": 23,
"page_120.png": 24,
"page_121.png": 25,
"page_122.png": 26,
"page_123.png": 27,
"page_124.png": 28,
"page_125.png": 29,
"page_126.png": 30,
"page_127.png": 31,
"page_128.png": 32,
"page_129.png": 33,
"page_13.png": 34,
"page_14.png": 35,
"page_15.png": 36,
"page_16.png": 37,
"page_17.png": 38,
"page_18.png": 39,
"page_19.png": 40,
"page_2.png": 41,
"page_20.png": 42,
"page_21.png": 43,
"page_22.png": 44,
"page_23.png": 45,
"page_24.png": 46,
"page_25.png": 47,
"page_26.png": 48,
"page_27.png": 49,
"page_28.png": 50,
"page_29.png": 51,
"page_3.png": 52,
"page_30.png": 53,
"page_31.png": 54,
"page_32.png": 55,
"page_33.png": 56,
"page_34.png": 57,
"page_35.png": 58,
"page_36.png": 59,
"page_37.png": 60,
"page_38.png": 61,
"page_39.png": 62,
"page_4.png": 63,
"page_40.png": 64,
"page_41.png": 65,
"page_42.png": 66,
"page_43.png": 67,
"page_44.png": 68,
"page_45.png": 69,
"page_46.png": 70,
"page_47.png": 71,
"page_48.png": 72,
"page_49.png": 73,
"page_5.png": 74,
"page_50.png": 75,
"page_51.png": 76,
"page_52.png": 77,
"page_53.png": 78,
"page_54.png": 79,
"page_55.png": 80,
"page_56.png": 81,
"page_57.png": 82,
"page_58.png": 83,
"page_59.png": 84,
"page_6.png": 85,
"page_60.png": 86,
"page_61.png": 87,
"page_62.png": 88,
"page_63.png": 89,
"page_64.png": 90,
"page_65.png": 91,
"page_66.png": 92,
"page_67.png": 93,
"page_68.png": 94,
"page_69.png": 95,
"page_7.png": 96,
"page_70.png": 97,
"page_71.png": 98,
"page_72.png": 99,
"page_73.png": 100,
"page_74.png": 101,
"page_75.png": 102,
"page_76.png": 103,
"page_77.png": 104,
"page_78.png": 105,
"page_79.png": 106,
"page_8.png": 107,
"page_80.png": 108,
"page_81.png": 109,
"page_82.png": 110,
"page_83.png": 111,
"page_84.png": 112,
"page_85.png": 113,
"page_86.png": 114,
"page_87.png": 115,
"page_88.png": 116,
"page_89.png": 117,
"page_9.png": 118,
"page_90.png": 119,
"page_91.png": 120,
"page_92.png": 121,
"page_93.png": 122,
"page_94.png": 123,
"page_95.png": 124,
"page_96.png": 125,
"page_97.png": 126,
"page_98.png": 127,
"page_99.png": 128
},
"layer_norm_eps": 1e-05,
"max_2d_position_embeddings": 1024,
"max_position_embeddings": 514,
"max_rel_2d_pos": 256,
"max_rel_pos": 128,
"model_type": "layoutlmv3",
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"pad_token_id": 1,
"patch_size": 16,
"rel_2d_pos_bins": 64,
"rel_pos_bins": 32,
"second_input_size": 112,
"shape_size": 128,
"text_embed": true,
"torch_dtype": "float32",
"transformers_version": "4.42.3",
"type_vocab_size": 1,
"visual_embed": true,
"vocab_size": 50265
}
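
The configuration above describes a fine-tuned microsoft/layoutlmv3-base checkpoint with a 129-way sequence-classification head whose class labels are page-image file names (page_1.png through page_129.png). As a minimal usage sketch, not part of the original upload: the snippet below loads such a checkpoint with Hugging Face Transformers. The local path "path/to/this/checkpoint" and the input file "page.png" are placeholders, and using the base model's processor (which runs OCR via pytesseract by default) is an assumption.

import torch
from PIL import Image
from transformers import AutoProcessor, LayoutLMv3ForSequenceClassification

MODEL_DIR = "path/to/this/checkpoint"  # assumed: directory holding this config.json plus the model weights

# Processor (tokenizer + image processor) taken from the base checkpoint named in "_name_or_path".
# Its image processor applies OCR by default (requires pytesseract) to obtain words and bounding boxes.
processor = AutoProcessor.from_pretrained("microsoft/layoutlmv3-base")
model = LayoutLMv3ForSequenceClassification.from_pretrained(MODEL_DIR)

image = Image.open("page.png").convert("RGB")  # hypothetical input page image
encoding = processor(image, return_tensors="pt")  # input_ids, attention_mask, bbox, pixel_values

with torch.no_grad():
    logits = model(**encoding).logits

# id2label from the config maps the predicted class index back to its label string.
print(model.config.id2label[logits.argmax(-1).item()])

Because id2label / label2id map class indices to image file names, the printed prediction will be one of the page_*.png strings listed in the config.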