card-tiny-ob / config.json
{
"_name_or_path": "karming-wan/card-tiny-ob",
"architectures": [
"YolosForObjectDetection"
],
"attention_probs_dropout_prob": 0.0,
"auxiliary_loss": false,
"bbox_cost": 5,
"bbox_loss_coefficient": 5,
"class_cost": 1,
"eos_coefficient": 0.1,
"giou_cost": 2,
"giou_loss_coefficient": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 192,
"id2label": {
"0": "10c",
"1": "10d",
"2": "10h",
"3": "10s",
"4": "2c",
"5": "2d",
"6": "2h",
"7": "2s",
"8": "3c",
"9": "3d",
"10": "3h",
"11": "3s",
"12": "4c",
"13": "4d",
"14": "4h",
"15": "4s",
"16": "5c",
"17": "5d",
"18": "5h",
"19": "5s",
"20": "6c",
"21": "6d",
"22": "6h",
"23": "6s",
"24": "7c",
"25": "7d",
"26": "7h",
"27": "7s",
"28": "8c",
"29": "8d",
"30": "8h",
"31": "8s",
"32": "9c",
"33": "9d",
"34": "9h",
"35": "9s",
"36": "Ac",
"37": "Ad",
"38": "Ah",
"39": "As",
"40": "Jc",
"41": "Jd",
"42": "Jh",
"43": "Js",
"44": "Kc",
"45": "Kd",
"46": "Kh",
"47": "Ks",
"48": "Qc",
"49": "Qd",
"50": "Qh",
"51": "Qs"
},
"image_size": [
800,
1333
],
"initializer_range": 0.02,
"intermediate_size": 768,
"label2id": {
"10c": 0,
"10d": 1,
"10h": 2,
"10s": 3,
"2c": 4,
"2d": 5,
"2h": 6,
"2s": 7,
"3c": 8,
"3d": 9,
"3h": 10,
"3s": 11,
"4c": 12,
"4d": 13,
"4h": 14,
"4s": 15,
"5c": 16,
"5d": 17,
"5h": 18,
"5s": 19,
"6c": 20,
"6d": 21,
"6h": 22,
"6s": 23,
"7c": 24,
"7d": 25,
"7h": 26,
"7s": 27,
"8c": 28,
"8d": 29,
"8h": 30,
"8s": 31,
"9c": 32,
"9d": 33,
"9h": 34,
"9s": 35,
"Ac": 36,
"Ad": 37,
"Ah": 38,
"As": 39,
"Jc": 40,
"Jd": 41,
"Jh": 42,
"Js": 43,
"Kc": 44,
"Kd": 45,
"Kh": 46,
"Ks": 47,
"Qc": 48,
"Qd": 49,
"Qh": 50,
"Qs": 51
},
"layer_norm_eps": 1e-12,
"model_type": "yolos",
"num_attention_heads": 3,
"num_channels": 3,
"num_detection_tokens": 100,
"num_hidden_layers": 12,
"patch_size": 16,
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.35.2",
"use_mid_position_embeddings": false
}
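
The config above describes a tiny YOLOS detector (hidden size 192, 12 hidden layers, 3 attention heads, 100 detection tokens) fine-tuned on 52 playing-card classes, one per rank-and-suit combination. A minimal sketch of loading the checkpoint with the transformers library and running detection on one image is shown below; the image path and the 0.5 score threshold are illustrative assumptions, not part of this config.

# Minimal inference sketch, assuming a local image "table.jpg" exists.
import torch
from PIL import Image
from transformers import AutoImageProcessor, YolosForObjectDetection

model_id = "karming-wan/card-tiny-ob"
processor = AutoImageProcessor.from_pretrained(model_id)
model = YolosForObjectDetection.from_pretrained(model_id)

image = Image.open("table.jpg")  # hypothetical input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)

# Convert raw logits and normalized boxes to labelled detections in pixel coordinates.
target_sizes = torch.tensor([image.size[::-1]])  # (height, width)
results = processor.post_process_object_detection(
    outputs, threshold=0.5, target_sizes=target_sizes
)[0]

for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
    # id2label from this config maps class indices to card names such as "Ah" or "10s".
    print(f"{model.config.id2label[label.item()]}: {score:.2f} at {box.tolist()}")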