{
"act_layer": "GELU",
"architectures": [
"PATHViTModel"
],
"auto_map": {
"AutoConfig": "configuration_path.PATHViTConfig",
"AutoModel": "modeling_path.PATHViTModel"
},
"compat": false,
"depth": 12,
"drop_path_rate": 0.0,
"embed_dim": 768,
"ending_norm": true,
"freeze_iters": 0,
"img_size": [
384,
128
],
"in_chans": 3,
"interval": 3,
"learnable_pos": true,
"lms_checkpoint_train": "fairscale",
"mask_input": false,
"mlp_ratio": 4.0,
"model_type": "vit-b16",
"neck_sp_list": [],
"norm_layer": null,
"norm_layer_eps": 1e-06,
"num_classes": 80,
"num_heads": 12,
"pad_attn_mask": false,
"patch_size": 16,
"pre_ln": false,
"pretrained": true,
"prompt": null,
"qkv_bias": true,
"rel_pos_spatial": false,
"round_padding": true,
"task_sp_list": [
"cls_token",
"cls_token_pos_embed",
"rel_pos_h",
"rel_pos_w"
],
"test_pos_mode": "learnable_simple_interpolate",
"torch_dtype": "float32",
"transformers_version": "4.43.3",
"use_abs_pos_emb": true,
"use_cls_token": true,
"window": false
}