fan_large_24_p16_224 / config.json
{
  "act_layer": null,
  "attn_drop_rate": 0.0,
  "backbone": null,
  "c_head_num": null,
  "channel_dims": null,
  "cls_attn_layers": 2,
  "depth": 24,
  "drop_path_rate": 0.0,
  "drop_rate": 0.0,
  "embed_dim": 480,
  "eta": 1.0,
  "hybrid_patch_size": 2,
  "img_size": [
    224,
    224
  ],
  "in_chans": 3,
  "initializer_range": 1.0,
  "mlp_ratio": 4.0,
  "model_type": "fan",
  "norm_layer": null,
  "num_classes": 1000,
  "num_heads": 10,
  "patch_size": 16,
  "qkv_bias": true,
  "se_mlp": false,
  "sharpen_attn": false,
  "sr_ratio": [
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1
  ],
  "tokens_norm": true,
  "transformers_version": "4.22.0.dev0",
  "use_checkpoint": false,
  "use_pos_embed": true
}
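The values above fix the model's geometry: a 224x224 input cut into 16x16 patches, 24 transformer blocks (matching the 24 entries of "sr_ratio"), and a 480-dimensional embedding split across 10 attention heads. Since "fan" (the Fully Attentional Network family) is not a model_type registered in the core transformers library, the most portable way to inspect this file is the standard json module. A minimal sketch follows, assuming the JSON above is saved locally as config.json; the derived quantities are only consistency checks on the values shown, not part of the file itself.

import json

# Load the config shown above (assumed saved locally as config.json).
with open("config.json") as f:
    cfg = json.load(f)

# 224x224 input with 16x16 patches -> a 14x14 grid of 196 patch tokens.
grid = cfg["img_size"][0] // cfg["patch_size"]
num_patches = grid * grid
print(num_patches)  # 196

# embed_dim 480 split across 10 heads -> 48 dimensions per attention head.
head_dim = cfg["embed_dim"] // cfg["num_heads"]
print(head_dim)  # 48

# One spatial-reduction ratio per transformer block (all 1, i.e. no reduction).
assert len(cfg["sr_ratio"]) == cfg["depth"] == 24

The same checks apply to any FAN variant: patch grid from img_size and patch_size, per-head width from embed_dim and num_heads, and one sr_ratio entry per block of depth.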