fan_large_24_p16_224 / config.json
{
"depth": 24,
"embed_dim": 480,
"eta": 1.0,
"model_type": "fully_attentional_networks",
"num_heads": 10,
"patch_size": 16,
"se_mlp": true,
"sharpen_attn": false,
"sr_ratio": [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
],
"tokens_norm": true,
"transformers_version": "4.22.0.dev0"
}
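
As a minimal sketch of how these hyperparameters could be inspected, the snippet below reads the file with the Python standard library only; the local path `config.json` is an assumption, and the field names are taken directly from the JSON above.

```python
import json

# Load a local copy of the config shown above (path is assumed).
with open("config.json") as f:
    cfg = json.load(f)

print(cfg["model_type"])     # "fully_attentional_networks"
print(cfg["depth"])          # 24 transformer blocks
print(cfg["embed_dim"])      # 480-dimensional token embeddings
print(cfg["num_heads"])      # 10 attention heads per block
print(cfg["patch_size"])     # 16x16 image patches
print(len(cfg["sr_ratio"]))  # one spatial-reduction ratio per block (all 1)
```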