{
  "depth": 24,
  "embed_dim": 480,
  "eta": 1.0,
  "model_type": "fully_attentional_networks",
  "num_heads": 10,
  "patch_size": 16,
  "se_mlp": true,
  "sharpen_attn": false,
  "sr_ratio": [
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1,
    1
  ],
  "tokens_norm": true,
  "transformers_version": "4.22.0.dev0"
}