Upload config
config.json CHANGED (+2 -4)
@@ -8,7 +8,6 @@
 "cls_attn_layers": 2,
 "decoder_dropout": 0.1,
 "decoder_hidden_size": 768,
-"depth": 24,
 "depths": null,
 "drop_path_rate": 0.0,
 "drop_rate": 0.0,
@@ -43,10 +42,9 @@
 "mlp_ratio": 4.0,
 "model_type": "fan",
 "norm_layer": null,
-"num_attention_heads":
+"num_attention_heads": 10,
 "num_classes": 1000,
-"
-"num_hidden_layers": 12,
+"num_hidden_layers": 24,
 "out_index": -1,
 "patch_size": 16,
 "qkv_bias": true,
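Net effect of the commit: the legacy "depth" key (value 24) is dropped, its value moves into the Transformers-style "num_hidden_layers" (previously 12), and "num_attention_heads" is set to 10 (its previous value is truncated in the diff view above). A minimal Python sketch to sanity-check the uploaded file, assuming config.json sits in the current directory:

import json

# Load the updated config (the file path is an assumption).
with open("config.json") as f:
    config = json.load(f)

# The legacy "depth" key was removed in this commit...
assert "depth" not in config

# ...in favor of the Transformers-style names set above.
assert config["num_hidden_layers"] == 24
assert config["num_attention_heads"] == 10
assert config["model_type"] == "fan"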