Upload config
config.json CHANGED  +2 -2
@@ -43,8 +43,8 @@
   "mlp_ratio": 4.0,
   "model_type": "fan",
   "norm_layer": null,
+  "num_attention_heads": 10,
   "num_classes": 1000,
-  "num_heads": 10,
   "out_index": -1,
   "patch_size": 16,
   "qkv_bias": true,
@@ -80,7 +80,7 @@
     1
   ],
   "tokens_norm": true,
-  "transformers_version": "4.
+  "transformers_version": "4.24.0.dev0",
   "use_checkpoint": false,
   "use_pos_embed": true
 }
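The two edits rename the head-count field from "num_heads" to "num_attention_heads" (the name transformers configs conventionally use) and record the full library version. A minimal sketch of reading the updated file follows; the local path "config.json" and the fallback to the old key are illustrative assumptions, not part of this commit.

import json

# Minimal sketch: read the updated config and pick up the renamed key.
# Falling back to the old "num_heads" key is only an illustrative safeguard
# for configs written before this change.
with open("config.json") as f:
    config = json.load(f)

heads = config.get("num_attention_heads", config.get("num_heads"))
print(config["model_type"], heads)  # expected output: fan 10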