ksmcg committed on
Commit
ce53898
1 Parent(s): 3fbfa87

Upload config

Browse files
Files changed (1) hide show
  1. config.json +15 -1
config.json CHANGED
@@ -1,17 +1,29 @@
1
  {
 
 
 
 
 
 
2
  "depth": 24,
 
 
3
  "embed_dim": 480,
4
  "eta": 1.0,
 
5
  "img_size": [
6
  224,
7
  224
8
  ],
9
  "in_chans": 3,
10
  "initializer_range": 1.0,
 
11
  "model_type": "fully_attentional_networks",
 
12
  "num_classes": 1000,
13
  "num_heads": 10,
14
  "patch_size": 16,
 
15
  "se_mlp": false,
16
  "sharpen_attn": false,
17
  "sr_ratio": [
@@ -41,5 +53,7 @@
41
  1
42
  ],
43
  "tokens_norm": true,
44
- "transformers_version": "4.22.0.dev0"
 
 
45
  }
 
1
  {
2
+ "act_layer": null,
3
+ "attn_drop_rate": 0.0,
4
+ "backbone": null,
5
+ "c_head_num": null,
6
+ "channel_dims": null,
7
+ "cls_attn_layers": 2,
8
  "depth": 24,
9
+ "drop_path_rate": 0.0,
10
+ "drop_rate": 0.0,
11
  "embed_dim": 480,
12
  "eta": 1.0,
13
+ "hybrid_patch_size": 2,
14
  "img_size": [
15
  224,
16
  224
17
  ],
18
  "in_chans": 3,
19
  "initializer_range": 1.0,
20
+ "mlp_ratio": 4.0,
21
  "model_type": "fully_attentional_networks",
22
+ "norm_layer": null,
23
  "num_classes": 1000,
24
  "num_heads": 10,
25
  "patch_size": 16,
26
+ "qkv_bias": true,
27
  "se_mlp": false,
28
  "sharpen_attn": false,
29
  "sr_ratio": [
 
53
  1
54
  ],
55
  "tokens_norm": true,
56
+ "transformers_version": "4.22.0.dev0",
57
+ "use_checkpoint": false,
58
+ "use_pos_embed": true
59
  }