ksmcg committed
Commit: df93839
Parent: 909f3b0

Upload config

Files changed (1): config.json (+2, -4)
config.json CHANGED
@@ -1,5 +1,6 @@
 {
   "act_layer": null,
+  "align_corners": false,
   "attn_drop_rate": 0.0,
   "backbone": "hybrid",
   "c_head_num": null,
@@ -22,7 +23,6 @@
   "decoder_params": {
     "embed_dim": 768
   },
-  "depth": 10,
   "depths": [
     3,
     3
@@ -35,7 +35,6 @@
   ],
   "drop_path_rate": 0.0,
   "drop_rate": 0.0,
-  "dropout_ratio": 0.1,
   "embed_dim": 384,
   "eta": 1.0,
   "feat_downsample": false,
@@ -69,8 +68,7 @@
   "norm_layer": null,
   "num_attention_heads": 8,
   "num_classes": 1000,
-  "num_heads": 8,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 10,
   "out_index": 9,
   "patch_size": 16,
   "qkv_bias": true,