ksmcg committed on
Commit
06d566b
1 Parent(s): a53136d

Upload config

Browse files
Files changed (1) hide show
  1. config.json +92 -0
config.json ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "act_layer": null,
3
+ "attn_drop_rate": 0.0,
4
+ "backbone": "hybrid",
5
+ "c_head_num": null,
6
+ "channel_dims": [
7
+ 384,
8
+ 384,
9
+ 384,
10
+ 384,
11
+ 384,
12
+ 384,
13
+ 384,
14
+ 384,
15
+ 384,
16
+ 384
17
+ ],
18
+ "channels": 256,
19
+ "cls_attn_layers": 2,
20
+ "decoder_hidden_size": 768,
21
+ "depth": 10,
22
+ "depths": [
23
+ 3,
24
+ 3
25
+ ],
26
+ "dims": [
27
+ 128,
28
+ 256,
29
+ 512,
30
+ 1024
31
+ ],
32
+ "drop_path_rate": 0.0,
33
+ "drop_rate": 0.0,
34
+ "dropout_ratio": 0.1,
35
+ "embed_dim": 384,
36
+ "eta": 1.0,
37
+ "feat_downsample": false,
38
+ "feature_strides": [
39
+ 4,
40
+ 8,
41
+ 16,
42
+ 32
43
+ ],
44
+ "hybrid_patch_size": 2,
45
+ "img_size": [
46
+ 224,
47
+ 224
48
+ ],
49
+ "in_channels": [
50
+ 128,
51
+ 256,
52
+ 480,
53
+ 480
54
+ ],
55
+ "in_chans": 3,
56
+ "in_index": [
57
+ 0,
58
+ 1,
59
+ 2,
60
+ 3
61
+ ],
62
+ "initializer_range": 1.0,
63
+ "mlp_ratio": 4.0,
64
+ "model_type": "fan",
65
+ "norm_layer": null,
66
+ "num_classes": 1000,
67
+ "num_heads": 8,
68
+ "out_index": -1,
69
+ "patch_size": 16,
70
+ "qkv_bias": true,
71
+ "reshape_last_stage": false,
72
+ "rounding_mode": "floor",
73
+ "se_mlp": false,
74
+ "sharpen_attn": false,
75
+ "sr_ratio": [
76
+ 1,
77
+ 1,
78
+ 1,
79
+ 1,
80
+ 1,
81
+ 1,
82
+ 1,
83
+ 1,
84
+ 1,
85
+ 1
86
+ ],
87
+ "tokens_norm": true,
88
+ "transformers_version": "4.22.0.dev0",
89
+ "use_checkpoint": false,
90
+ "use_head": false,
91
+ "use_pos_embed": true
92
+ }