{
  "_class_name": "AllegroTransformer3DModel",
  "_diffusers_version": "0.28.0",
  "_name_or_path": "/cpfs/data/user/yanghuan/expr/rsora/RSoraT2V_L32AH24AD96_122_20240918_88x720x1280_fps15_t5/checkpoint-38000/model",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 96,
  "ca_attention_mode": "xformers",
  "caption_channels": 4096,
  "cross_attention_dim": 2304,
  "double_self_attention": false,
  "downsampler": null,
  "dropout": 0.0,
  "in_channels": 4,
  "interpolation_scale_h": 2.0,
  "interpolation_scale_t": 2.2,
  "interpolation_scale_w": 2.0,
  "model_max_length": 300,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-06,
  "norm_type": "ada_norm_single",
  "num_attention_heads": 24,
  "num_embeds_ada_norm": 1000,
  "num_layers": 32,
  "only_cross_attention": false,
  "out_channels": 4,
  "patch_size": 2,
  "patch_size_t": 1,
  "sa_attention_mode": "flash",
  "sample_size": [
    90,
    160
  ],
  "sample_size_t": 22,
  "upcast_attention": false,
  "use_additional_conditions": null,
  "use_linear_projection": false,
  "use_rope": true
}