{
"_class_name": "AllegroTransformer3DModel",
"_diffusers_version": "0.31.0.dev0",
"activation_fn": "gelu-approximate",
"attention_bias": true,
"attention_head_dim": 96,
"caption_channels": 4096,
"cross_attention_dim": 2304,
"dropout": 0.0,
"in_channels": 4,
"interpolation_scale_h": 2.0,
"interpolation_scale_t": 2.2,
"interpolation_scale_w": 2.0,
"norm_elementwise_affine": false,
"norm_eps": 1e-06,
"norm_type": "ada_norm_single",
"num_attention_heads": 24,
"num_layers": 32,
"out_channels": 4,
"patch_size": 2,
"patch_size_t": 1,
"sample_frames": 22,
"sample_height": 90,
"sample_size": [
90,
160
],
"sample_size_t": 22,
"sample_width": 160
}