------------ Options -------------
base_dim: 512
batch_size: 64
beta_schedule: linear
checkpoints_dir: ./checkpoints
clip_grad_norm: 1
cond_mask_prob: 0.1
continue_ckpt: latest.tar
dataset_name: t2m
debug: False
decay_rate: 0.9
diffusion_steps: 1000
dim_mults: [2, 2, 2, 2]
dropout: 0.1
feat_bias: 5
is_continue: False
latent_dim: 512
log_every: 500
lr: 0.0001
model_ema: True
model_ema_decay: 0.9999
model_ema_steps: 32
name: self_attn-fulllayer-ffn-drop0_1-lr1e4
no_adagn: False
no_eff: True
num_layers: 8
num_train_steps: 50000
prediction_type: sample
save_interval: 10000
seed: 0
self_attention: True
text_latent_dim: 256
time_dim: 512
update_lr_steps: 5000
vis_attn: False
weight_decay: 0.01
-------------- End ----------------
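
The block above is a plain "key: value" dump of the training options. Below is a minimal sketch, not from the original codebase, of how such a dump could be parsed back into a Python dict; the filename "opt.txt" and the type-coercion rules are assumptions.

import ast

def parse_options(path):
    """Parse a 'key: value' options dump into a dict."""
    opts = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            # Skip blanks and the "Options" / "End" banner lines.
            if not line or line.startswith('-'):
                continue
            key, _, value = line.partition(': ')
            try:
                # Coerces ints, floats, booleans, and lists like [2, 2, 2, 2].
                opts[key] = ast.literal_eval(value)
            except (ValueError, SyntaxError):
                # Names, paths, and other plain strings stay as-is.
                opts[key] = value
    return opts

opts = parse_options('opt.txt')
assert opts['diffusion_steps'] == 1000 and opts['self_attention'] is True

For context, "beta_schedule: linear" with "diffusion_steps: 1000" corresponds to the standard DDPM linear noise schedule. The sketch below assumes the usual DDPM endpoint values of 1e-4 and 0.02, which are not recorded in this dump.

import torch

def linear_beta_schedule(diffusion_steps, beta_start=1e-4, beta_end=0.02):
    # Evenly spaced betas from beta_start to beta_end, one per diffusion step.
    return torch.linspace(beta_start, beta_end, diffusion_steps)

betas = linear_beta_schedule(opts['diffusion_steps'])  # tensor of shape (1000,)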