#!/bin/bash
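# Usage (illustrative sketch; the script filename below is a placeholder, not confirmed by the repo):
#   bash run_geneval_inference.sh <config_file> <model_path> \
#       [--step=N] [--sampling_algo=NAME] [--add_label=LABEL] [--model_path=PATH] \
#       [--exist_time_prefix=PREFIX] [--if_save_dirname=VALUE]
# Note: <config_file>, --step, --sampling_algo, --exist_time_prefix and --if_save_dirname are
# parsed below but not forwarded to the inference command.
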
# ================= sampler & data =================
np=8    # number of GPUs to use
default_step=20   # 14
default_sample_nums=533
default_sampling_algo="dpm-solver"
default_add_label=''

# ================= argument parsing =================
config_file=$1
model_paths=$2

# shift has no effect inside `for arg in "$@"` (the list is expanded once before the loop),
# so each recognized option simply records its value
for arg in "$@"
do
    case $arg in
        --step=*)
            step="${arg#*=}"
            ;;
        --sampling_algo=*)
            sampling_algo="${arg#*=}"
            ;;
        --add_label=*)
            add_label="${arg#*=}"
            ;;
        --model_path=*)
            model_paths="${arg#*=}"
            ;;
        --exist_time_prefix=*)
            exist_time_prefix="${arg#*=}"
            ;;
        --if_save_dirname=*)
            if_save_dirname="${arg#*=}"
            ;;
        *)
            ;;
    esac
done

sample_nums=$default_sample_nums
samples_per_gpu=$((sample_nums / np))
add_label=${add_label:-$default_add_label}
echo "Sample numbers: $sample_nums"
echo "Add label: $add_label"
echo "Exist time prefix: $exist_time_prefix"

cmd_template="DPM_TQDM=True python scripts/inference_geneval_diffusers.py \
    --model_path={model_path} \
    --gpu_id {gpu_id} --start_index {start_index} --end_index {end_index}"
if [ -n "${add_label}" ]; then
    cmd_template="${cmd_template} --add_label ${add_label}"
fi
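
# For illustration only (placeholder checkpoint path), GPU 0 with the defaults ends up running
# a command like:
#   CUDA_VISIBLE_DEVICES=0 DPM_TQDM=True python scripts/inference_geneval_diffusers.py \
#       --model_path=/path/to/checkpoint --gpu_id 0 --start_index 0 --end_index 66 &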

echo "==================== inferencing ===================="
for gpu_id in $(seq 0 $((np - 1))); do
  start_index=$((gpu_id * samples_per_gpu))
  end_index=$((start_index + samples_per_gpu))
  if [ $gpu_id -eq $((np - 1)) ]; then
    end_index=$sample_nums
  fi

  cmd="${cmd_template//\{config_file\}/$config_file}"
  cmd="${cmd//\{model_path\}/$model_paths}"
  cmd="${cmd//\{gpu_id\}/$gpu_id}"
  cmd="${cmd//\{start_index\}/$start_index}"
  cmd="${cmd//\{end_index\}/$end_index}"

  echo "Running on GPU $gpu_id: samples $start_index to $end_index"
  eval CUDA_VISIBLE_DEVICES=$gpu_id $cmd &
done
wait

echo "inference finished"