{
"model_name_or_path": "output/sft",
"max_length": 512,
"trust_remote_code": true,
"train_datasets": [
[
"PKU-SafeRLHF/train",
{
"proportion": 1.0,
"path": "PKU-SafeRLHF-harmless-only-30k"
}
]
],
"eval_datasets": [
[
"PKU-SafeRLHF/test",
{
"proportion": 1.0
}
]
],
"loss_type": "sequence-wise",
"epochs": 2,
"per_device_train_batch_size": 16,
"per_device_eval_batch_size": 16,
"gradient_accumulation_steps": 2,
"gradient_checkpointing": true,
"normalize_score_during_training": false,
"normalizer_type": "ExponentialMovingAverage",
"normalizer_momentum": 0.9,
"lr": 2e-05,
"lr_scheduler_type": "cosine",
"lr_warmup_ratio": 0.03,
"weight_decay": 0.1,
"seed": 42,
"fp16": false,
"bf16": true,
"tf32": true,
"eval_strategy": "epoch",
"eval_interval": 1000000,
"need_eval": false,
"eval_split_ratio": null,
"output_dir": "/data/jiongxiao_wang/rlhf_attack/safe-rlhf/output/rm_30k",
"log_type": "wandb",
"log_dir": "/data/jiongxiao_wang/rlhf_attack/safe-rlhf/output/rm_30k",
"log_project": "Safe-RLHF-RM",
"log_run_name": "reward-2024-01-05-20-03-25",
"save_16bit": false,
"save_interval": 1000000,
"local_rank": 0,
"zero_stage": 3,
"deepspeed": false,
"deepspeed_config": null,
"deepscale": false,
"deepscale_config": null,
"deepspeed_mpi": false,
"global_rank": 0,
"device": {
"type": "torch.device",
"repr": "device(type='cuda', index=0)"
},
"num_update_steps_per_epoch": 210,
"total_training_steps": 420
}