{
  "accelerator_kwargs": {},
  "adap_kl_ctrl": true,
  "backward_batch_size": 16,
  "batch_size": 16,
  "cliprange": 0.2,
  "cliprange_value": 0.2,
  "compare_steps": 1,
  "early_stopping": true,
  "exp_name": "ppo_v2",
  "forward_batch_size": null,
  "gamma": 1,
  "global_backward_batch_size": 64,
  "global_batch_size": 64,
  "gradient_accumulation_steps": 4,
  "gradient_checkpointing": false,
  "horizon": 10000,
  "init_kl_coef": 0.2,
  "is_encoder_decoder": false,
  "is_peft_model": true,
  "kl_penalty": "kl",
  "lam": 0.95,
  "learning_rate": 1.41e-05,
  "log_with": null,
  "max_grad_norm": null,
  "mini_batch_size": 4,
  "model_name": "Maykeye/TinyLLama-v0",
  "optimize_cuda_cache": true,
  "optimize_device_cache": false,
  "ppo_epochs": 4,
  "project_kwargs": {},
  "push_to_hub_if_best_kwargs": {},
  "query_dataset": "imdb",
  "ratio_threshold": 10.0,
  "remove_unused_columns": true,
  "reward_model": "sentiment-analysis:lvwerra/distilbert-imdb",
  "score_clip": null,
  "seed": 0,
  "steps": 20000,
  "target": 6,
  "target_kl": 0.1,
  "task_name": null,
  "tracker_kwargs": {},
  "tracker_project_name": "trl",
  "use_score_norm": false,
  "use_score_scaling": false,
  "vf_coef": 0.1,
  "whiten_rewards": false,
  "world_size": 4
}
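
Below is a minimal sketch, assuming the legacy trl.PPOConfig API (TRL versions before 0.12), of how a configuration with these values might be constructed in Python. Derived fields such as backward_batch_size, global_batch_size, world_size, and is_peft_model are typically filled in at runtime by the trainer and accelerator rather than passed directly, so they are omitted here. This is not the exact training script used for this run.

from trl import PPOConfig

# Sketch only: field names mirror the JSON dump above; values not listed
# here fall back to the library defaults.
ppo_config = PPOConfig(
    exp_name="ppo_v2",
    model_name="Maykeye/TinyLLama-v0",
    query_dataset="imdb",
    reward_model="sentiment-analysis:lvwerra/distilbert-imdb",
    learning_rate=1.41e-5,
    steps=20000,
    batch_size=16,
    mini_batch_size=4,
    gradient_accumulation_steps=4,
    ppo_epochs=4,
    init_kl_coef=0.2,
    adap_kl_ctrl=True,      # adaptive KL controller with target=6, horizon=10000
    target=6,
    horizon=10000,
    kl_penalty="kl",
    early_stopping=True,
    target_kl=0.1,
    cliprange=0.2,
    cliprange_value=0.2,
    vf_coef=0.1,
    gamma=1.0,
    lam=0.95,
    seed=0,
)

In that legacy API, the resulting config object would then be passed to PPOTrainer together with the policy model (here a PEFT-wrapped model, per is_peft_model), an optional reference model, and the tokenizer.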