{
    "base_model_name": "llama-2-70b-Guanaco-QLoRA-fp16",
    "base_model_class": "LlamaForCausalLM",
    "base_loaded_in_4bit": true,
    "base_loaded_in_8bit": false,
    "projections": "q, v",
    "loss": 0.964,
    "learning_rate": 8.68421052631579e-05,
    "epoch": 2.84,
    "current_steps": 1323,
    "train_runtime": 6257.8339,
    "train_samples_per_second": 0.908,
    "train_steps_per_second": 0.007,
    "total_flos": 2.8663842040971264e+17,
    "train_loss": 1.2440977039791288
}
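
The JSON above is a flat key/value training log, so it can be inspected with the standard library alone. The following is a minimal sketch, assuming the file is saved locally as "training_log.json" (the filename is an assumption, not part of the original upload); it loads the log and prints the headline values.

# Minimal sketch: read and summarize the training log above.
# Assumption: the JSON is stored as "training_log.json" in the working directory.
import json

with open("training_log.json", "r", encoding="utf-8") as f:
    log = json.load(f)

# Derive the quantization mode from the two boolean flags in the log.
if log["base_loaded_in_4bit"]:
    quant = "4-bit"
elif log["base_loaded_in_8bit"]:
    quant = "8-bit"
else:
    quant = "full/half precision"

print(f"Base model:  {log['base_model_name']} ({quant}, {log['base_model_class']})")
print(f"Projections: {log['projections']}")
print(f"Epoch {log['epoch']} at step {log['current_steps']}: loss {log['loss']}")
print(f"Average train loss: {log['train_loss']:.4f} over {log['train_runtime']:.0f} s")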