{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.23255813953488372,
  "eval_steps": 2,
  "global_step": 8,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.029069767441860465,
      "grad_norm": 13.73514461517334,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 6.7536,
      "step": 1
    },
    {
      "epoch": 0.05813953488372093,
      "grad_norm": 16.892961502075195,
      "learning_rate": 6.666666666666667e-06,
      "loss": 6.6203,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_NLI-v2_cosine_accuracy": 1.0,
      "eval_NLI-v2_dot_accuracy": 0.109375,
      "eval_NLI-v2_euclidean_accuracy": 1.0,
      "eval_NLI-v2_manhattan_accuracy": 1.0,
      "eval_NLI-v2_max_accuracy": 1.0,
      "eval_VitaminC_cosine_accuracy": 0.55078125,
      "eval_VitaminC_cosine_accuracy_threshold": 0.9469717741012573,
      "eval_VitaminC_cosine_ap": 0.5165514227338435,
      "eval_VitaminC_cosine_f1": 0.6525198938992042,
      "eval_VitaminC_cosine_f1_threshold": 0.4987494945526123,
      "eval_VitaminC_cosine_precision": 0.484251968503937,
      "eval_VitaminC_cosine_recall": 1.0,
      "eval_VitaminC_dot_accuracy": 0.55078125,
      "eval_VitaminC_dot_accuracy_threshold": 417.45166015625,
      "eval_VitaminC_dot_ap": 0.5125282416460941,
      "eval_VitaminC_dot_f1": 0.6525198938992042,
      "eval_VitaminC_dot_f1_threshold": 200.84530639648438,
      "eval_VitaminC_dot_precision": 0.484251968503937,
      "eval_VitaminC_dot_recall": 1.0,
      "eval_VitaminC_euclidean_accuracy": 0.546875,
      "eval_VitaminC_euclidean_accuracy_threshold": 6.841136932373047,
      "eval_VitaminC_euclidean_ap": 0.512994913068431,
      "eval_VitaminC_euclidean_f1": 0.6525198938992042,
      "eval_VitaminC_euclidean_f1_threshold": 20.22179412841797,
      "eval_VitaminC_euclidean_precision": 0.484251968503937,
      "eval_VitaminC_euclidean_recall": 1.0,
      "eval_VitaminC_manhattan_accuracy": 0.546875,
      "eval_VitaminC_manhattan_accuracy_threshold": 117.1143569946289,
      "eval_VitaminC_manhattan_ap": 0.5177961641566705,
      "eval_VitaminC_manhattan_f1": 0.6542553191489362,
      "eval_VitaminC_manhattan_f1_threshold": 292.3347473144531,
      "eval_VitaminC_manhattan_precision": 0.48616600790513836,
      "eval_VitaminC_manhattan_recall": 1.0,
      "eval_VitaminC_max_accuracy": 0.55078125,
      "eval_VitaminC_max_accuracy_threshold": 417.45166015625,
      "eval_VitaminC_max_ap": 0.5177961641566705,
      "eval_VitaminC_max_f1": 0.6542553191489362,
      "eval_VitaminC_max_f1_threshold": 292.3347473144531,
      "eval_VitaminC_max_precision": 0.48616600790513836,
      "eval_VitaminC_max_recall": 1.0,
      "eval_sequential_score": 0.5177961641566705,
      "eval_sts-test_pearson_cosine": 0.017178505918243114,
      "eval_sts-test_pearson_dot": 0.16228524441844774,
      "eval_sts-test_pearson_euclidean": 0.024391561236282323,
      "eval_sts-test_pearson_manhattan": 0.059272519732015624,
      "eval_sts-test_pearson_max": 0.16228524441844774,
      "eval_sts-test_spearman_cosine": 0.07115563415775981,
      "eval_sts-test_spearman_dot": 0.1967074668301984,
      "eval_sts-test_spearman_euclidean": 0.05117306486959643,
      "eval_sts-test_spearman_manhattan": 0.07641354546391785,
      "eval_sts-test_spearman_max": 0.1967074668301984,
      "eval_vitaminc-pairs_loss": 2.72658371925354,
      "eval_vitaminc-pairs_runtime": 1.4719,
      "eval_vitaminc-pairs_samples_per_second": 73.373,
      "eval_vitaminc-pairs_steps_per_second": 1.359,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_negation-triplets_loss": 5.094177722930908,
      "eval_negation-triplets_runtime": 0.3027,
      "eval_negation-triplets_samples_per_second": 211.46,
      "eval_negation-triplets_steps_per_second": 3.304,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_scitail-pairs-pos_loss": 1.9183871746063232,
      "eval_scitail-pairs-pos_runtime": 0.3785,
      "eval_scitail-pairs-pos_samples_per_second": 142.654,
      "eval_scitail-pairs-pos_steps_per_second": 2.642,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_xsum-pairs_loss": 6.3277668952941895,
      "eval_xsum-pairs_runtime": 3.4205,
      "eval_xsum-pairs_samples_per_second": 37.422,
      "eval_xsum-pairs_steps_per_second": 0.585,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_sciq_pairs_loss": 0.36539140343666077,
      "eval_sciq_pairs_runtime": 3.4462,
      "eval_sciq_pairs_samples_per_second": 37.143,
      "eval_sciq_pairs_steps_per_second": 0.58,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_qasc_pairs_loss": 3.9689104557037354,
      "eval_qasc_pairs_runtime": 0.6477,
      "eval_qasc_pairs_samples_per_second": 197.636,
      "eval_qasc_pairs_steps_per_second": 3.088,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_openbookqa_pairs_loss": 4.7439374923706055,
      "eval_openbookqa_pairs_runtime": 0.5759,
      "eval_openbookqa_pairs_samples_per_second": 222.248,
      "eval_openbookqa_pairs_steps_per_second": 3.473,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_msmarco_pairs_loss": 10.51363754272461,
      "eval_msmarco_pairs_runtime": 1.3036,
      "eval_msmarco_pairs_samples_per_second": 98.189,
      "eval_msmarco_pairs_steps_per_second": 1.534,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_nq_pairs_loss": 4.969011306762695,
      "eval_nq_pairs_runtime": 2.5922,
      "eval_nq_pairs_samples_per_second": 49.378,
      "eval_nq_pairs_steps_per_second": 0.772,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_trivia_pairs_loss": 3.8609507083892822,
      "eval_trivia_pairs_runtime": 4.3666,
      "eval_trivia_pairs_samples_per_second": 29.314,
      "eval_trivia_pairs_steps_per_second": 0.458,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_gooaq_pairs_loss": 8.041110038757324,
      "eval_gooaq_pairs_runtime": 0.9119,
      "eval_gooaq_pairs_samples_per_second": 140.361,
      "eval_gooaq_pairs_steps_per_second": 2.193,
      "step": 2
    },
    {
      "epoch": 0.05813953488372093,
      "eval_paws-pos_loss": 2.21897029876709,
      "eval_paws-pos_runtime": 0.6905,
      "eval_paws-pos_samples_per_second": 185.364,
      "eval_paws-pos_steps_per_second": 2.896,
      "step": 2
    },
    {
      "epoch": 0.0872093023255814,
      "grad_norm": 15.801941871643066,
      "learning_rate": 1e-05,
      "loss": 6.7963,
      "step": 3
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 11.94517993927002,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 6.4488,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_NLI-v2_cosine_accuracy": 1.0,
      "eval_NLI-v2_dot_accuracy": 0.109375,
      "eval_NLI-v2_euclidean_accuracy": 1.0,
      "eval_NLI-v2_manhattan_accuracy": 1.0,
      "eval_NLI-v2_max_accuracy": 1.0,
      "eval_VitaminC_cosine_accuracy": 0.55078125,
      "eval_VitaminC_cosine_accuracy_threshold": 0.9437637329101562,
      "eval_VitaminC_cosine_ap": 0.5161989667198088,
      "eval_VitaminC_cosine_f1": 0.6525198938992042,
      "eval_VitaminC_cosine_f1_threshold": 0.56722092628479,
      "eval_VitaminC_cosine_precision": 0.484251968503937,
      "eval_VitaminC_cosine_recall": 1.0,
      "eval_VitaminC_dot_accuracy": 0.546875,
      "eval_VitaminC_dot_accuracy_threshold": 416.9976806640625,
      "eval_VitaminC_dot_ap": 0.515337582265173,
      "eval_VitaminC_dot_f1": 0.6507936507936508,
      "eval_VitaminC_dot_f1_threshold": 223.1047821044922,
      "eval_VitaminC_dot_precision": 0.4823529411764706,
      "eval_VitaminC_dot_recall": 1.0,
      "eval_VitaminC_euclidean_accuracy": 0.55078125,
      "eval_VitaminC_euclidean_accuracy_threshold": 6.841610908508301,
      "eval_VitaminC_euclidean_ap": 0.5167890569390883,
      "eval_VitaminC_euclidean_f1": 0.6542553191489362,
      "eval_VitaminC_euclidean_f1_threshold": 18.548978805541992,
      "eval_VitaminC_euclidean_precision": 0.48616600790513836,
      "eval_VitaminC_euclidean_recall": 1.0,
      "eval_VitaminC_manhattan_accuracy": 0.54296875,
      "eval_VitaminC_manhattan_accuracy_threshold": 110.40855407714844,
      "eval_VitaminC_manhattan_ap": 0.5219582103185639,
      "eval_VitaminC_manhattan_f1": 0.6542553191489362,
      "eval_VitaminC_manhattan_f1_threshold": 282.48602294921875,
      "eval_VitaminC_manhattan_precision": 0.48616600790513836,
      "eval_VitaminC_manhattan_recall": 1.0,
      "eval_VitaminC_max_accuracy": 0.55078125,
      "eval_VitaminC_max_accuracy_threshold": 416.9976806640625,
      "eval_VitaminC_max_ap": 0.5219582103185639,
      "eval_VitaminC_max_f1": 0.6542553191489362,
      "eval_VitaminC_max_f1_threshold": 282.48602294921875,
      "eval_VitaminC_max_precision": 0.48616600790513836,
      "eval_VitaminC_max_recall": 1.0,
      "eval_sequential_score": 0.5219582103185639,
      "eval_sts-test_pearson_cosine": 0.025276809579094173,
      "eval_sts-test_pearson_dot": 0.21470147681711343,
      "eval_sts-test_pearson_euclidean": 0.027783806662194415,
      "eval_sts-test_pearson_manhattan": 0.059736177006118926,
      "eval_sts-test_pearson_max": 0.21470147681711343,
      "eval_sts-test_spearman_cosine": 0.0776783582433888,
      "eval_sts-test_spearman_dot": 0.2491347682204262,
      "eval_sts-test_spearman_euclidean": 0.054640386155863654,
      "eval_sts-test_spearman_manhattan": 0.07764379287610436,
      "eval_sts-test_spearman_max": 0.2491347682204262,
      "eval_vitaminc-pairs_loss": 2.7045021057128906,
      "eval_vitaminc-pairs_runtime": 1.4394,
      "eval_vitaminc-pairs_samples_per_second": 75.033,
      "eval_vitaminc-pairs_steps_per_second": 1.389,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_negation-triplets_loss": 5.081584453582764,
      "eval_negation-triplets_runtime": 0.2952,
      "eval_negation-triplets_samples_per_second": 216.79,
      "eval_negation-triplets_steps_per_second": 3.387,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_scitail-pairs-pos_loss": 1.9186776876449585,
      "eval_scitail-pairs-pos_runtime": 0.3702,
      "eval_scitail-pairs-pos_samples_per_second": 145.879,
      "eval_scitail-pairs-pos_steps_per_second": 2.701,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_xsum-pairs_loss": 6.199001312255859,
      "eval_xsum-pairs_runtime": 3.4227,
      "eval_xsum-pairs_samples_per_second": 37.398,
      "eval_xsum-pairs_steps_per_second": 0.584,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_sciq_pairs_loss": 0.3414052426815033,
      "eval_sciq_pairs_runtime": 3.4403,
      "eval_sciq_pairs_samples_per_second": 37.206,
      "eval_sciq_pairs_steps_per_second": 0.581,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_qasc_pairs_loss": 3.662200927734375,
      "eval_qasc_pairs_runtime": 0.6486,
      "eval_qasc_pairs_samples_per_second": 197.349,
      "eval_qasc_pairs_steps_per_second": 3.084,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_openbookqa_pairs_loss": 4.650759220123291,
      "eval_openbookqa_pairs_runtime": 0.5722,
      "eval_openbookqa_pairs_samples_per_second": 223.702,
      "eval_openbookqa_pairs_steps_per_second": 3.495,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_msmarco_pairs_loss": 9.487885475158691,
      "eval_msmarco_pairs_runtime": 1.2978,
      "eval_msmarco_pairs_samples_per_second": 98.63,
      "eval_msmarco_pairs_steps_per_second": 1.541,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_nq_pairs_loss": 4.4714131355285645,
      "eval_nq_pairs_runtime": 2.569,
      "eval_nq_pairs_samples_per_second": 49.824,
      "eval_nq_pairs_steps_per_second": 0.779,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_trivia_pairs_loss": 3.5246145725250244,
      "eval_trivia_pairs_runtime": 4.3557,
      "eval_trivia_pairs_samples_per_second": 29.387,
      "eval_trivia_pairs_steps_per_second": 0.459,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_gooaq_pairs_loss": 7.395092010498047,
      "eval_gooaq_pairs_runtime": 0.9109,
      "eval_gooaq_pairs_samples_per_second": 140.521,
      "eval_gooaq_pairs_steps_per_second": 2.196,
      "step": 4
    },
    {
      "epoch": 0.11627906976744186,
      "eval_paws-pos_loss": 2.233201742172241,
      "eval_paws-pos_runtime": 0.6853,
      "eval_paws-pos_samples_per_second": 186.773,
      "eval_paws-pos_steps_per_second": 2.918,
      "step": 4
    },
    {
      "epoch": 0.14534883720930233,
      "grad_norm": 6.336462497711182,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 6.5567,
      "step": 5
    },
    {
      "epoch": 0.1744186046511628,
      "grad_norm": 30.24117660522461,
      "learning_rate": 2e-05,
      "loss": 7.994,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_NLI-v2_cosine_accuracy": 1.0,
      "eval_NLI-v2_dot_accuracy": 0.125,
      "eval_NLI-v2_euclidean_accuracy": 1.0,
      "eval_NLI-v2_manhattan_accuracy": 1.0,
      "eval_NLI-v2_max_accuracy": 1.0,
      "eval_VitaminC_cosine_accuracy": 0.546875,
      "eval_VitaminC_cosine_accuracy_threshold": 0.9505010843276978,
      "eval_VitaminC_cosine_ap": 0.5220332354916479,
      "eval_VitaminC_cosine_f1": 0.6542553191489362,
      "eval_VitaminC_cosine_f1_threshold": 0.6742120385169983,
      "eval_VitaminC_cosine_precision": 0.48616600790513836,
      "eval_VitaminC_cosine_recall": 1.0,
      "eval_VitaminC_dot_accuracy": 0.55859375,
      "eval_VitaminC_dot_accuracy_threshold": 410.96356201171875,
      "eval_VitaminC_dot_ap": 0.5098545013010067,
      "eval_VitaminC_dot_f1": 0.6507936507936508,
      "eval_VitaminC_dot_f1_threshold": 259.4541015625,
      "eval_VitaminC_dot_precision": 0.4823529411764706,
      "eval_VitaminC_dot_recall": 1.0,
      "eval_VitaminC_euclidean_accuracy": 0.55078125,
      "eval_VitaminC_euclidean_accuracy_threshold": 6.85032844543457,
      "eval_VitaminC_euclidean_ap": 0.5187640034678729,
      "eval_VitaminC_euclidean_f1": 0.6522911051212938,
      "eval_VitaminC_euclidean_f1_threshold": 15.907812118530273,
      "eval_VitaminC_euclidean_precision": 0.4879032258064516,
      "eval_VitaminC_euclidean_recall": 0.983739837398374,
      "eval_VitaminC_manhattan_accuracy": 0.54296875,
      "eval_VitaminC_manhattan_accuracy_threshold": 128.04562377929688,
      "eval_VitaminC_manhattan_ap": 0.5216728184773658,
      "eval_VitaminC_manhattan_f1": 0.6542553191489362,
      "eval_VitaminC_manhattan_f1_threshold": 266.50201416015625,
      "eval_VitaminC_manhattan_precision": 0.48616600790513836,
      "eval_VitaminC_manhattan_recall": 1.0,
      "eval_VitaminC_max_accuracy": 0.55859375,
      "eval_VitaminC_max_accuracy_threshold": 410.96356201171875,
      "eval_VitaminC_max_ap": 0.5220332354916479,
      "eval_VitaminC_max_f1": 0.6542553191489362,
      "eval_VitaminC_max_f1_threshold": 266.50201416015625,
      "eval_VitaminC_max_precision": 0.4879032258064516,
      "eval_VitaminC_max_recall": 1.0,
      "eval_sequential_score": 0.5220332354916479,
      "eval_sts-test_pearson_cosine": 0.03816033089318139,
      "eval_sts-test_pearson_dot": 0.2714501319216333,
      "eval_sts-test_pearson_euclidean": 0.03534045207172642,
      "eval_sts-test_pearson_manhattan": 0.061786120590017285,
      "eval_sts-test_pearson_max": 0.2714501319216333,
      "eval_sts-test_spearman_cosine": 0.09093735567962451,
      "eval_sts-test_spearman_dot": 0.29608861437488704,
      "eval_sts-test_spearman_euclidean": 0.06076512778358849,
      "eval_sts-test_spearman_manhattan": 0.08153826351124775,
      "eval_sts-test_spearman_max": 0.29608861437488704,
      "eval_vitaminc-pairs_loss": 2.6921818256378174,
      "eval_vitaminc-pairs_runtime": 1.4315,
      "eval_vitaminc-pairs_samples_per_second": 75.443,
      "eval_vitaminc-pairs_steps_per_second": 1.397,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_negation-triplets_loss": 5.068139553070068,
      "eval_negation-triplets_runtime": 0.2981,
      "eval_negation-triplets_samples_per_second": 214.716,
      "eval_negation-triplets_steps_per_second": 3.355,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_scitail-pairs-pos_loss": 1.9319576025009155,
      "eval_scitail-pairs-pos_runtime": 0.3653,
      "eval_scitail-pairs-pos_samples_per_second": 147.818,
      "eval_scitail-pairs-pos_steps_per_second": 2.737,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_xsum-pairs_loss": 6.079549789428711,
      "eval_xsum-pairs_runtime": 3.4013,
      "eval_xsum-pairs_samples_per_second": 37.633,
      "eval_xsum-pairs_steps_per_second": 0.588,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_sciq_pairs_loss": 0.32075461745262146,
      "eval_sciq_pairs_runtime": 3.442,
      "eval_sciq_pairs_samples_per_second": 37.188,
      "eval_sciq_pairs_steps_per_second": 0.581,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_qasc_pairs_loss": 3.36326265335083,
      "eval_qasc_pairs_runtime": 0.6495,
      "eval_qasc_pairs_samples_per_second": 197.087,
      "eval_qasc_pairs_steps_per_second": 3.079,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_openbookqa_pairs_loss": 4.481137275695801,
      "eval_openbookqa_pairs_runtime": 0.5729,
      "eval_openbookqa_pairs_samples_per_second": 223.432,
      "eval_openbookqa_pairs_steps_per_second": 3.491,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_msmarco_pairs_loss": 8.048794746398926,
      "eval_msmarco_pairs_runtime": 1.2936,
      "eval_msmarco_pairs_samples_per_second": 98.947,
      "eval_msmarco_pairs_steps_per_second": 1.546,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_nq_pairs_loss": 3.7927472591400146,
      "eval_nq_pairs_runtime": 2.5733,
      "eval_nq_pairs_samples_per_second": 49.741,
      "eval_nq_pairs_steps_per_second": 0.777,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_trivia_pairs_loss": 3.284496545791626,
      "eval_trivia_pairs_runtime": 4.355,
      "eval_trivia_pairs_samples_per_second": 29.391,
      "eval_trivia_pairs_steps_per_second": 0.459,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_gooaq_pairs_loss": 6.677769184112549,
      "eval_gooaq_pairs_runtime": 0.9178,
      "eval_gooaq_pairs_samples_per_second": 139.457,
      "eval_gooaq_pairs_steps_per_second": 2.179,
      "step": 6
    },
    {
      "epoch": 0.1744186046511628,
      "eval_paws-pos_loss": 2.2626476287841797,
      "eval_paws-pos_runtime": 0.6901,
      "eval_paws-pos_samples_per_second": 185.475,
      "eval_paws-pos_steps_per_second": 2.898,
      "step": 6
    },
    {
      "epoch": 0.20348837209302326,
      "grad_norm": 11.503045082092285,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 7.1037,
      "step": 7
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 6.524661540985107,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 6.6239,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_NLI-v2_cosine_accuracy": 1.0,
      "eval_NLI-v2_dot_accuracy": 0.140625,
      "eval_NLI-v2_euclidean_accuracy": 1.0,
      "eval_NLI-v2_manhattan_accuracy": 1.0,
      "eval_NLI-v2_max_accuracy": 1.0,
      "eval_VitaminC_cosine_accuracy": 0.55078125,
      "eval_VitaminC_cosine_accuracy_threshold": 0.9556466341018677,
      "eval_VitaminC_cosine_ap": 0.5262234678756146,
      "eval_VitaminC_cosine_f1": 0.6542553191489362,
      "eval_VitaminC_cosine_f1_threshold": 0.7482036352157593,
      "eval_VitaminC_cosine_precision": 0.48616600790513836,
      "eval_VitaminC_cosine_recall": 1.0,
      "eval_VitaminC_dot_accuracy": 0.54296875,
      "eval_VitaminC_dot_accuracy_threshold": 429.2105712890625,
      "eval_VitaminC_dot_ap": 0.5105928204741528,
      "eval_VitaminC_dot_f1": 0.6525198938992042,
      "eval_VitaminC_dot_f1_threshold": 304.46807861328125,
      "eval_VitaminC_dot_precision": 0.484251968503937,
      "eval_VitaminC_dot_recall": 1.0,
      "eval_VitaminC_euclidean_accuracy": 0.55078125,
      "eval_VitaminC_euclidean_accuracy_threshold": 6.457396030426025,
      "eval_VitaminC_euclidean_ap": 0.5262113804807371,
      "eval_VitaminC_euclidean_f1": 0.6505376344086021,
      "eval_VitaminC_euclidean_f1_threshold": 14.363262176513672,
      "eval_VitaminC_euclidean_precision": 0.4859437751004016,
      "eval_VitaminC_euclidean_recall": 0.983739837398374,
      "eval_VitaminC_manhattan_accuracy": 0.546875,
      "eval_VitaminC_manhattan_accuracy_threshold": 144.2735137939453,
      "eval_VitaminC_manhattan_ap": 0.5216238817948092,
      "eval_VitaminC_manhattan_f1": 0.6542553191489362,
      "eval_VitaminC_manhattan_f1_threshold": 248.78077697753906,
      "eval_VitaminC_manhattan_precision": 0.48616600790513836,
      "eval_VitaminC_manhattan_recall": 1.0,
      "eval_VitaminC_max_accuracy": 0.55078125,
      "eval_VitaminC_max_accuracy_threshold": 429.2105712890625,
      "eval_VitaminC_max_ap": 0.5262234678756146,
      "eval_VitaminC_max_f1": 0.6542553191489362,
      "eval_VitaminC_max_f1_threshold": 304.46807861328125,
      "eval_VitaminC_max_precision": 0.48616600790513836,
      "eval_VitaminC_max_recall": 1.0,
      "eval_sequential_score": 0.5262234678756146,
      "eval_sts-test_pearson_cosine": 0.04822016371804072,
      "eval_sts-test_pearson_dot": 0.2933829804301914,
      "eval_sts-test_pearson_euclidean": 0.0440427995340741,
      "eval_sts-test_pearson_manhattan": 0.06814345626491929,
      "eval_sts-test_pearson_max": 0.2933829804301914,
      "eval_sts-test_spearman_cosine": 0.10311449730221736,
      "eval_sts-test_spearman_dot": 0.30840433621335606,
      "eval_sts-test_spearman_euclidean": 0.06897119310369988,
      "eval_sts-test_spearman_manhattan": 0.08808657402390177,
      "eval_sts-test_spearman_max": 0.30840433621335606,
      "eval_vitaminc-pairs_loss": 2.7006633281707764,
      "eval_vitaminc-pairs_runtime": 1.435,
      "eval_vitaminc-pairs_samples_per_second": 75.262,
      "eval_vitaminc-pairs_steps_per_second": 1.394,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_negation-triplets_loss": 5.0855937004089355,
      "eval_negation-triplets_runtime": 0.2946,
      "eval_negation-triplets_samples_per_second": 217.208,
      "eval_negation-triplets_steps_per_second": 3.394,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_scitail-pairs-pos_loss": 1.9701284170150757,
      "eval_scitail-pairs-pos_runtime": 0.3657,
      "eval_scitail-pairs-pos_samples_per_second": 147.656,
      "eval_scitail-pairs-pos_steps_per_second": 2.734,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_xsum-pairs_loss": 6.050904273986816,
      "eval_xsum-pairs_runtime": 3.4161,
      "eval_xsum-pairs_samples_per_second": 37.47,
      "eval_xsum-pairs_steps_per_second": 0.585,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_sciq_pairs_loss": 0.31550008058547974,
      "eval_sciq_pairs_runtime": 3.4195,
      "eval_sciq_pairs_samples_per_second": 37.432,
      "eval_sciq_pairs_steps_per_second": 0.585,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_qasc_pairs_loss": 3.174623727798462,
      "eval_qasc_pairs_runtime": 0.6393,
      "eval_qasc_pairs_samples_per_second": 200.217,
      "eval_qasc_pairs_steps_per_second": 3.128,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_openbookqa_pairs_loss": 4.325982570648193,
      "eval_openbookqa_pairs_runtime": 0.5723,
      "eval_openbookqa_pairs_samples_per_second": 223.64,
      "eval_openbookqa_pairs_steps_per_second": 3.494,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_msmarco_pairs_loss": 6.989845275878906,
      "eval_msmarco_pairs_runtime": 1.2986,
      "eval_msmarco_pairs_samples_per_second": 98.569,
      "eval_msmarco_pairs_steps_per_second": 1.54,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_nq_pairs_loss": 3.3526666164398193,
      "eval_nq_pairs_runtime": 2.5766,
      "eval_nq_pairs_samples_per_second": 49.677,
      "eval_nq_pairs_steps_per_second": 0.776,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_trivia_pairs_loss": 3.2416629791259766,
      "eval_trivia_pairs_runtime": 4.3655,
      "eval_trivia_pairs_samples_per_second": 29.321,
      "eval_trivia_pairs_steps_per_second": 0.458,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_gooaq_pairs_loss": 6.288425922393799,
      "eval_gooaq_pairs_runtime": 0.9111,
      "eval_gooaq_pairs_samples_per_second": 140.492,
      "eval_gooaq_pairs_steps_per_second": 2.195,
      "step": 8
    },
    {
      "epoch": 0.23255813953488372,
      "eval_paws-pos_loss": 2.351145029067993,
      "eval_paws-pos_runtime": 0.6907,
      "eval_paws-pos_samples_per_second": 185.325,
      "eval_paws-pos_steps_per_second": 2.896,
      "step": 8
    }
  ],
  "logging_steps": 1,
  "max_steps": 34,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 224,
  "trial_name": null,
  "trial_params": null
}