Upload folder using huggingface_hub
checkpoint-1500/latest
CHANGED
@@ -1 +1 @@
-
+global_step1500
checkpoint-1500/model-00001-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fb7d96677028968fa0efb3ac3d9b0f18d8849cd26216974d967e16891767623f
 size 4989973456
checkpoint-1500/model-00002-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:da032c5a4be981c7a258e92ebfcb03f706ecc928b1ac61a51409a411a3716ecd
 size 3786358064
checkpoint-1500/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5bbec261470d413996479fcbb03408ee846cb206811a71888c56aa5caa61e5cc
 size 1064
checkpoint-1500/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -10507,3506 +10507,6 @@
       "learning_rate": 1.6687597216781584e-05,
       "loss": 0.8145,
       "step": 1500
-    },
-    {
-      "epoch": 0.29,
-      "grad_norm": 1.6814585945564913,
-      "learning_rate": 1.668295881701969e-05,
-      "loss": 0.9505,
-      "step": 1501
-    },
-    {
-      "epoch": 0.29,
-      "grad_norm": 1.8977009968762184,
-      "learning_rate": 1.667831781754954e-05,
-      "loss": 0.9042,
-      "step": 1502
-    },
-    {
-      "epoch": 0.29,
-      "grad_norm": 1.8711464591936275,
-      "learning_rate": 1.6673674220176496e-05,
-      "loss": 0.9478,
-      "step": 1503
-    },
-    {
-      "epoch": 0.29,
-      "grad_norm": 1.7180539423192296,
-      "learning_rate": 1.666902802670695e-05,
-      "loss": 0.962,
-      "step": 1504
-    },
[removed per-step log entries continue in the same pattern (epoch, grad_norm, learning_rate, loss, step) for steps 1505 through 1786, where the captured diff ends; the hunk header records 3,506 removed lines in total]
|
12512 |
-
},
|
12513 |
-
{
|
12514 |
-
"epoch": 0.34,
|
12515 |
-
"grad_norm": 1.610391858816259,
|
12516 |
-
"learning_rate": 1.5256989289218237e-05,
|
12517 |
-
"loss": 0.9148,
|
12518 |
-
"step": 1787
|
12519 |
-
},
|
12520 |
-
{
|
12521 |
-
"epoch": 0.34,
|
12522 |
-
"grad_norm": 1.8330198235703115,
|
12523 |
-
"learning_rate": 1.5251682607315485e-05,
|
12524 |
-
"loss": 0.9917,
|
12525 |
-
"step": 1788
|
12526 |
-
},
|
12527 |
-
{
|
12528 |
-
"epoch": 0.34,
|
12529 |
-
"grad_norm": 1.6130949199706817,
|
12530 |
-
"learning_rate": 1.5246373882478899e-05,
|
12531 |
-
"loss": 0.8627,
|
12532 |
-
"step": 1789
|
12533 |
-
},
|
12534 |
-
{
|
12535 |
-
"epoch": 0.34,
|
12536 |
-
"grad_norm": 1.6917099405793583,
|
12537 |
-
"learning_rate": 1.5241063116773606e-05,
|
12538 |
-
"loss": 0.8093,
|
12539 |
-
"step": 1790
|
12540 |
-
},
|
12541 |
-
{
|
12542 |
-
"epoch": 0.34,
|
12543 |
-
"grad_norm": 1.611514855787027,
|
12544 |
-
"learning_rate": 1.5235750312265522e-05,
|
12545 |
-
"loss": 0.8823,
|
12546 |
-
"step": 1791
|
12547 |
-
},
|
12548 |
-
{
|
12549 |
-
"epoch": 0.35,
|
12550 |
-
"grad_norm": 1.508488536223312,
|
12551 |
-
"learning_rate": 1.5230435471021356e-05,
|
12552 |
-
"loss": 0.8525,
|
12553 |
-
"step": 1792
|
12554 |
-
},
|
12555 |
-
{
|
12556 |
-
"epoch": 0.35,
|
12557 |
-
"grad_norm": 1.8488504173801166,
|
12558 |
-
"learning_rate": 1.5225118595108615e-05,
|
12559 |
-
"loss": 0.9723,
|
12560 |
-
"step": 1793
|
12561 |
-
},
|
12562 |
-
{
|
12563 |
-
"epoch": 0.35,
|
12564 |
-
"grad_norm": 1.81787454238434,
|
12565 |
-
"learning_rate": 1.5219799686595588e-05,
|
12566 |
-
"loss": 0.9186,
|
12567 |
-
"step": 1794
|
12568 |
-
},
|
12569 |
-
{
|
12570 |
-
"epoch": 0.35,
|
12571 |
-
"grad_norm": 1.835940653590753,
|
12572 |
-
"learning_rate": 1.5214478747551367e-05,
|
12573 |
-
"loss": 1.0315,
|
12574 |
-
"step": 1795
|
12575 |
-
},
|
12576 |
-
{
|
12577 |
-
"epoch": 0.35,
|
12578 |
-
"grad_norm": 1.7183650877031766,
|
12579 |
-
"learning_rate": 1.520915578004582e-05,
|
12580 |
-
"loss": 0.9409,
|
12581 |
-
"step": 1796
|
12582 |
-
},
|
12583 |
-
{
|
12584 |
-
"epoch": 0.35,
|
12585 |
-
"grad_norm": 1.6105105683049763,
|
12586 |
-
"learning_rate": 1.5203830786149615e-05,
|
12587 |
-
"loss": 0.9783,
|
12588 |
-
"step": 1797
|
12589 |
-
},
|
12590 |
-
{
|
12591 |
-
"epoch": 0.35,
|
12592 |
-
"grad_norm": 1.6828073709950533,
|
12593 |
-
"learning_rate": 1.51985037679342e-05,
|
12594 |
-
"loss": 0.95,
|
12595 |
-
"step": 1798
|
12596 |
-
},
|
12597 |
-
{
|
12598 |
-
"epoch": 0.35,
|
12599 |
-
"grad_norm": 1.7659646765890635,
|
12600 |
-
"learning_rate": 1.5193174727471822e-05,
|
12601 |
-
"loss": 0.9881,
|
12602 |
-
"step": 1799
|
12603 |
-
},
|
12604 |
-
{
|
12605 |
-
"epoch": 0.35,
|
12606 |
-
"grad_norm": 1.7186747858397031,
|
12607 |
-
"learning_rate": 1.5187843666835502e-05,
|
12608 |
-
"loss": 0.9142,
|
12609 |
-
"step": 1800
|
12610 |
-
},
|
12611 |
-
{
|
12612 |
-
"epoch": 0.35,
|
12613 |
-
"grad_norm": 1.5025090660678702,
|
12614 |
-
"learning_rate": 1.5182510588099058e-05,
|
12615 |
-
"loss": 0.8743,
|
12616 |
-
"step": 1801
|
12617 |
-
},
|
12618 |
-
{
|
12619 |
-
"epoch": 0.35,
|
12620 |
-
"grad_norm": 1.7934959998223838,
|
12621 |
-
"learning_rate": 1.5177175493337077e-05,
|
12622 |
-
"loss": 0.9239,
|
12623 |
-
"step": 1802
|
12624 |
-
},
|
12625 |
-
{
|
12626 |
-
"epoch": 0.35,
|
12627 |
-
"grad_norm": 1.5578282419512874,
|
12628 |
-
"learning_rate": 1.5171838384624952e-05,
|
12629 |
-
"loss": 0.869,
|
12630 |
-
"step": 1803
|
12631 |
-
},
|
12632 |
-
{
|
12633 |
-
"epoch": 0.35,
|
12634 |
-
"grad_norm": 1.7377002208663477,
|
12635 |
-
"learning_rate": 1.516649926403884e-05,
|
12636 |
-
"loss": 0.968,
|
12637 |
-
"step": 1804
|
12638 |
-
},
|
12639 |
-
{
|
12640 |
-
"epoch": 0.35,
|
12641 |
-
"grad_norm": 1.775042883902276,
|
12642 |
-
"learning_rate": 1.5161158133655694e-05,
|
12643 |
-
"loss": 0.9978,
|
12644 |
-
"step": 1805
|
12645 |
-
},
|
12646 |
-
{
|
12647 |
-
"epoch": 0.35,
|
12648 |
-
"grad_norm": 1.5911271575560177,
|
12649 |
-
"learning_rate": 1.5155814995553239e-05,
|
12650 |
-
"loss": 0.8461,
|
12651 |
-
"step": 1806
|
12652 |
-
},
|
12653 |
-
{
|
12654 |
-
"epoch": 0.35,
|
12655 |
-
"grad_norm": 1.9003801818751707,
|
12656 |
-
"learning_rate": 1.515046985180999e-05,
|
12657 |
-
"loss": 0.9565,
|
12658 |
-
"step": 1807
|
12659 |
-
},
|
12660 |
-
{
|
12661 |
-
"epoch": 0.35,
|
12662 |
-
"grad_norm": 1.7235091223002925,
|
12663 |
-
"learning_rate": 1.5145122704505238e-05,
|
12664 |
-
"loss": 0.9244,
|
12665 |
-
"step": 1808
|
12666 |
-
},
|
12667 |
-
{
|
12668 |
-
"epoch": 0.35,
|
12669 |
-
"grad_norm": 1.5836993320305877,
|
12670 |
-
"learning_rate": 1.5139773555719046e-05,
|
12671 |
-
"loss": 0.9256,
|
12672 |
-
"step": 1809
|
12673 |
-
},
|
12674 |
-
{
|
12675 |
-
"epoch": 0.35,
|
12676 |
-
"grad_norm": 1.7698468539495165,
|
12677 |
-
"learning_rate": 1.5134422407532268e-05,
|
12678 |
-
"loss": 0.9067,
|
12679 |
-
"step": 1810
|
12680 |
-
},
|
12681 |
-
{
|
12682 |
-
"epoch": 0.35,
|
12683 |
-
"grad_norm": 1.6257859529986807,
|
12684 |
-
"learning_rate": 1.512906926202653e-05,
|
12685 |
-
"loss": 0.879,
|
12686 |
-
"step": 1811
|
12687 |
-
},
|
12688 |
-
{
|
12689 |
-
"epoch": 0.35,
|
12690 |
-
"grad_norm": 1.6883738442738565,
|
12691 |
-
"learning_rate": 1.512371412128424e-05,
|
12692 |
-
"loss": 0.9165,
|
12693 |
-
"step": 1812
|
12694 |
-
},
|
12695 |
-
{
|
12696 |
-
"epoch": 0.35,
|
12697 |
-
"grad_norm": 1.641267657042082,
|
12698 |
-
"learning_rate": 1.5118356987388567e-05,
|
12699 |
-
"loss": 0.9208,
|
12700 |
-
"step": 1813
|
12701 |
-
},
|
12702 |
-
{
|
12703 |
-
"epoch": 0.35,
|
12704 |
-
"grad_norm": 1.5234773704787912,
|
12705 |
-
"learning_rate": 1.5112997862423472e-05,
|
12706 |
-
"loss": 0.939,
|
12707 |
-
"step": 1814
|
12708 |
-
},
|
12709 |
-
{
|
12710 |
-
"epoch": 0.35,
|
12711 |
-
"grad_norm": 1.699284171054198,
|
12712 |
-
"learning_rate": 1.5107636748473687e-05,
|
12713 |
-
"loss": 0.927,
|
12714 |
-
"step": 1815
|
12715 |
-
},
|
12716 |
-
{
|
12717 |
-
"epoch": 0.35,
|
12718 |
-
"grad_norm": 1.7766671571230526,
|
12719 |
-
"learning_rate": 1.5102273647624714e-05,
|
12720 |
-
"loss": 0.9559,
|
12721 |
-
"step": 1816
|
12722 |
-
},
|
12723 |
-
{
|
12724 |
-
"epoch": 0.35,
|
12725 |
-
"grad_norm": 1.8071128001622048,
|
12726 |
-
"learning_rate": 1.5096908561962824e-05,
|
12727 |
-
"loss": 0.9329,
|
12728 |
-
"step": 1817
|
12729 |
-
},
|
12730 |
-
{
|
12731 |
-
"epoch": 0.35,
|
12732 |
-
"grad_norm": 1.5602195899860873,
|
12733 |
-
"learning_rate": 1.509154149357507e-05,
|
12734 |
-
"loss": 0.8408,
|
12735 |
-
"step": 1818
|
12736 |
-
},
|
12737 |
-
{
|
12738 |
-
"epoch": 0.35,
|
12739 |
-
"grad_norm": 1.6681745264243264,
|
12740 |
-
"learning_rate": 1.5086172444549274e-05,
|
12741 |
-
"loss": 0.9715,
|
12742 |
-
"step": 1819
|
12743 |
-
},
|
12744 |
-
{
|
12745 |
-
"epoch": 0.35,
|
12746 |
-
"grad_norm": 1.8299212257238782,
|
12747 |
-
"learning_rate": 1.508080141697402e-05,
|
12748 |
-
"loss": 0.9271,
|
12749 |
-
"step": 1820
|
12750 |
-
},
|
12751 |
-
{
|
12752 |
-
"epoch": 0.35,
|
12753 |
-
"grad_norm": 1.6199015194976085,
|
12754 |
-
"learning_rate": 1.5075428412938672e-05,
|
12755 |
-
"loss": 0.8942,
|
12756 |
-
"step": 1821
|
12757 |
-
},
|
12758 |
-
{
|
12759 |
-
"epoch": 0.35,
|
12760 |
-
"grad_norm": 1.637504821627329,
|
12761 |
-
"learning_rate": 1.5070053434533351e-05,
|
12762 |
-
"loss": 0.9063,
|
12763 |
-
"step": 1822
|
12764 |
-
},
|
12765 |
-
{
|
12766 |
-
"epoch": 0.35,
|
12767 |
-
"grad_norm": 1.676609757359368,
|
12768 |
-
"learning_rate": 1.5064676483848968e-05,
|
12769 |
-
"loss": 0.8435,
|
12770 |
-
"step": 1823
|
12771 |
-
},
|
12772 |
-
{
|
12773 |
-
"epoch": 0.35,
|
12774 |
-
"grad_norm": 1.759464544373338,
|
12775 |
-
"learning_rate": 1.5059297562977174e-05,
|
12776 |
-
"loss": 0.9497,
|
12777 |
-
"step": 1824
|
12778 |
-
},
|
12779 |
-
{
|
12780 |
-
"epoch": 0.35,
|
12781 |
-
"grad_norm": 1.8044880904814558,
|
12782 |
-
"learning_rate": 1.5053916674010403e-05,
|
12783 |
-
"loss": 0.921,
|
12784 |
-
"step": 1825
|
12785 |
-
},
|
12786 |
-
{
|
12787 |
-
"epoch": 0.35,
|
12788 |
-
"grad_norm": 1.6629588750577138,
|
12789 |
-
"learning_rate": 1.5048533819041853e-05,
|
12790 |
-
"loss": 0.9421,
|
12791 |
-
"step": 1826
|
12792 |
-
},
|
12793 |
-
{
|
12794 |
-
"epoch": 0.35,
|
12795 |
-
"grad_norm": 2.0351132912201115,
|
12796 |
-
"learning_rate": 1.5043149000165482e-05,
|
12797 |
-
"loss": 0.9275,
|
12798 |
-
"step": 1827
|
12799 |
-
},
|
12800 |
-
{
|
12801 |
-
"epoch": 0.35,
|
12802 |
-
"grad_norm": 1.7221826608550477,
|
12803 |
-
"learning_rate": 1.5037762219476016e-05,
|
12804 |
-
"loss": 0.9359,
|
12805 |
-
"step": 1828
|
12806 |
-
},
|
12807 |
-
{
|
12808 |
-
"epoch": 0.35,
|
12809 |
-
"grad_norm": 1.8863983035535,
|
12810 |
-
"learning_rate": 1.503237347906894e-05,
|
12811 |
-
"loss": 0.9219,
|
12812 |
-
"step": 1829
|
12813 |
-
},
|
12814 |
-
{
|
12815 |
-
"epoch": 0.35,
|
12816 |
-
"grad_norm": 1.7877241947739209,
|
12817 |
-
"learning_rate": 1.5026982781040511e-05,
|
12818 |
-
"loss": 0.9239,
|
12819 |
-
"step": 1830
|
12820 |
-
},
|
12821 |
-
{
|
12822 |
-
"epoch": 0.35,
|
12823 |
-
"grad_norm": 1.7559006781624251,
|
12824 |
-
"learning_rate": 1.5021590127487731e-05,
|
12825 |
-
"loss": 0.9135,
|
12826 |
-
"step": 1831
|
12827 |
-
},
|
12828 |
-
{
|
12829 |
-
"epoch": 0.35,
|
12830 |
-
"grad_norm": 1.7628850458243945,
|
12831 |
-
"learning_rate": 1.5016195520508383e-05,
|
12832 |
-
"loss": 1.0021,
|
12833 |
-
"step": 1832
|
12834 |
-
},
|
12835 |
-
{
|
12836 |
-
"epoch": 0.35,
|
12837 |
-
"grad_norm": 1.712262152372984,
|
12838 |
-
"learning_rate": 1.501079896220099e-05,
|
12839 |
-
"loss": 0.9462,
|
12840 |
-
"step": 1833
|
12841 |
-
},
|
12842 |
-
{
|
12843 |
-
"epoch": 0.35,
|
12844 |
-
"grad_norm": 1.5312995616872846,
|
12845 |
-
"learning_rate": 1.5005400454664847e-05,
|
12846 |
-
"loss": 0.8234,
|
12847 |
-
"step": 1834
|
12848 |
-
},
|
12849 |
-
{
|
12850 |
-
"epoch": 0.35,
|
12851 |
-
"grad_norm": 1.4310758854139343,
|
12852 |
-
"learning_rate": 1.5000000000000002e-05,
|
12853 |
-
"loss": 0.8858,
|
12854 |
-
"step": 1835
|
12855 |
-
},
|
12856 |
-
{
|
12857 |
-
"epoch": 0.35,
|
12858 |
-
"grad_norm": 1.546597318526974,
|
12859 |
-
"learning_rate": 1.4994597600307263e-05,
|
12860 |
-
"loss": 0.9568,
|
12861 |
-
"step": 1836
|
12862 |
-
},
|
12863 |
-
{
|
12864 |
-
"epoch": 0.35,
|
12865 |
-
"grad_norm": 1.582830465670841,
|
12866 |
-
"learning_rate": 1.4989193257688195e-05,
|
12867 |
-
"loss": 0.9045,
|
12868 |
-
"step": 1837
|
12869 |
-
},
|
12870 |
-
{
|
12871 |
-
"epoch": 0.35,
|
12872 |
-
"grad_norm": 1.6151541196243508,
|
12873 |
-
"learning_rate": 1.4983786974245118e-05,
|
12874 |
-
"loss": 0.9019,
|
12875 |
-
"step": 1838
|
12876 |
-
},
|
12877 |
-
{
|
12878 |
-
"epoch": 0.35,
|
12879 |
-
"grad_norm": 1.6357268555554538,
|
12880 |
-
"learning_rate": 1.4978378752081105e-05,
|
12881 |
-
"loss": 0.9621,
|
12882 |
-
"step": 1839
|
12883 |
-
},
|
12884 |
-
{
|
12885 |
-
"epoch": 0.35,
|
12886 |
-
"grad_norm": 1.7828133609360786,
|
12887 |
-
"learning_rate": 1.497296859329998e-05,
|
12888 |
-
"loss": 0.8937,
|
12889 |
-
"step": 1840
|
12890 |
-
},
|
12891 |
-
{
|
12892 |
-
"epoch": 0.35,
|
12893 |
-
"grad_norm": 1.6209326547083096,
|
12894 |
-
"learning_rate": 1.4967556500006336e-05,
|
12895 |
-
"loss": 0.9472,
|
12896 |
-
"step": 1841
|
12897 |
-
},
|
12898 |
-
{
|
12899 |
-
"epoch": 0.35,
|
12900 |
-
"grad_norm": 1.6715487795705135,
|
12901 |
-
"learning_rate": 1.4962142474305496e-05,
|
12902 |
-
"loss": 0.8342,
|
12903 |
-
"step": 1842
|
12904 |
-
},
|
12905 |
-
{
|
12906 |
-
"epoch": 0.35,
|
12907 |
-
"grad_norm": 1.644054514212963,
|
12908 |
-
"learning_rate": 1.4956726518303553e-05,
|
12909 |
-
"loss": 0.8793,
|
12910 |
-
"step": 1843
|
12911 |
-
},
|
12912 |
-
{
|
12913 |
-
"epoch": 0.36,
|
12914 |
-
"grad_norm": 1.6778795735018404,
|
12915 |
-
"learning_rate": 1.495130863410734e-05,
|
12916 |
-
"loss": 0.9961,
|
12917 |
-
"step": 1844
|
12918 |
-
},
|
12919 |
-
{
|
12920 |
-
"epoch": 0.36,
|
12921 |
-
"grad_norm": 1.7530107949989808,
|
12922 |
-
"learning_rate": 1.4945888823824449e-05,
|
12923 |
-
"loss": 0.8523,
|
12924 |
-
"step": 1845
|
12925 |
-
},
|
12926 |
-
{
|
12927 |
-
"epoch": 0.36,
|
12928 |
-
"grad_norm": 1.600308338620861,
|
12929 |
-
"learning_rate": 1.494046708956321e-05,
|
12930 |
-
"loss": 0.8763,
|
12931 |
-
"step": 1846
|
12932 |
-
},
|
12933 |
-
{
|
12934 |
-
"epoch": 0.36,
|
12935 |
-
"grad_norm": 1.7748182051211745,
|
12936 |
-
"learning_rate": 1.4935043433432715e-05,
|
12937 |
-
"loss": 0.9168,
|
12938 |
-
"step": 1847
|
12939 |
-
},
|
12940 |
-
{
|
12941 |
-
"epoch": 0.36,
|
12942 |
-
"grad_norm": 1.6428997560126952,
|
12943 |
-
"learning_rate": 1.4929617857542793e-05,
|
12944 |
-
"loss": 0.8927,
|
12945 |
-
"step": 1848
|
12946 |
-
},
|
12947 |
-
{
|
12948 |
-
"epoch": 0.36,
|
12949 |
-
"grad_norm": 1.7377574670461893,
|
12950 |
-
"learning_rate": 1.4924190364004023e-05,
|
12951 |
-
"loss": 0.869,
|
12952 |
-
"step": 1849
|
12953 |
-
},
|
12954 |
-
{
|
12955 |
-
"epoch": 0.36,
|
12956 |
-
"grad_norm": 1.7021319792833238,
|
12957 |
-
"learning_rate": 1.4918760954927729e-05,
|
12958 |
-
"loss": 0.9108,
|
12959 |
-
"step": 1850
|
12960 |
-
},
|
12961 |
-
{
|
12962 |
-
"epoch": 0.36,
|
12963 |
-
"grad_norm": 1.591116905628271,
|
12964 |
-
"learning_rate": 1.4913329632425984e-05,
|
12965 |
-
"loss": 0.949,
|
12966 |
-
"step": 1851
|
12967 |
-
},
|
12968 |
-
{
|
12969 |
-
"epoch": 0.36,
|
12970 |
-
"grad_norm": 1.6276173765055855,
|
12971 |
-
"learning_rate": 1.4907896398611603e-05,
|
12972 |
-
"loss": 0.89,
|
12973 |
-
"step": 1852
|
12974 |
-
},
|
12975 |
-
{
|
12976 |
-
"epoch": 0.36,
|
12977 |
-
"grad_norm": 1.8225115626463677,
|
12978 |
-
"learning_rate": 1.4902461255598141e-05,
|
12979 |
-
"loss": 0.9123,
|
12980 |
-
"step": 1853
|
12981 |
-
},
|
12982 |
-
{
|
12983 |
-
"epoch": 0.36,
|
12984 |
-
"grad_norm": 1.5498562183799023,
|
12985 |
-
"learning_rate": 1.4897024205499902e-05,
|
12986 |
-
"loss": 0.8801,
|
12987 |
-
"step": 1854
|
12988 |
-
},
|
12989 |
-
{
|
12990 |
-
"epoch": 0.36,
|
12991 |
-
"grad_norm": 1.7255906804323513,
|
12992 |
-
"learning_rate": 1.4891585250431929e-05,
|
12993 |
-
"loss": 0.88,
|
12994 |
-
"step": 1855
|
12995 |
-
},
|
12996 |
-
{
|
12997 |
-
"epoch": 0.36,
|
12998 |
-
"grad_norm": 1.7392433916238563,
|
12999 |
-
"learning_rate": 1.4886144392510004e-05,
|
13000 |
-
"loss": 0.9908,
|
13001 |
-
"step": 1856
|
13002 |
-
},
|
13003 |
-
{
|
13004 |
-
"epoch": 0.36,
|
13005 |
-
"grad_norm": 1.819892755574117,
|
13006 |
-
"learning_rate": 1.4880701633850652e-05,
|
13007 |
-
"loss": 0.9699,
|
13008 |
-
"step": 1857
|
13009 |
-
},
|
13010 |
-
{
|
13011 |
-
"epoch": 0.36,
|
13012 |
-
"grad_norm": 1.6453268799227307,
|
13013 |
-
"learning_rate": 1.4875256976571135e-05,
|
13014 |
-
"loss": 0.9985,
|
13015 |
-
"step": 1858
|
13016 |
-
},
|
13017 |
-
{
|
13018 |
-
"epoch": 0.36,
|
13019 |
-
"grad_norm": 1.7570555784318829,
|
13020 |
-
"learning_rate": 1.4869810422789459e-05,
|
13021 |
-
"loss": 0.9425,
|
13022 |
-
"step": 1859
|
13023 |
-
},
|
13024 |
-
{
|
13025 |
-
"epoch": 0.36,
|
13026 |
-
"grad_norm": 1.6103885798432909,
|
13027 |
-
"learning_rate": 1.4864361974624357e-05,
|
13028 |
-
"loss": 0.9111,
|
13029 |
-
"step": 1860
|
13030 |
-
},
|
13031 |
-
{
|
13032 |
-
"epoch": 0.36,
|
13033 |
-
"grad_norm": 1.6392043539397223,
|
13034 |
-
"learning_rate": 1.4858911634195312e-05,
|
13035 |
-
"loss": 0.9116,
|
13036 |
-
"step": 1861
|
13037 |
-
},
|
13038 |
-
{
|
13039 |
-
"epoch": 0.36,
|
13040 |
-
"grad_norm": 1.6241801480289666,
|
13041 |
-
"learning_rate": 1.4853459403622535e-05,
|
13042 |
-
"loss": 0.9424,
|
13043 |
-
"step": 1862
|
13044 |
-
},
|
13045 |
-
{
|
13046 |
-
"epoch": 0.36,
|
13047 |
-
"grad_norm": 1.634986046998513,
|
13048 |
-
"learning_rate": 1.4848005285026976e-05,
|
13049 |
-
"loss": 0.9494,
|
13050 |
-
"step": 1863
|
13051 |
-
},
|
13052 |
-
{
|
13053 |
-
"epoch": 0.36,
|
13054 |
-
"grad_norm": 1.5937895894975387,
|
13055 |
-
"learning_rate": 1.4842549280530312e-05,
|
13056 |
-
"loss": 0.877,
|
13057 |
-
"step": 1864
|
13058 |
-
},
|
13059 |
-
{
|
13060 |
-
"epoch": 0.36,
|
13061 |
-
"grad_norm": 1.572921397651084,
|
13062 |
-
"learning_rate": 1.4837091392254964e-05,
|
13063 |
-
"loss": 0.8937,
|
13064 |
-
"step": 1865
|
13065 |
-
},
|
13066 |
-
{
|
13067 |
-
"epoch": 0.36,
|
13068 |
-
"grad_norm": 1.6988453094864369,
|
13069 |
-
"learning_rate": 1.4831631622324078e-05,
|
13070 |
-
"loss": 0.8695,
|
13071 |
-
"step": 1866
|
13072 |
-
},
|
13073 |
-
{
|
13074 |
-
"epoch": 0.36,
|
13075 |
-
"grad_norm": 1.5134644774277317,
|
13076 |
-
"learning_rate": 1.4826169972861539e-05,
|
13077 |
-
"loss": 0.8927,
|
13078 |
-
"step": 1867
|
13079 |
-
},
|
13080 |
-
{
|
13081 |
-
"epoch": 0.36,
|
13082 |
-
"grad_norm": 1.627315805696269,
|
13083 |
-
"learning_rate": 1.4820706445991954e-05,
|
13084 |
-
"loss": 0.9352,
|
13085 |
-
"step": 1868
|
13086 |
-
},
|
13087 |
-
{
|
13088 |
-
"epoch": 0.36,
|
13089 |
-
"grad_norm": 1.6393371999448805,
|
13090 |
-
"learning_rate": 1.4815241043840669e-05,
|
13091 |
-
"loss": 0.8528,
|
13092 |
-
"step": 1869
|
13093 |
-
},
|
13094 |
-
{
|
13095 |
-
"epoch": 0.36,
|
13096 |
-
"grad_norm": 1.8952895153405638,
|
13097 |
-
"learning_rate": 1.4809773768533757e-05,
|
13098 |
-
"loss": 0.9755,
|
13099 |
-
"step": 1870
|
13100 |
-
},
|
13101 |
-
{
|
13102 |
-
"epoch": 0.36,
|
13103 |
-
"grad_norm": 1.6091945516084094,
|
13104 |
-
"learning_rate": 1.4804304622198013e-05,
|
13105 |
-
"loss": 0.8705,
|
13106 |
-
"step": 1871
|
13107 |
-
},
|
13108 |
-
{
|
13109 |
-
"epoch": 0.36,
|
13110 |
-
"grad_norm": 1.624495437307758,
|
13111 |
-
"learning_rate": 1.4798833606960972e-05,
|
13112 |
-
"loss": 0.8524,
|
13113 |
-
"step": 1872
|
13114 |
-
},
|
13115 |
-
{
|
13116 |
-
"epoch": 0.36,
|
13117 |
-
"grad_norm": 1.8744172065380642,
|
13118 |
-
"learning_rate": 1.4793360724950887e-05,
|
13119 |
-
"loss": 1.0719,
|
13120 |
-
"step": 1873
|
13121 |
-
},
|
13122 |
-
{
|
13123 |
-
"epoch": 0.36,
|
13124 |
-
"grad_norm": 1.5575380698894332,
|
13125 |
-
"learning_rate": 1.4787885978296737e-05,
|
13126 |
-
"loss": 0.9028,
|
13127 |
-
"step": 1874
|
13128 |
-
},
|
13129 |
-
{
|
13130 |
-
"epoch": 0.36,
|
13131 |
-
"grad_norm": 1.7892688635911647,
|
13132 |
-
"learning_rate": 1.478240936912823e-05,
|
13133 |
-
"loss": 0.9931,
|
13134 |
-
"step": 1875
|
13135 |
-
},
|
13136 |
-
{
|
13137 |
-
"epoch": 0.36,
|
13138 |
-
"grad_norm": 1.6951433638470188,
|
13139 |
-
"learning_rate": 1.4776930899575801e-05,
|
13140 |
-
"loss": 0.9601,
|
13141 |
-
"step": 1876
|
13142 |
-
},
|
13143 |
-
{
|
13144 |
-
"epoch": 0.36,
|
13145 |
-
"grad_norm": 1.8511925621033531,
|
13146 |
-
"learning_rate": 1.4771450571770603e-05,
|
13147 |
-
"loss": 1.0311,
|
13148 |
-
"step": 1877
|
13149 |
-
},
|
13150 |
-
{
|
13151 |
-
"epoch": 0.36,
|
13152 |
-
"grad_norm": 1.1442810135716395,
|
13153 |
-
"learning_rate": 1.4765968387844516e-05,
|
13154 |
-
"loss": 0.884,
|
13155 |
-
"step": 1878
|
13156 |
-
},
|
13157 |
-
{
|
13158 |
-
"epoch": 0.36,
|
13159 |
-
"grad_norm": 1.9225596331812123,
|
13160 |
-
"learning_rate": 1.4760484349930134e-05,
|
13161 |
-
"loss": 0.8523,
|
13162 |
-
"step": 1879
|
13163 |
-
},
|
13164 |
-
{
|
13165 |
-
"epoch": 0.36,
|
13166 |
-
"grad_norm": 1.808853808526827,
|
13167 |
-
"learning_rate": 1.4754998460160784e-05,
|
13168 |
-
"loss": 0.9429,
|
13169 |
-
"step": 1880
|
13170 |
-
},
|
13171 |
-
{
|
13172 |
-
"epoch": 0.36,
|
13173 |
-
"grad_norm": 1.5600398733677634,
|
13174 |
-
"learning_rate": 1.4749510720670506e-05,
|
13175 |
-
"loss": 0.9736,
|
13176 |
-
"step": 1881
|
13177 |
-
},
|
13178 |
-
{
|
13179 |
-
"epoch": 0.36,
|
13180 |
-
"grad_norm": 1.5320412874387632,
|
13181 |
-
"learning_rate": 1.4744021133594059e-05,
|
13182 |
-
"loss": 0.8701,
|
13183 |
-
"step": 1882
|
13184 |
-
},
|
13185 |
-
{
|
13186 |
-
"epoch": 0.36,
|
13187 |
-
"grad_norm": 1.523014076301379,
|
13188 |
-
"learning_rate": 1.4738529701066928e-05,
|
13189 |
-
"loss": 0.8883,
|
13190 |
-
"step": 1883
|
13191 |
-
},
|
13192 |
-
{
|
13193 |
-
"epoch": 0.36,
|
13194 |
-
"grad_norm": 1.7505097977570019,
|
13195 |
-
"learning_rate": 1.4733036425225306e-05,
|
13196 |
-
"loss": 0.9172,
|
13197 |
-
"step": 1884
|
13198 |
-
},
|
13199 |
-
{
|
13200 |
-
"epoch": 0.36,
|
13201 |
-
"grad_norm": 1.5554959069631156,
|
13202 |
-
"learning_rate": 1.4727541308206114e-05,
|
13203 |
-
"loss": 0.8644,
|
13204 |
-
"step": 1885
|
13205 |
-
},
|
13206 |
-
{
|
13207 |
-
"epoch": 0.36,
|
13208 |
-
"grad_norm": 1.7958310637324932,
|
13209 |
-
"learning_rate": 1.4722044352146978e-05,
|
13210 |
-
"loss": 0.866,
|
13211 |
-
"step": 1886
|
13212 |
-
},
|
13213 |
-
{
|
13214 |
-
"epoch": 0.36,
|
13215 |
-
"grad_norm": 1.855213005785535,
|
13216 |
-
"learning_rate": 1.4716545559186244e-05,
|
13217 |
-
"loss": 0.8861,
|
13218 |
-
"step": 1887
|
13219 |
-
},
|
13220 |
-
{
|
13221 |
-
"epoch": 0.36,
|
13222 |
-
"grad_norm": 1.660788717810155,
|
13223 |
-
"learning_rate": 1.4711044931462976e-05,
|
13224 |
-
"loss": 0.888,
|
13225 |
-
"step": 1888
|
13226 |
-
},
|
13227 |
-
{
|
13228 |
-
"epoch": 0.36,
|
13229 |
-
"grad_norm": 1.5748794402819912,
|
13230 |
-
"learning_rate": 1.4705542471116949e-05,
|
13231 |
-
"loss": 0.9492,
|
13232 |
-
"step": 1889
|
13233 |
-
},
|
13234 |
-
{
|
13235 |
-
"epoch": 0.36,
|
13236 |
-
"grad_norm": 1.671940289814532,
|
13237 |
-
"learning_rate": 1.470003818028865e-05,
|
13238 |
-
"loss": 0.9392,
|
13239 |
-
"step": 1890
|
13240 |
-
},
|
13241 |
-
{
|
13242 |
-
"epoch": 0.36,
|
13243 |
-
"grad_norm": 1.6682541425119886,
|
13244 |
-
"learning_rate": 1.4694532061119277e-05,
|
13245 |
-
"loss": 0.9954,
|
13246 |
-
"step": 1891
|
13247 |
-
},
|
13248 |
-
{
|
13249 |
-
"epoch": 0.36,
|
13250 |
-
"grad_norm": 1.6270272723817845,
|
13251 |
-
"learning_rate": 1.4689024115750745e-05,
|
13252 |
-
"loss": 0.9257,
|
13253 |
-
"step": 1892
|
13254 |
-
},
|
13255 |
-
{
|
13256 |
-
"epoch": 0.36,
|
13257 |
-
"grad_norm": 1.6944012589743433,
|
13258 |
-
"learning_rate": 1.4683514346325671e-05,
|
13259 |
-
"loss": 0.8054,
|
13260 |
-
"step": 1893
|
13261 |
-
},
|
13262 |
-
{
|
13263 |
-
"epoch": 0.36,
|
13264 |
-
"grad_norm": 1.7534054491921223,
|
13265 |
-
"learning_rate": 1.4678002754987388e-05,
|
13266 |
-
"loss": 0.9034,
|
13267 |
-
"step": 1894
|
13268 |
-
},
|
13269 |
-
{
|
13270 |
-
"epoch": 0.36,
|
13271 |
-
"grad_norm": 1.658585836806205,
|
13272 |
-
"learning_rate": 1.4672489343879939e-05,
|
13273 |
-
"loss": 0.9457,
|
13274 |
-
"step": 1895
|
13275 |
-
},
|
13276 |
-
{
|
13277 |
-
"epoch": 0.37,
|
13278 |
-
"grad_norm": 1.640884368651283,
|
13279 |
-
"learning_rate": 1.4666974115148065e-05,
|
13280 |
-
"loss": 0.8148,
|
13281 |
-
"step": 1896
|
13282 |
-
},
|
13283 |
-
{
|
13284 |
-
"epoch": 0.37,
|
13285 |
-
"grad_norm": 1.5939722795602012,
|
13286 |
-
"learning_rate": 1.4661457070937226e-05,
|
13287 |
-
"loss": 0.8452,
|
13288 |
-
"step": 1897
|
13289 |
-
},
|
13290 |
-
{
|
13291 |
-
"epoch": 0.37,
|
13292 |
-
"grad_norm": 1.6167631945289578,
|
13293 |
-
"learning_rate": 1.4655938213393582e-05,
|
13294 |
-
"loss": 0.9033,
|
13295 |
-
"step": 1898
|
13296 |
-
},
|
13297 |
-
{
|
13298 |
-
"epoch": 0.37,
|
13299 |
-
"grad_norm": 1.9454139532441512,
|
13300 |
-
"learning_rate": 1.4650417544663997e-05,
|
13301 |
-
"loss": 0.9275,
|
13302 |
-
"step": 1899
|
13303 |
-
},
|
13304 |
-
{
|
13305 |
-
"epoch": 0.37,
|
13306 |
-
"grad_norm": 1.5858648706624496,
|
13307 |
-
"learning_rate": 1.4644895066896046e-05,
|
13308 |
-
"loss": 0.9054,
|
13309 |
-
"step": 1900
|
13310 |
-
},
|
13311 |
-
{
|
13312 |
-
"epoch": 0.37,
|
13313 |
-
"grad_norm": 1.6936034257279007,
|
13314 |
-
"learning_rate": 1.4639370782238e-05,
|
13315 |
-
"loss": 0.9825,
|
13316 |
-
"step": 1901
|
13317 |
-
},
|
13318 |
-
{
|
13319 |
-
"epoch": 0.37,
|
13320 |
-
"grad_norm": 1.6703890968967032,
|
13321 |
-
"learning_rate": 1.4633844692838837e-05,
|
13322 |
-
"loss": 0.9524,
|
13323 |
-
"step": 1902
|
13324 |
-
},
|
13325 |
-
{
|
13326 |
-
"epoch": 0.37,
|
13327 |
-
"grad_norm": 1.5551617347389055,
|
13328 |
-
"learning_rate": 1.4628316800848241e-05,
|
13329 |
-
"loss": 0.9375,
|
13330 |
-
"step": 1903
|
13331 |
-
},
|
13332 |
-
{
|
13333 |
-
"epoch": 0.37,
|
13334 |
-
"grad_norm": 1.8783960822922252,
|
13335 |
-
"learning_rate": 1.4622787108416585e-05,
|
13336 |
-
"loss": 1.0336,
|
13337 |
-
"step": 1904
|
13338 |
-
},
|
13339 |
-
{
|
13340 |
-
"epoch": 0.37,
|
13341 |
-
"grad_norm": 1.7771071816180781,
|
13342 |
-
"learning_rate": 1.4617255617694957e-05,
|
13343 |
-
"loss": 0.8875,
|
13344 |
-
"step": 1905
|
13345 |
-
},
|
13346 |
-
{
|
13347 |
-
"epoch": 0.37,
|
13348 |
-
"grad_norm": 1.6400552487600897,
|
13349 |
-
"learning_rate": 1.4611722330835133e-05,
|
13350 |
-
"loss": 0.9945,
|
13351 |
-
"step": 1906
|
13352 |
-
},
|
13353 |
-
{
|
13354 |
-
"epoch": 0.37,
|
13355 |
-
"grad_norm": 1.7424809111938788,
|
13356 |
-
"learning_rate": 1.4606187249989598e-05,
|
13357 |
-
"loss": 0.947,
|
13358 |
-
"step": 1907
|
13359 |
-
},
|
13360 |
-
{
|
13361 |
-
"epoch": 0.37,
|
13362 |
-
"grad_norm": 1.6042784586765482,
|
13363 |
-
"learning_rate": 1.4600650377311523e-05,
|
13364 |
-
"loss": 0.9005,
|
13365 |
-
"step": 1908
|
13366 |
-
},
|
13367 |
-
{
|
13368 |
-
"epoch": 0.37,
|
13369 |
-
"grad_norm": 1.6758776492466088,
|
13370 |
-
"learning_rate": 1.4595111714954787e-05,
|
13371 |
-
"loss": 0.9794,
|
13372 |
-
"step": 1909
|
13373 |
-
},
|
13374 |
-
{
|
13375 |
-
"epoch": 0.37,
|
13376 |
-
"grad_norm": 1.5913304752712094,
|
13377 |
-
"learning_rate": 1.4589571265073959e-05,
|
13378 |
-
"loss": 0.7905,
|
13379 |
-
"step": 1910
|
13380 |
-
},
|
13381 |
-
{
|
13382 |
-
"epoch": 0.37,
|
13383 |
-
"grad_norm": 1.7798933897483598,
|
13384 |
-
"learning_rate": 1.4584029029824305e-05,
|
13385 |
-
"loss": 0.9255,
|
13386 |
-
"step": 1911
|
13387 |
-
},
|
13388 |
-
{
|
13389 |
-
"epoch": 0.37,
|
13390 |
-
"grad_norm": 1.6437473812141559,
|
13391 |
-
"learning_rate": 1.4578485011361783e-05,
|
13392 |
-
"loss": 0.8905,
|
13393 |
-
"step": 1912
|
13394 |
-
},
|
13395 |
-
{
|
13396 |
-
"epoch": 0.37,
|
13397 |
-
"grad_norm": 1.701664671213589,
|
13398 |
-
"learning_rate": 1.4572939211843051e-05,
|
13399 |
-
"loss": 0.9686,
|
13400 |
-
"step": 1913
|
13401 |
-
},
|
13402 |
-
{
|
13403 |
-
"epoch": 0.37,
|
13404 |
-
"grad_norm": 1.611101839884725,
|
13405 |
-
"learning_rate": 1.4567391633425455e-05,
|
13406 |
-
"loss": 0.9226,
|
13407 |
-
"step": 1914
|
13408 |
-
},
|
13409 |
-
{
|
13410 |
-
"epoch": 0.37,
|
13411 |
-
"grad_norm": 1.4477358561753013,
|
13412 |
-
"learning_rate": 1.4561842278267033e-05,
|
13413 |
-
"loss": 0.956,
|
13414 |
-
"step": 1915
|
13415 |
-
},
|
13416 |
-
{
|
13417 |
-
"epoch": 0.37,
|
13418 |
-
"grad_norm": 1.5313991293553062,
|
13419 |
-
"learning_rate": 1.4556291148526516e-05,
|
13420 |
-
"loss": 0.8799,
|
13421 |
-
"step": 1916
|
13422 |
-
},
|
13423 |
-
{
|
13424 |
-
"epoch": 0.37,
|
13425 |
-
"grad_norm": 1.570437900284019,
|
13426 |
-
"learning_rate": 1.4550738246363322e-05,
|
13427 |
-
"loss": 0.8546,
|
13428 |
-
"step": 1917
|
13429 |
-
},
|
13430 |
-
{
|
13431 |
-
"epoch": 0.37,
|
13432 |
-
"grad_norm": 1.6393386291966954,
|
13433 |
-
"learning_rate": 1.4545183573937566e-05,
|
13434 |
-
"loss": 0.8923,
|
13435 |
-
"step": 1918
|
13436 |
-
},
|
13437 |
-
{
|
13438 |
-
"epoch": 0.37,
|
13439 |
-
"grad_norm": 1.7235853278804611,
|
13440 |
-
"learning_rate": 1.4539627133410042e-05,
|
13441 |
-
"loss": 0.8886,
|
13442 |
-
"step": 1919
|
13443 |
-
},
|
13444 |
-
{
|
13445 |
-
"epoch": 0.37,
|
13446 |
-
"grad_norm": 1.6947056773766112,
|
13447 |
-
"learning_rate": 1.4534068926942235e-05,
|
13448 |
-
"loss": 0.8453,
|
13449 |
-
"step": 1920
|
13450 |
-
},
|
13451 |
-
{
|
13452 |
-
"epoch": 0.37,
|
13453 |
-
"grad_norm": 1.5835750935087545,
|
13454 |
-
"learning_rate": 1.4528508956696326e-05,
|
13455 |
-
"loss": 0.9339,
|
13456 |
-
"step": 1921
|
13457 |
-
},
|
13458 |
-
{
|
13459 |
-
"epoch": 0.37,
|
13460 |
-
"grad_norm": 1.742088241485645,
|
13461 |
-
"learning_rate": 1.4522947224835165e-05,
|
13462 |
-
"loss": 0.9946,
|
13463 |
-
"step": 1922
|
13464 |
-
},
|
13465 |
-
{
|
13466 |
-
"epoch": 0.37,
|
13467 |
-
"grad_norm": 1.4949963929651728,
|
13468 |
-
"learning_rate": 1.4517383733522304e-05,
|
13469 |
-
"loss": 0.927,
|
13470 |
-
"step": 1923
|
13471 |
-
},
|
13472 |
-
{
|
13473 |
-
"epoch": 0.37,
|
13474 |
-
"grad_norm": 1.5253164901288647,
|
13475 |
-
"learning_rate": 1.451181848492197e-05,
|
13476 |
-
"loss": 0.8769,
|
13477 |
-
"step": 1924
|
13478 |
-
},
|
13479 |
-
{
|
13480 |
-
"epoch": 0.37,
|
13481 |
-
"grad_norm": 1.4889989022162844,
|
13482 |
-
"learning_rate": 1.4506251481199071e-05,
|
13483 |
-
"loss": 0.872,
|
13484 |
-
"step": 1925
|
13485 |
-
},
|
13486 |
-
{
|
13487 |
-
"epoch": 0.37,
|
13488 |
-
"grad_norm": 1.8096496107868196,
|
13489 |
-
"learning_rate": 1.450068272451921e-05,
|
13490 |
-
"loss": 0.9849,
|
13491 |
-
"step": 1926
|
13492 |
-
},
|
13493 |
-
{
|
13494 |
-
"epoch": 0.37,
|
13495 |
-
"grad_norm": 1.6165599569378286,
|
13496 |
-
"learning_rate": 1.449511221704866e-05,
|
13497 |
-
"loss": 0.8824,
|
13498 |
-
"step": 1927
|
13499 |
-
},
|
13500 |
-
{
|
13501 |
-
"epoch": 0.37,
|
13502 |
-
"grad_norm": 1.696888487069437,
|
13503 |
-
"learning_rate": 1.4489539960954382e-05,
|
13504 |
-
"loss": 0.9493,
|
13505 |
-
"step": 1928
|
13506 |
-
},
|
13507 |
-
{
|
13508 |
-
"epoch": 0.37,
|
13509 |
-
"grad_norm": 1.5617807092050424,
|
13510 |
-
"learning_rate": 1.4483965958404012e-05,
|
13511 |
-
"loss": 0.9603,
|
13512 |
-
"step": 1929
|
13513 |
-
},
|
13514 |
-
{
|
13515 |
-
"epoch": 0.37,
|
13516 |
-
"grad_norm": 1.7772163500967342,
|
13517 |
-
"learning_rate": 1.447839021156587e-05,
|
13518 |
-
"loss": 0.8918,
|
13519 |
-
"step": 1930
|
13520 |
-
},
|
13521 |
-
{
|
13522 |
-
"epoch": 0.37,
|
13523 |
-
"grad_norm": 1.060365842099134,
|
13524 |
-
"learning_rate": 1.4472812722608948e-05,
|
13525 |
-
"loss": 0.8678,
|
13526 |
-
"step": 1931
|
13527 |
-
},
|
13528 |
-
{
|
13529 |
-
"epoch": 0.37,
|
13530 |
-
"grad_norm": 1.6120849117340765,
|
13531 |
-
"learning_rate": 1.446723349370293e-05,
|
13532 |
-
"loss": 0.9337,
|
13533 |
-
"step": 1932
|
13534 |
-
},
|
13535 |
-
{
|
13536 |
-
"epoch": 0.37,
|
13537 |
-
"grad_norm": 1.0949366371155416,
|
13538 |
-
"learning_rate": 1.4461652527018158e-05,
|
13539 |
-
"loss": 0.8726,
|
13540 |
-
"step": 1933
|
13541 |
-
},
|
13542 |
-
{
|
13543 |
-
"epoch": 0.37,
|
13544 |
-
"grad_norm": 1.6705665592860641,
|
13545 |
-
"learning_rate": 1.4456069824725663e-05,
|
13546 |
-
"loss": 0.9106,
|
13547 |
-
"step": 1934
|
13548 |
-
},
|
13549 |
-
{
|
13550 |
-
"epoch": 0.37,
|
13551 |
-
"grad_norm": 1.842606010652722,
|
13552 |
-
"learning_rate": 1.4450485388997149e-05,
|
13553 |
-
"loss": 0.9954,
|
13554 |
-
"step": 1935
|
13555 |
-
},
|
13556 |
-
{
|
13557 |
-
"epoch": 0.37,
|
13558 |
-
"grad_norm": 1.057321791400095,
|
13559 |
-
"learning_rate": 1.444489922200499e-05,
|
13560 |
-
"loss": 0.8959,
|
13561 |
-
"step": 1936
|
13562 |
-
},
|
13563 |
-
{
|
13564 |
-
"epoch": 0.37,
|
13565 |
-
"grad_norm": 1.8140608348248906,
|
13566 |
-
"learning_rate": 1.4439311325922235e-05,
|
13567 |
-
"loss": 0.9943,
|
13568 |
-
"step": 1937
|
13569 |
-
},
|
13570 |
-
{
|
13571 |
-
"epoch": 0.37,
|
13572 |
-
"grad_norm": 1.7098410462471028,
|
13573 |
-
"learning_rate": 1.4433721702922607e-05,
|
13574 |
-
"loss": 0.8261,
|
13575 |
-
"step": 1938
|
13576 |
-
},
|
13577 |
-
{
|
13578 |
-
"epoch": 0.37,
|
13579 |
-
"grad_norm": 1.0270872514099714,
|
13580 |
-
"learning_rate": 1.4428130355180505e-05,
|
13581 |
-
"loss": 0.8597,
|
13582 |
-
"step": 1939
|
13583 |
-
},
|
13584 |
-
{
|
13585 |
-
"epoch": 0.37,
|
13586 |
-
"grad_norm": 1.596577887827628,
|
13587 |
-
"learning_rate": 1.4422537284870993e-05,
|
13588 |
-
"loss": 0.8603,
|
13589 |
-
"step": 1940
|
13590 |
-
},
|
13591 |
-
{
|
13592 |
-
"epoch": 0.37,
|
13593 |
-
"grad_norm": 1.0437894211858543,
|
13594 |
-
"learning_rate": 1.4416942494169801e-05,
|
13595 |
-
"loss": 0.8863,
|
13596 |
-
"step": 1941
|
13597 |
-
},
|
13598 |
-
{
|
13599 |
-
"epoch": 0.37,
|
13600 |
-
"grad_norm": 1.022257618024313,
|
13601 |
-
"learning_rate": 1.441134598525334e-05,
|
13602 |
-
"loss": 0.8304,
|
13603 |
-
"step": 1942
|
13604 |
-
},
|
13605 |
-
{
|
13606 |
-
"epoch": 0.37,
|
13607 |
-
"grad_norm": 1.630469813224106,
|
13608 |
-
"learning_rate": 1.4405747760298681e-05,
|
13609 |
-
"loss": 0.8952,
|
13610 |
-
"step": 1943
|
13611 |
-
},
|
13612 |
-
{
|
13613 |
-
"epoch": 0.37,
|
13614 |
-
"grad_norm": 1.7663508160729156,
|
13615 |
-
"learning_rate": 1.4400147821483564e-05,
|
13616 |
-
"loss": 0.9825,
|
13617 |
-
"step": 1944
|
13618 |
-
},
|
13619 |
-
{
|
13620 |
-
"epoch": 0.37,
|
13621 |
-
"grad_norm": 1.6935303282233554,
|
13622 |
-
"learning_rate": 1.4394546170986394e-05,
|
13623 |
-
"loss": 0.9867,
|
13624 |
-
"step": 1945
|
13625 |
-
},
|
13626 |
-
{
|
13627 |
-
"epoch": 0.37,
|
13628 |
-
"grad_norm": 1.811332894249063,
|
13629 |
-
"learning_rate": 1.4388942810986253e-05,
|
13630 |
-
"loss": 0.9215,
|
13631 |
-
"step": 1946
|
13632 |
-
},
|
13633 |
-
{
|
13634 |
-
"epoch": 0.37,
|
13635 |
-
"grad_norm": 1.5936262053219343,
|
13636 |
-
"learning_rate": 1.4383337743662872e-05,
|
13637 |
-
"loss": 1.0064,
|
13638 |
-
"step": 1947
|
13639 |
-
},
|
13640 |
-
{
|
13641 |
-
"epoch": 0.38,
|
13642 |
-
"grad_norm": 1.2221389486212109,
|
13643 |
-
"learning_rate": 1.4377730971196658e-05,
|
13644 |
-
"loss": 0.8219,
|
13645 |
-
"step": 1948
|
13646 |
-
},
|
13647 |
-
{
|
13648 |
-
"epoch": 0.38,
|
13649 |
-
"grad_norm": 1.692758782000757,
|
13650 |
-
"learning_rate": 1.437212249576867e-05,
|
13651 |
-
"loss": 0.8998,
|
13652 |
-
"step": 1949
|
13653 |
-
},
|
13654 |
-
{
|
13655 |
-
"epoch": 0.38,
|
13656 |
-
"grad_norm": 1.6050066128005005,
|
13657 |
-
"learning_rate": 1.4366512319560642e-05,
|
13658 |
-
"loss": 0.9318,
|
13659 |
-
"step": 1950
|
13660 |
-
},
|
13661 |
-
{
|
13662 |
-
"epoch": 0.38,
|
13663 |
-
"grad_norm": 1.820927381354136,
|
13664 |
-
"learning_rate": 1.4360900444754959e-05,
|
13665 |
-
"loss": 0.9633,
|
13666 |
-
"step": 1951
|
13667 |
-
},
|
13668 |
-
{
|
13669 |
-
"epoch": 0.38,
|
13670 |
-
"grad_norm": 1.759938786543528,
|
13671 |
-
"learning_rate": 1.4355286873534677e-05,
|
13672 |
-
"loss": 0.9468,
|
13673 |
-
"step": 1952
|
13674 |
-
},
|
13675 |
-
{
|
13676 |
-
"epoch": 0.38,
|
13677 |
-
"grad_norm": 1.6752343970748882,
|
13678 |
-
"learning_rate": 1.4349671608083504e-05,
|
13679 |
-
"loss": 0.8669,
|
13680 |
-
"step": 1953
|
13681 |
-
},
|
13682 |
-
{
|
13683 |
-
"epoch": 0.38,
|
13684 |
-
"grad_norm": 1.7184980534359486,
|
13685 |
-
"learning_rate": 1.4344054650585813e-05,
|
13686 |
-
"loss": 0.9065,
|
13687 |
-
"step": 1954
|
13688 |
-
},
|
13689 |
-
{
|
13690 |
-
"epoch": 0.38,
|
13691 |
-
"grad_norm": 1.672450031095198,
|
13692 |
-
"learning_rate": 1.4338436003226623e-05,
|
13693 |
-
"loss": 1.0158,
|
13694 |
-
"step": 1955
|
13695 |
-
},
|
13696 |
-
{
|
13697 |
-
"epoch": 0.38,
|
13698 |
-
"grad_norm": 1.7822544858188532,
|
13699 |
-
"learning_rate": 1.433281566819163e-05,
|
13700 |
-
"loss": 0.8884,
|
13701 |
-
"step": 1956
|
13702 |
-
},
|
13703 |
-
{
|
13704 |
-
"epoch": 0.38,
|
13705 |
-
"grad_norm": 1.2323855892676456,
|
13706 |
-
"learning_rate": 1.4327193647667167e-05,
|
13707 |
-
"loss": 0.857,
|
13708 |
-
"step": 1957
|
13709 |
-
},
|
13710 |
-
{
|
13711 |
-
"epoch": 0.38,
|
13712 |
-
"grad_norm": 1.6749892119624912,
|
13713 |
-
"learning_rate": 1.4321569943840235e-05,
|
13714 |
-
"loss": 0.9273,
|
13715 |
-
"step": 1958
|
13716 |
-
},
|
13717 |
-
{
|
13718 |
-
"epoch": 0.38,
|
13719 |
-
"grad_norm": 1.562532316981109,
|
13720 |
-
"learning_rate": 1.4315944558898486e-05,
|
13721 |
-
"loss": 0.9543,
|
13722 |
-
"step": 1959
|
13723 |
-
},
|
13724 |
-
{
|
13725 |
-
"epoch": 0.38,
|
13726 |
-
"grad_norm": 1.733048227533296,
|
13727 |
-
"learning_rate": 1.4310317495030226e-05,
|
13728 |
-
"loss": 0.9416,
|
13729 |
-
"step": 1960
|
13730 |
-
},
|
13731 |
-
{
|
13732 |
-
"epoch": 0.38,
|
13733 |
-
"grad_norm": 1.6187249948968137,
|
13734 |
-
"learning_rate": 1.4304688754424417e-05,
|
13735 |
-
"loss": 0.9877,
|
13736 |
-
"step": 1961
|
13737 |
-
},
|
13738 |
-
{
|
13739 |
-
"epoch": 0.38,
|
13740 |
-
"grad_norm": 1.544184212908568,
|
13741 |
-
"learning_rate": 1.4299058339270663e-05,
|
13742 |
-
"loss": 0.8356,
|
13743 |
-
"step": 1962
|
13744 |
-
},
|
13745 |
-
{
|
13746 |
-
"epoch": 0.38,
|
13747 |
-
"grad_norm": 1.5597864305215874,
|
13748 |
-
"learning_rate": 1.4293426251759234e-05,
|
13749 |
-
"loss": 0.8891,
|
13750 |
-
"step": 1963
|
13751 |
-
},
|
13752 |
-
{
|
13753 |
-
"epoch": 0.38,
|
13754 |
-
"grad_norm": 1.6764875535211585,
|
13755 |
-
"learning_rate": 1.4287792494081042e-05,
|
13756 |
-
"loss": 0.9978,
|
13757 |
-
"step": 1964
|
13758 |
-
},
|
13759 |
-
{
|
13760 |
-
"epoch": 0.38,
|
13761 |
-
"grad_norm": 1.628972353736311,
|
13762 |
-
"learning_rate": 1.428215706842765e-05,
|
13763 |
-
"loss": 0.8752,
|
13764 |
-
"step": 1965
|
13765 |
-
},
|
13766 |
-
{
|
13767 |
-
"epoch": 0.38,
|
13768 |
-
"grad_norm": 1.6175343059783494,
|
13769 |
-
"learning_rate": 1.4276519976991267e-05,
|
13770 |
-
"loss": 0.9888,
|
13771 |
-
"step": 1966
|
13772 |
-
},
|
13773 |
-
{
|
13774 |
-
"epoch": 0.38,
|
13775 |
-
"grad_norm": 1.6569498876168287,
|
13776 |
-
"learning_rate": 1.4270881221964756e-05,
|
13777 |
-
"loss": 0.9243,
|
13778 |
-
"step": 1967
|
13779 |
-
},
|
13780 |
-
{
|
13781 |
-
"epoch": 0.38,
|
13782 |
-
"grad_norm": 1.5486333423207015,
|
13783 |
-
"learning_rate": 1.4265240805541628e-05,
|
13784 |
-
"loss": 0.875,
|
13785 |
-
"step": 1968
|
13786 |
-
},
|
13787 |
-
{
|
13788 |
-
"epoch": 0.38,
|
13789 |
-
"grad_norm": 1.6050782117840383,
|
13790 |
-
"learning_rate": 1.4259598729916027e-05,
|
13791 |
-
"loss": 0.8632,
|
13792 |
-
"step": 1969
|
13793 |
-
},
|
13794 |
-
{
|
13795 |
-
"epoch": 0.38,
|
13796 |
-
"grad_norm": 1.613167697354811,
|
13797 |
-
"learning_rate": 1.425395499728276e-05,
|
13798 |
-
"loss": 0.8139,
|
13799 |
-
"step": 1970
|
13800 |
-
},
|
13801 |
-
{
|
13802 |
-
"epoch": 0.38,
|
13803 |
-
"grad_norm": 1.0995807826218689,
|
13804 |
-
"learning_rate": 1.4248309609837262e-05,
|
13805 |
-
"loss": 0.8318,
|
13806 |
-
"step": 1971
|
13807 |
-
},
|
13808 |
-
{
|
13809 |
-
"epoch": 0.38,
|
13810 |
-
"grad_norm": 1.7339742844852268,
|
13811 |
-
"learning_rate": 1.4242662569775632e-05,
|
13812 |
-
"loss": 1.0169,
|
13813 |
-
"step": 1972
|
13814 |
-
},
|
13815 |
-
{
|
13816 |
-
"epoch": 0.38,
|
13817 |
-
"grad_norm": 1.775570046531735,
|
13818 |
-
"learning_rate": 1.423701387929459e-05,
|
13819 |
-
"loss": 0.8689,
|
13820 |
-
"step": 1973
|
13821 |
-
},
|
13822 |
-
{
|
13823 |
-
"epoch": 0.38,
|
13824 |
-
"grad_norm": 1.712780324624818,
|
13825 |
-
"learning_rate": 1.4231363540591512e-05,
|
13826 |
-
"loss": 0.8534,
|
13827 |
-
"step": 1974
|
13828 |
-
},
|
13829 |
-
{
|
13830 |
-
"epoch": 0.38,
|
13831 |
-
"grad_norm": 1.9214141386062493,
|
13832 |
-
"learning_rate": 1.4225711555864413e-05,
|
13833 |
-
"loss": 0.9083,
|
13834 |
-
"step": 1975
|
13835 |
-
},
|
13836 |
-
{
|
13837 |
-
"epoch": 0.38,
|
13838 |
-
"grad_norm": 1.0979553405135083,
|
13839 |
-
"learning_rate": 1.4220057927311944e-05,
|
13840 |
-
"loss": 0.8881,
|
13841 |
-
"step": 1976
|
13842 |
-
},
|
13843 |
-
{
|
13844 |
-
"epoch": 0.38,
|
13845 |
-
"grad_norm": 1.6731728263110943,
|
13846 |
-
"learning_rate": 1.4214402657133396e-05,
|
13847 |
-
"loss": 0.968,
|
13848 |
-
"step": 1977
|
13849 |
-
},
|
13850 |
-
{
|
13851 |
-
"epoch": 0.38,
|
13852 |
-
"grad_norm": 1.5442188820300313,
|
13853 |
-
"learning_rate": 1.4208745747528705e-05,
|
13854 |
-
"loss": 0.8429,
|
13855 |
-
"step": 1978
|
13856 |
-
},
|
13857 |
-
{
|
13858 |
-
"epoch": 0.38,
|
13859 |
-
"grad_norm": 1.560545156571736,
|
13860 |
-
"learning_rate": 1.420308720069844e-05,
|
13861 |
-
"loss": 0.8704,
|
13862 |
-
"step": 1979
|
13863 |
-
},
|
13864 |
-
{
|
13865 |
-
"epoch": 0.38,
|
13866 |
-
"grad_norm": 1.5465513905600146,
|
13867 |
-
"learning_rate": 1.4197427018843804e-05,
|
13868 |
-
"loss": 0.8807,
|
13869 |
-
"step": 1980
|
13870 |
-
},
|
13871 |
-
{
|
13872 |
-
"epoch": 0.38,
|
13873 |
-
"grad_norm": 1.9032763143445839,
|
13874 |
-
"learning_rate": 1.4191765204166643e-05,
|
13875 |
-
"loss": 0.8731,
|
13876 |
-
"step": 1981
|
13877 |
-
},
|
13878 |
-
{
|
13879 |
-
"epoch": 0.38,
|
13880 |
-
"grad_norm": 1.5074974422961198,
|
13881 |
-
"learning_rate": 1.418610175886943e-05,
|
13882 |
-
"loss": 0.8785,
|
13883 |
-
"step": 1982
|
13884 |
-
},
|
13885 |
-
{
|
13886 |
-
"epoch": 0.38,
|
13887 |
-
"grad_norm": 1.7085486460822743,
|
13888 |
-
"learning_rate": 1.4180436685155284e-05,
|
13889 |
-
"loss": 0.9437,
|
13890 |
-
"step": 1983
|
13891 |
-
},
|
13892 |
-
{
|
13893 |
-
"epoch": 0.38,
|
13894 |
-
"grad_norm": 1.0614399683461917,
|
13895 |
-
"learning_rate": 1.4174769985227943e-05,
|
13896 |
-
"loss": 0.869,
|
13897 |
-
"step": 1984
|
13898 |
-
},
|
13899 |
-
{
|
13900 |
-
"epoch": 0.38,
|
13901 |
-
"grad_norm": 1.8102639056663725,
|
13902 |
-
"learning_rate": 1.4169101661291789e-05,
|
13903 |
-
"loss": 0.9407,
|
13904 |
-
"step": 1985
|
13905 |
-
},
|
13906 |
-
{
|
13907 |
-
"epoch": 0.38,
|
13908 |
-
"grad_norm": 1.677696426013255,
|
13909 |
-
"learning_rate": 1.416343171555183e-05,
|
13910 |
-
"loss": 0.9164,
|
13911 |
-
"step": 1986
|
13912 |
-
},
|
13913 |
-
{
|
13914 |
-
"epoch": 0.38,
|
13915 |
-
"grad_norm": 1.5983097724232058,
|
13916 |
-
"learning_rate": 1.4157760150213706e-05,
|
13917 |
-
"loss": 0.8976,
|
13918 |
-
"step": 1987
|
13919 |
-
},
|
13920 |
-
{
|
13921 |
-
"epoch": 0.38,
|
13922 |
-
"grad_norm": 1.5633357942983659,
|
13923 |
-
"learning_rate": 1.415208696748369e-05,
|
13924 |
-
"loss": 0.9583,
|
13925 |
-
"step": 1988
|
13926 |
-
},
|
13927 |
-
{
|
13928 |
-
"epoch": 0.38,
|
13929 |
-
"grad_norm": 1.8757667991615432,
|
13930 |
-
"learning_rate": 1.414641216956868e-05,
|
13931 |
-
"loss": 0.8841,
|
13932 |
-
"step": 1989
|
13933 |
-
},
|
13934 |
-
{
|
13935 |
-
"epoch": 0.38,
|
13936 |
-
"grad_norm": 1.7104011632366787,
|
13937 |
-
"learning_rate": 1.4140735758676205e-05,
|
13938 |
-
"loss": 0.9663,
|
13939 |
-
"step": 1990
|
13940 |
-
},
|
13941 |
-
{
|
13942 |
-
"epoch": 0.38,
|
13943 |
-
"grad_norm": 1.6278015170837083,
|
13944 |
-
"learning_rate": 1.4135057737014416e-05,
|
13945 |
-
"loss": 0.9017,
|
13946 |
-
"step": 1991
|
13947 |
-
},
|
13948 |
-
{
|
13949 |
-
"epoch": 0.38,
|
13950 |
-
"grad_norm": 1.6085948855074665,
|
13951 |
-
"learning_rate": 1.4129378106792103e-05,
|
13952 |
-
"loss": 0.9748,
|
13953 |
-
"step": 1992
|
13954 |
-
},
|
13955 |
-
{
|
13956 |
-
"epoch": 0.38,
|
13957 |
-
"grad_norm": 1.7212729021378976,
|
13958 |
-
"learning_rate": 1.412369687021867e-05,
|
13959 |
-
"loss": 0.8887,
|
13960 |
-
"step": 1993
|
13961 |
-
},
|
13962 |
-
{
|
13963 |
-
"epoch": 0.38,
|
13964 |
-
"grad_norm": 1.4847775674762957,
|
13965 |
-
"learning_rate": 1.411801402950415e-05,
|
13966 |
-
"loss": 0.8589,
|
13967 |
-
"step": 1994
|
13968 |
-
},
|
13969 |
-
{
|
13970 |
-
"epoch": 0.38,
|
13971 |
-
"grad_norm": 1.7254985997235885,
|
13972 |
-
"learning_rate": 1.4112329586859196e-05,
|
13973 |
-
"loss": 0.9851,
|
13974 |
-
"step": 1995
|
13975 |
-
},
|
13976 |
-
{
|
13977 |
-
"epoch": 0.38,
|
13978 |
-
"grad_norm": 1.6497964331392245,
|
13979 |
-
"learning_rate": 1.4106643544495092e-05,
|
13980 |
-
"loss": 0.9671,
|
13981 |
-
"step": 1996
|
13982 |
-
},
|
13983 |
-
{
|
13984 |
-
"epoch": 0.38,
|
13985 |
-
"grad_norm": 1.7500475617926596,
|
13986 |
-
"learning_rate": 1.4100955904623742e-05,
|
13987 |
-
"loss": 0.9816,
|
13988 |
-
"step": 1997
|
13989 |
-
},
|
13990 |
-
{
|
13991 |
-
"epoch": 0.38,
|
13992 |
-
"grad_norm": 1.6055781534173013,
|
13993 |
-
"learning_rate": 1.4095266669457664e-05,
|
13994 |
-
"loss": 0.7968,
|
13995 |
-
"step": 1998
|
13996 |
-
},
|
13997 |
-
{
|
13998 |
-
"epoch": 0.38,
|
13999 |
-
"grad_norm": 1.7201656182922893,
|
14000 |
-
"learning_rate": 1.4089575841210004e-05,
|
14001 |
-
"loss": 0.9469,
|
14002 |
-
"step": 1999
|
14003 |
-
},
|
14004 |
-
{
|
14005 |
-
"epoch": 0.39,
|
14006 |
-
"grad_norm": 1.7198154929517466,
|
14007 |
-
"learning_rate": 1.4083883422094528e-05,
|
14008 |
-
"loss": 0.9505,
|
14009 |
-
"step": 2000
|
14010 |
}
|
14011 |
],
|
14012 |
"logging_steps": 1.0,
|
@@ -14014,7 +10514,7 @@
|
|
14014 |
"num_input_tokens_seen": 0,
|
14015 |
"num_train_epochs": 1,
|
14016 |
"save_steps": 500,
|
14017 |
-
"total_flos":
|
14018 |
"train_batch_size": 4,
|
14019 |
"trial_name": null,
|
14020 |
"trial_params": null
|
|
|
1 |
{
|
2 |
"best_metric": null,
|
3 |
"best_model_checkpoint": null,
|
4 |
+
"epoch": 0.28885037550548814,
|
5 |
"eval_steps": 500,
|
6 |
+
"global_step": 1500,
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
|
|
10507 |
"learning_rate": 1.6687597216781584e-05,
|
10508 |
"loss": 0.8145,
|
10509 |
"step": 1500
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10510 |         }
10511 |     ],
10512 |     "logging_steps": 1.0,
10514 |     "num_input_tokens_seen": 0,
10515 |     "num_train_epochs": 1,
10516 |     "save_steps": 500,
10517 | +   "total_flos": 2005543029506048.0,
10518 |     "train_batch_size": 4,
10519 |     "trial_name": null,
10520 |     "trial_params": null
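For reference, the fields shown in this diff tail sit at the top level of `trainer_state.json`, after the long `"log_history"` array of per-step entries. A minimal sketch of inspecting them (assuming the checkpoint folder has been downloaded locally; the path below is illustrative, not part of this commit):

```python
import json

# Hypothetical local path; adjust to wherever checkpoint-1500 was downloaded.
path = "checkpoint-1500/trainer_state.json"

with open(path) as f:
    state = json.load(f)

# Run-level fields that appear at the end of the file in this diff.
for key in ("logging_steps", "num_train_epochs", "save_steps",
            "total_flos", "train_batch_size"):
    print(f"{key}: {state.get(key)}")

# Per-step logs live under "log_history"; the last entry in this
# checkpoint corresponds to global step 1500.
print("last logged step:", state["log_history"][-1].get("step"))
```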