Upload folder using huggingface_hub

Changed files:
- adapter_model.safetensors +1 -1
- optimizer.pt +1 -1
- rng_state.pth +1 -1
- scheduler.pt +1 -1
- trainer_state.json +5 -3604
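A commit titled like this is typically produced by huggingface_hub's upload_folder when no custom commit message is passed. A minimal sketch of how such an upload might have been made; repo_id and folder_path below are hypothetical placeholders, not values taken from this repository:

# Sketch only: push a local checkpoint folder to a Hub model repo.
# repo_id and folder_path are placeholders, not from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="path/to/checkpoint",        # local folder with the files listed above
    repo_id="user/my-lora-model",            # target repo on the Hub
    repo_type="model",
    # leaving commit_message unset uses the library's default commit title
)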
adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4b7b015e42e173818fea90cd734464a0f91cc0f673d9774615a292dfea18e1d3
 size 13648432
optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:38b34b2b6c04fc909904a419a9220b3ab237bd22eee93af56cc2e03983e68b94
 size 27338810
rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:39dbd3f37f16ee432d6b762708109d2e74798485e2714032f47750ffe2268ca1
 size 14244
scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:836e374dfbd903836045d35c4066a8ab079349c0c581eecdecc125ab8c4439e1
 size 1064
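Each of the four diffs above touches a Git LFS pointer file: only the oid sha256 line changes, while the spec version and recorded byte size stay the same, so each binary was replaced by new content of identical size. A small sketch for checking a local file against the oid recorded in its pointer; the file name is just an example:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so large checkpoints do not have to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Compare against the oid in the pointer, e.g. for adapter_model.safetensors:
# expected 4b7b015e42e173818fea90cd734464a0f91cc0f673d9774615a292dfea18e1d3
print(sha256_of("adapter_model.safetensors"))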
trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
-  "best_metric": 2.
-  "best_model_checkpoint": "/home/sunggeunan/data/ICL/outputs/lora/SKIML-ICL_mrqa_nq_v3/Meta-Llama-3-8B-Instruct-unanswerable-5Q-0U-0C-qa_first/checkpoint-
-  "epoch":
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -3599,3605 +3599,6 @@
       "eval_samples_per_second": 1.293,
       "eval_steps_per_second": 0.324,
       "step": 512
3602 |
-
},
|
3603 |
-
{
|
3604 |
-
"epoch": 1.0002437241043138,
|
3605 |
-
"grad_norm": 0.41433945298194885,
|
3606 |
-
"learning_rate": 3.505825908156271e-07,
|
3607 |
-
"loss": 2.0366,
|
3608 |
-
"step": 513
|
3609 |
-
},
|
3610 |
-
{
|
3611 |
-
"epoch": 1.0021935169388252,
|
3612 |
-
"grad_norm": 0.42399510741233826,
|
3613 |
-
"learning_rate": 3.5023989033584646e-07,
|
3614 |
-
"loss": 1.991,
|
3615 |
-
"step": 514
|
3616 |
-
},
|
3617 |
-
{
|
3618 |
-
"epoch": 1.0041433097733365,
|
3619 |
-
"grad_norm": 0.45652541518211365,
|
3620 |
-
"learning_rate": 3.4989718985606576e-07,
|
3621 |
-
"loss": 2.0066,
|
3622 |
-
"step": 515
|
3623 |
-
},
|
3624 |
-
{
|
3625 |
-
"epoch": 1.006093102607848,
|
3626 |
-
"grad_norm": 0.43585795164108276,
|
3627 |
-
"learning_rate": 3.495544893762851e-07,
|
3628 |
-
"loss": 2.0025,
|
3629 |
-
"step": 516
|
3630 |
-
},
|
3631 |
-
{
|
3632 |
-
"epoch": 1.0080428954423593,
|
3633 |
-
"grad_norm": 0.43803489208221436,
|
3634 |
-
"learning_rate": 3.4921178889650445e-07,
|
3635 |
-
"loss": 2.0654,
|
3636 |
-
"step": 517
|
3637 |
-
},
|
3638 |
-
{
|
3639 |
-
"epoch": 1.0099926882768706,
|
3640 |
-
"grad_norm": 0.43803176283836365,
|
3641 |
-
"learning_rate": 3.4886908841672375e-07,
|
3642 |
-
"loss": 2.0896,
|
3643 |
-
"step": 518
|
3644 |
-
},
|
3645 |
-
{
|
3646 |
-
"epoch": 1.011942481111382,
|
3647 |
-
"grad_norm": 0.41983944177627563,
|
3648 |
-
"learning_rate": 3.485263879369431e-07,
|
3649 |
-
"loss": 2.0335,
|
3650 |
-
"step": 519
|
3651 |
-
},
|
3652 |
-
{
|
3653 |
-
"epoch": 1.0138922739458933,
|
3654 |
-
"grad_norm": 0.4354363977909088,
|
3655 |
-
"learning_rate": 3.4818368745716244e-07,
|
3656 |
-
"loss": 2.0699,
|
3657 |
-
"step": 520
|
3658 |
-
},
|
3659 |
-
{
|
3660 |
-
"epoch": 1.0158420667804047,
|
3661 |
-
"grad_norm": 0.42140671610832214,
|
3662 |
-
"learning_rate": 3.4784098697738174e-07,
|
3663 |
-
"loss": 1.9646,
|
3664 |
-
"step": 521
|
3665 |
-
},
|
3666 |
-
{
|
3667 |
-
"epoch": 1.0177918596149158,
|
3668 |
-
"grad_norm": 0.4265493154525757,
|
3669 |
-
"learning_rate": 3.474982864976011e-07,
|
3670 |
-
"loss": 2.0735,
|
3671 |
-
"step": 522
|
3672 |
-
},
|
3673 |
-
{
|
3674 |
-
"epoch": 1.0197416524494272,
|
3675 |
-
"grad_norm": 0.43847259879112244,
|
3676 |
-
"learning_rate": 3.4715558601782043e-07,
|
3677 |
-
"loss": 2.0986,
|
3678 |
-
"step": 523
|
3679 |
-
},
|
3680 |
-
{
|
3681 |
-
"epoch": 1.0216914452839385,
|
3682 |
-
"grad_norm": 0.4600801467895508,
|
3683 |
-
"learning_rate": 3.468128855380397e-07,
|
3684 |
-
"loss": 2.0643,
|
3685 |
-
"step": 524
|
3686 |
-
},
|
3687 |
-
{
|
3688 |
-
"epoch": 1.0236412381184499,
|
3689 |
-
"grad_norm": 0.42904648184776306,
|
3690 |
-
"learning_rate": 3.4647018505825907e-07,
|
3691 |
-
"loss": 2.0056,
|
3692 |
-
"step": 525
|
3693 |
-
},
|
3694 |
-
{
|
3695 |
-
"epoch": 1.0255910309529612,
|
3696 |
-
"grad_norm": 0.46431151032447815,
|
3697 |
-
"learning_rate": 3.461274845784784e-07,
|
3698 |
-
"loss": 2.1056,
|
3699 |
-
"step": 526
|
3700 |
-
},
|
3701 |
-
{
|
3702 |
-
"epoch": 1.0275408237874726,
|
3703 |
-
"grad_norm": 0.455836683511734,
|
3704 |
-
"learning_rate": 3.457847840986977e-07,
|
3705 |
-
"loss": 2.0187,
|
3706 |
-
"step": 527
|
3707 |
-
},
|
3708 |
-
{
|
3709 |
-
"epoch": 1.029490616621984,
|
3710 |
-
"grad_norm": 0.4192461669445038,
|
3711 |
-
"learning_rate": 3.45442083618917e-07,
|
3712 |
-
"loss": 2.0832,
|
3713 |
-
"step": 528
|
3714 |
-
},
|
3715 |
-
{
|
3716 |
-
"epoch": 1.0314404094564953,
|
3717 |
-
"grad_norm": 0.4513595402240753,
|
3718 |
-
"learning_rate": 3.450993831391364e-07,
|
3719 |
-
"loss": 2.058,
|
3720 |
-
"step": 529
|
3721 |
-
},
|
3722 |
-
{
|
3723 |
-
"epoch": 1.0333902022910066,
|
3724 |
-
"grad_norm": 0.4370152950286865,
|
3725 |
-
"learning_rate": 3.447566826593557e-07,
|
3726 |
-
"loss": 2.0537,
|
3727 |
-
"step": 530
|
3728 |
-
},
|
3729 |
-
{
|
3730 |
-
"epoch": 1.035339995125518,
|
3731 |
-
"grad_norm": 0.4199161231517792,
|
3732 |
-
"learning_rate": 3.44413982179575e-07,
|
3733 |
-
"loss": 1.9518,
|
3734 |
-
"step": 531
|
3735 |
-
},
|
3736 |
-
{
|
3737 |
-
"epoch": 1.0372897879600294,
|
3738 |
-
"grad_norm": 0.43688762187957764,
|
3739 |
-
"learning_rate": 3.440712816997944e-07,
|
3740 |
-
"loss": 2.0444,
|
3741 |
-
"step": 532
|
3742 |
-
},
|
3743 |
-
{
|
3744 |
-
"epoch": 1.0392395807945405,
|
3745 |
-
"grad_norm": 0.49809253215789795,
|
3746 |
-
"learning_rate": 3.437285812200137e-07,
|
3747 |
-
"loss": 2.0401,
|
3748 |
-
"step": 533
|
3749 |
-
},
|
3750 |
-
{
|
3751 |
-
"epoch": 1.0411893736290518,
|
3752 |
-
"grad_norm": 0.4518781900405884,
|
3753 |
-
"learning_rate": 3.43385880740233e-07,
|
3754 |
-
"loss": 2.0605,
|
3755 |
-
"step": 534
|
3756 |
-
},
|
3757 |
-
{
|
3758 |
-
"epoch": 1.0431391664635632,
|
3759 |
-
"grad_norm": 0.45353132486343384,
|
3760 |
-
"learning_rate": 3.430431802604524e-07,
|
3761 |
-
"loss": 2.0402,
|
3762 |
-
"step": 535
|
3763 |
-
},
|
3764 |
-
{
|
3765 |
-
"epoch": 1.0450889592980745,
|
3766 |
-
"grad_norm": 0.4396359622478485,
|
3767 |
-
"learning_rate": 3.427004797806717e-07,
|
3768 |
-
"loss": 2.0643,
|
3769 |
-
"step": 536
|
3770 |
-
},
|
3771 |
-
{
|
3772 |
-
"epoch": 1.047038752132586,
|
3773 |
-
"grad_norm": 0.4434252083301544,
|
3774 |
-
"learning_rate": 3.42357779300891e-07,
|
3775 |
-
"loss": 2.0188,
|
3776 |
-
"step": 537
|
3777 |
-
},
|
3778 |
-
{
|
3779 |
-
"epoch": 1.0489885449670973,
|
3780 |
-
"grad_norm": 0.4241044819355011,
|
3781 |
-
"learning_rate": 3.420150788211104e-07,
|
3782 |
-
"loss": 1.9556,
|
3783 |
-
"step": 538
|
3784 |
-
},
|
3785 |
-
{
|
3786 |
-
"epoch": 1.0509383378016086,
|
3787 |
-
"grad_norm": 0.4382232129573822,
|
3788 |
-
"learning_rate": 3.4167237834132967e-07,
|
3789 |
-
"loss": 1.9855,
|
3790 |
-
"step": 539
|
3791 |
-
},
|
3792 |
-
{
|
3793 |
-
"epoch": 1.05288813063612,
|
3794 |
-
"grad_norm": 0.4357564151287079,
|
3795 |
-
"learning_rate": 3.4132967786154897e-07,
|
3796 |
-
"loss": 2.0524,
|
3797 |
-
"step": 540
|
3798 |
-
},
|
3799 |
-
{
|
3800 |
-
"epoch": 1.0548379234706313,
|
3801 |
-
"grad_norm": 0.46050140261650085,
|
3802 |
-
"learning_rate": 3.409869773817683e-07,
|
3803 |
-
"loss": 2.0461,
|
3804 |
-
"step": 541
|
3805 |
-
},
|
3806 |
-
{
|
3807 |
-
"epoch": 1.0567877163051427,
|
3808 |
-
"grad_norm": 0.44581982493400574,
|
3809 |
-
"learning_rate": 3.4064427690198766e-07,
|
3810 |
-
"loss": 1.9955,
|
3811 |
-
"step": 542
|
3812 |
-
},
|
3813 |
-
{
|
3814 |
-
"epoch": 1.0587375091396538,
|
3815 |
-
"grad_norm": 0.4502599835395813,
|
3816 |
-
"learning_rate": 3.4030157642220696e-07,
|
3817 |
-
"loss": 2.0864,
|
3818 |
-
"step": 543
|
3819 |
-
},
|
3820 |
-
{
|
3821 |
-
"epoch": 1.0606873019741652,
|
3822 |
-
"grad_norm": 0.44767019152641296,
|
3823 |
-
"learning_rate": 3.399588759424263e-07,
|
3824 |
-
"loss": 2.0447,
|
3825 |
-
"step": 544
|
3826 |
-
},
|
3827 |
-
{
|
3828 |
-
"epoch": 1.0626370948086765,
|
3829 |
-
"grad_norm": 0.44603490829467773,
|
3830 |
-
"learning_rate": 3.3961617546264565e-07,
|
3831 |
-
"loss": 2.0709,
|
3832 |
-
"step": 545
|
3833 |
-
},
|
3834 |
-
{
|
3835 |
-
"epoch": 1.0645868876431879,
|
3836 |
-
"grad_norm": 0.4321264922618866,
|
3837 |
-
"learning_rate": 3.3927347498286495e-07,
|
3838 |
-
"loss": 2.0157,
|
3839 |
-
"step": 546
|
3840 |
-
},
|
3841 |
-
{
|
3842 |
-
"epoch": 1.0665366804776992,
|
3843 |
-
"grad_norm": 0.4479556083679199,
|
3844 |
-
"learning_rate": 3.389307745030843e-07,
|
3845 |
-
"loss": 2.1088,
|
3846 |
-
"step": 547
|
3847 |
-
},
|
3848 |
-
{
|
3849 |
-
"epoch": 1.0684864733122106,
|
3850 |
-
"grad_norm": 0.4292636513710022,
|
3851 |
-
"learning_rate": 3.3858807402330364e-07,
|
3852 |
-
"loss": 2.0847,
|
3853 |
-
"step": 548
|
3854 |
-
},
|
3855 |
-
{
|
3856 |
-
"epoch": 1.070436266146722,
|
3857 |
-
"grad_norm": 0.43631821870803833,
|
3858 |
-
"learning_rate": 3.3824537354352294e-07,
|
3859 |
-
"loss": 2.034,
|
3860 |
-
"step": 549
|
3861 |
-
},
|
3862 |
-
{
|
3863 |
-
"epoch": 1.0723860589812333,
|
3864 |
-
"grad_norm": 0.43201327323913574,
|
3865 |
-
"learning_rate": 3.379026730637423e-07,
|
3866 |
-
"loss": 1.9633,
|
3867 |
-
"step": 550
|
3868 |
-
},
|
3869 |
-
{
|
3870 |
-
"epoch": 1.0743358518157446,
|
3871 |
-
"grad_norm": 0.4389747679233551,
|
3872 |
-
"learning_rate": 3.3755997258396163e-07,
|
3873 |
-
"loss": 2.0331,
|
3874 |
-
"step": 551
|
3875 |
-
},
|
3876 |
-
{
|
3877 |
-
"epoch": 1.076285644650256,
|
3878 |
-
"grad_norm": 0.46588924527168274,
|
3879 |
-
"learning_rate": 3.372172721041809e-07,
|
3880 |
-
"loss": 2.0748,
|
3881 |
-
"step": 552
|
3882 |
-
},
|
3883 |
-
{
|
3884 |
-
"epoch": 1.0782354374847674,
|
3885 |
-
"grad_norm": 0.45190852880477905,
|
3886 |
-
"learning_rate": 3.3687457162440027e-07,
|
3887 |
-
"loss": 1.9639,
|
3888 |
-
"step": 553
|
3889 |
-
},
|
3890 |
-
{
|
3891 |
-
"epoch": 1.0801852303192785,
|
3892 |
-
"grad_norm": 0.4458979070186615,
|
3893 |
-
"learning_rate": 3.3653187114461957e-07,
|
3894 |
-
"loss": 2.1124,
|
3895 |
-
"step": 554
|
3896 |
-
},
|
3897 |
-
{
|
3898 |
-
"epoch": 1.0821350231537898,
|
3899 |
-
"grad_norm": 0.40400832891464233,
|
3900 |
-
"learning_rate": 3.361891706648389e-07,
|
3901 |
-
"loss": 1.9776,
|
3902 |
-
"step": 555
|
3903 |
-
},
|
3904 |
-
{
|
3905 |
-
"epoch": 1.0840848159883012,
|
3906 |
-
"grad_norm": 0.4538462460041046,
|
3907 |
-
"learning_rate": 3.3584647018505826e-07,
|
3908 |
-
"loss": 1.9962,
|
3909 |
-
"step": 556
|
3910 |
-
},
|
3911 |
-
{
|
3912 |
-
"epoch": 1.0860346088228126,
|
3913 |
-
"grad_norm": 0.44181132316589355,
|
3914 |
-
"learning_rate": 3.3550376970527756e-07,
|
3915 |
-
"loss": 2.0973,
|
3916 |
-
"step": 557
|
3917 |
-
},
|
3918 |
-
{
|
3919 |
-
"epoch": 1.087984401657324,
|
3920 |
-
"grad_norm": 0.43516308069229126,
|
3921 |
-
"learning_rate": 3.351610692254969e-07,
|
3922 |
-
"loss": 1.9923,
|
3923 |
-
"step": 558
|
3924 |
-
},
|
3925 |
-
{
|
3926 |
-
"epoch": 1.0899341944918353,
|
3927 |
-
"grad_norm": 0.4485546052455902,
|
3928 |
-
"learning_rate": 3.3481836874571625e-07,
|
3929 |
-
"loss": 2.0242,
|
3930 |
-
"step": 559
|
3931 |
-
},
|
3932 |
-
{
|
3933 |
-
"epoch": 1.0918839873263466,
|
3934 |
-
"grad_norm": 0.45358070731163025,
|
3935 |
-
"learning_rate": 3.3447566826593555e-07,
|
3936 |
-
"loss": 2.0603,
|
3937 |
-
"step": 560
|
3938 |
-
},
|
3939 |
-
{
|
3940 |
-
"epoch": 1.093833780160858,
|
3941 |
-
"grad_norm": 0.43879690766334534,
|
3942 |
-
"learning_rate": 3.341329677861549e-07,
|
3943 |
-
"loss": 1.9869,
|
3944 |
-
"step": 561
|
3945 |
-
},
|
3946 |
-
{
|
3947 |
-
"epoch": 1.0957835729953693,
|
3948 |
-
"grad_norm": 0.4376320242881775,
|
3949 |
-
"learning_rate": 3.3379026730637424e-07,
|
3950 |
-
"loss": 2.0447,
|
3951 |
-
"step": 562
|
3952 |
-
},
|
3953 |
-
{
|
3954 |
-
"epoch": 1.0977333658298805,
|
3955 |
-
"grad_norm": 0.4591986835002899,
|
3956 |
-
"learning_rate": 3.3344756682659354e-07,
|
3957 |
-
"loss": 2.0188,
|
3958 |
-
"step": 563
|
3959 |
-
},
|
3960 |
-
{
|
3961 |
-
"epoch": 1.0996831586643918,
|
3962 |
-
"grad_norm": 0.4306589961051941,
|
3963 |
-
"learning_rate": 3.331048663468129e-07,
|
3964 |
-
"loss": 2.0223,
|
3965 |
-
"step": 564
|
3966 |
-
},
|
3967 |
-
{
|
3968 |
-
"epoch": 1.1016329514989032,
|
3969 |
-
"grad_norm": 0.43692710995674133,
|
3970 |
-
"learning_rate": 3.3276216586703223e-07,
|
3971 |
-
"loss": 2.0507,
|
3972 |
-
"step": 565
|
3973 |
-
},
|
3974 |
-
{
|
3975 |
-
"epoch": 1.1035827443334145,
|
3976 |
-
"grad_norm": 0.4663935601711273,
|
3977 |
-
"learning_rate": 3.324194653872515e-07,
|
3978 |
-
"loss": 2.0444,
|
3979 |
-
"step": 566
|
3980 |
-
},
|
3981 |
-
{
|
3982 |
-
"epoch": 1.1055325371679259,
|
3983 |
-
"grad_norm": 0.45090562105178833,
|
3984 |
-
"learning_rate": 3.320767649074708e-07,
|
3985 |
-
"loss": 1.9944,
|
3986 |
-
"step": 567
|
3987 |
-
},
|
3988 |
-
{
|
3989 |
-
"epoch": 1.1074823300024372,
|
3990 |
-
"grad_norm": 0.4450632631778717,
|
3991 |
-
"learning_rate": 3.317340644276902e-07,
|
3992 |
-
"loss": 2.0264,
|
3993 |
-
"step": 568
|
3994 |
-
},
|
3995 |
-
{
|
3996 |
-
"epoch": 1.1094321228369486,
|
3997 |
-
"grad_norm": 0.45126745104789734,
|
3998 |
-
"learning_rate": 3.313913639479095e-07,
|
3999 |
-
"loss": 2.081,
|
4000 |
-
"step": 569
|
4001 |
-
},
|
4002 |
-
{
|
4003 |
-
"epoch": 1.11138191567146,
|
4004 |
-
"grad_norm": 0.44254472851753235,
|
4005 |
-
"learning_rate": 3.310486634681288e-07,
|
4006 |
-
"loss": 2.0223,
|
4007 |
-
"step": 570
|
4008 |
-
},
|
4009 |
-
{
|
4010 |
-
"epoch": 1.1133317085059713,
|
4011 |
-
"grad_norm": 0.43211621046066284,
|
4012 |
-
"learning_rate": 3.307059629883482e-07,
|
4013 |
-
"loss": 2.0363,
|
4014 |
-
"step": 571
|
4015 |
-
},
|
4016 |
-
{
|
4017 |
-
"epoch": 1.1152815013404827,
|
4018 |
-
"grad_norm": 0.4256265163421631,
|
4019 |
-
"learning_rate": 3.303632625085675e-07,
|
4020 |
-
"loss": 2.0363,
|
4021 |
-
"step": 572
|
4022 |
-
},
|
4023 |
-
{
|
4024 |
-
"epoch": 1.117231294174994,
|
4025 |
-
"grad_norm": 0.4462417960166931,
|
4026 |
-
"learning_rate": 3.300205620287868e-07,
|
4027 |
-
"loss": 2.0394,
|
4028 |
-
"step": 573
|
4029 |
-
},
|
4030 |
-
{
|
4031 |
-
"epoch": 1.1191810870095051,
|
4032 |
-
"grad_norm": 0.4583437442779541,
|
4033 |
-
"learning_rate": 3.296778615490062e-07,
|
4034 |
-
"loss": 2.0878,
|
4035 |
-
"step": 574
|
4036 |
-
},
|
4037 |
-
{
|
4038 |
-
"epoch": 1.1211308798440165,
|
4039 |
-
"grad_norm": 0.4595088064670563,
|
4040 |
-
"learning_rate": 3.293351610692255e-07,
|
4041 |
-
"loss": 2.111,
|
4042 |
-
"step": 575
|
4043 |
-
},
|
4044 |
-
{
|
4045 |
-
"epoch": 1.1230806726785278,
|
4046 |
-
"grad_norm": 0.4117080569267273,
|
4047 |
-
"learning_rate": 3.289924605894448e-07,
|
4048 |
-
"loss": 1.999,
|
4049 |
-
"step": 576
|
4050 |
-
},
|
4051 |
-
{
|
4052 |
-
"epoch": 1.1250304655130392,
|
4053 |
-
"grad_norm": 0.4381641149520874,
|
4054 |
-
"learning_rate": 3.286497601096642e-07,
|
4055 |
-
"loss": 2.044,
|
4056 |
-
"step": 577
|
4057 |
-
},
|
4058 |
-
{
|
4059 |
-
"epoch": 1.1269802583475506,
|
4060 |
-
"grad_norm": 0.43854039907455444,
|
4061 |
-
"learning_rate": 3.283070596298835e-07,
|
4062 |
-
"loss": 2.0272,
|
4063 |
-
"step": 578
|
4064 |
-
},
|
4065 |
-
{
|
4066 |
-
"epoch": 1.128930051182062,
|
4067 |
-
"grad_norm": 0.4721965789794922,
|
4068 |
-
"learning_rate": 3.279643591501028e-07,
|
4069 |
-
"loss": 2.0697,
|
4070 |
-
"step": 579
|
4071 |
-
},
|
4072 |
-
{
|
4073 |
-
"epoch": 1.1308798440165733,
|
4074 |
-
"grad_norm": 0.4373783767223358,
|
4075 |
-
"learning_rate": 3.2762165867032207e-07,
|
4076 |
-
"loss": 2.0102,
|
4077 |
-
"step": 580
|
4078 |
-
},
|
4079 |
-
{
|
4080 |
-
"epoch": 1.1328296368510846,
|
4081 |
-
"grad_norm": 0.4286502003669739,
|
4082 |
-
"learning_rate": 3.2727895819054147e-07,
|
4083 |
-
"loss": 1.9695,
|
4084 |
-
"step": 581
|
4085 |
-
},
|
4086 |
-
{
|
4087 |
-
"epoch": 1.134779429685596,
|
4088 |
-
"grad_norm": 0.4373305141925812,
|
4089 |
-
"learning_rate": 3.2693625771076077e-07,
|
4090 |
-
"loss": 1.9823,
|
4091 |
-
"step": 582
|
4092 |
-
},
|
4093 |
-
{
|
4094 |
-
"epoch": 1.1367292225201073,
|
4095 |
-
"grad_norm": 0.4659106433391571,
|
4096 |
-
"learning_rate": 3.2659355723098006e-07,
|
4097 |
-
"loss": 2.081,
|
4098 |
-
"step": 583
|
4099 |
-
},
|
4100 |
-
{
|
4101 |
-
"epoch": 1.1386790153546187,
|
4102 |
-
"grad_norm": 0.4315546154975891,
|
4103 |
-
"learning_rate": 3.2625085675119946e-07,
|
4104 |
-
"loss": 2.0336,
|
4105 |
-
"step": 584
|
4106 |
-
},
|
4107 |
-
{
|
4108 |
-
"epoch": 1.1406288081891298,
|
4109 |
-
"grad_norm": 0.4512901306152344,
|
4110 |
-
"learning_rate": 3.2590815627141876e-07,
|
4111 |
-
"loss": 2.0642,
|
4112 |
-
"step": 585
|
4113 |
-
},
|
4114 |
-
{
|
4115 |
-
"epoch": 1.1425786010236412,
|
4116 |
-
"grad_norm": 0.4398232400417328,
|
4117 |
-
"learning_rate": 3.2556545579163805e-07,
|
4118 |
-
"loss": 2.0401,
|
4119 |
-
"step": 586
|
4120 |
-
},
|
4121 |
-
{
|
4122 |
-
"epoch": 1.1445283938581525,
|
4123 |
-
"grad_norm": 0.45262405276298523,
|
4124 |
-
"learning_rate": 3.2522275531185745e-07,
|
4125 |
-
"loss": 2.0999,
|
4126 |
-
"step": 587
|
4127 |
-
},
|
4128 |
-
{
|
4129 |
-
"epoch": 1.1464781866926639,
|
4130 |
-
"grad_norm": 0.4210640490055084,
|
4131 |
-
"learning_rate": 3.2488005483207675e-07,
|
4132 |
-
"loss": 1.992,
|
4133 |
-
"step": 588
|
4134 |
-
},
|
4135 |
-
{
|
4136 |
-
"epoch": 1.1484279795271752,
|
4137 |
-
"grad_norm": 0.4530121386051178,
|
4138 |
-
"learning_rate": 3.2453735435229604e-07,
|
4139 |
-
"loss": 2.0119,
|
4140 |
-
"step": 589
|
4141 |
-
},
|
4142 |
-
{
|
4143 |
-
"epoch": 1.1503777723616866,
|
4144 |
-
"grad_norm": 0.43637722730636597,
|
4145 |
-
"learning_rate": 3.2419465387251544e-07,
|
4146 |
-
"loss": 2.0022,
|
4147 |
-
"step": 590
|
4148 |
-
},
|
4149 |
-
{
|
4150 |
-
"epoch": 1.152327565196198,
|
4151 |
-
"grad_norm": 0.46872228384017944,
|
4152 |
-
"learning_rate": 3.2385195339273474e-07,
|
4153 |
-
"loss": 2.0545,
|
4154 |
-
"step": 591
|
4155 |
-
},
|
4156 |
-
{
|
4157 |
-
"epoch": 1.1542773580307093,
|
4158 |
-
"grad_norm": 0.45964333415031433,
|
4159 |
-
"learning_rate": 3.2350925291295403e-07,
|
4160 |
-
"loss": 2.0313,
|
4161 |
-
"step": 592
|
4162 |
-
},
|
4163 |
-
{
|
4164 |
-
"epoch": 1.1562271508652207,
|
4165 |
-
"grad_norm": 0.4444529414176941,
|
4166 |
-
"learning_rate": 3.231665524331734e-07,
|
4167 |
-
"loss": 2.0463,
|
4168 |
-
"step": 593
|
4169 |
-
},
|
4170 |
-
{
|
4171 |
-
"epoch": 1.1581769436997318,
|
4172 |
-
"grad_norm": 0.4702310264110565,
|
4173 |
-
"learning_rate": 3.228238519533927e-07,
|
4174 |
-
"loss": 2.0055,
|
4175 |
-
"step": 594
|
4176 |
-
},
|
4177 |
-
{
|
4178 |
-
"epoch": 1.1601267365342431,
|
4179 |
-
"grad_norm": 0.4435891807079315,
|
4180 |
-
"learning_rate": 3.22481151473612e-07,
|
4181 |
-
"loss": 2.1027,
|
4182 |
-
"step": 595
|
4183 |
-
},
|
4184 |
-
{
|
4185 |
-
"epoch": 1.1620765293687545,
|
4186 |
-
"grad_norm": 0.4557732343673706,
|
4187 |
-
"learning_rate": 3.2213845099383137e-07,
|
4188 |
-
"loss": 2.0307,
|
4189 |
-
"step": 596
|
4190 |
-
},
|
4191 |
-
{
|
4192 |
-
"epoch": 1.1640263222032659,
|
4193 |
-
"grad_norm": 0.4286348819732666,
|
4194 |
-
"learning_rate": 3.217957505140507e-07,
|
4195 |
-
"loss": 2.0196,
|
4196 |
-
"step": 597
|
4197 |
-
},
|
4198 |
-
{
|
4199 |
-
"epoch": 1.1659761150377772,
|
4200 |
-
"grad_norm": 0.4475346803665161,
|
4201 |
-
"learning_rate": 3.2145305003427e-07,
|
4202 |
-
"loss": 2.1014,
|
4203 |
-
"step": 598
|
4204 |
-
},
|
4205 |
-
{
|
4206 |
-
"epoch": 1.1679259078722886,
|
4207 |
-
"grad_norm": 0.418293297290802,
|
4208 |
-
"learning_rate": 3.2111034955448936e-07,
|
4209 |
-
"loss": 2.078,
|
4210 |
-
"step": 599
|
4211 |
-
},
|
4212 |
-
{
|
4213 |
-
"epoch": 1.1698757007068,
|
4214 |
-
"grad_norm": 0.42740973830223083,
|
4215 |
-
"learning_rate": 3.207676490747087e-07,
|
4216 |
-
"loss": 1.9695,
|
4217 |
-
"step": 600
|
4218 |
-
},
|
4219 |
-
{
|
4220 |
-
"epoch": 1.1718254935413113,
|
4221 |
-
"grad_norm": 0.41325512528419495,
|
4222 |
-
"learning_rate": 3.20424948594928e-07,
|
4223 |
-
"loss": 2.0297,
|
4224 |
-
"step": 601
|
4225 |
-
},
|
4226 |
-
{
|
4227 |
-
"epoch": 1.1737752863758226,
|
4228 |
-
"grad_norm": 0.4326270818710327,
|
4229 |
-
"learning_rate": 3.2008224811514735e-07,
|
4230 |
-
"loss": 2.0059,
|
4231 |
-
"step": 602
|
4232 |
-
},
|
4233 |
-
{
|
4234 |
-
"epoch": 1.175725079210334,
|
4235 |
-
"grad_norm": 0.44774889945983887,
|
4236 |
-
"learning_rate": 3.197395476353667e-07,
|
4237 |
-
"loss": 2.0427,
|
4238 |
-
"step": 603
|
4239 |
-
},
|
4240 |
-
{
|
4241 |
-
"epoch": 1.1776748720448453,
|
4242 |
-
"grad_norm": 0.446158766746521,
|
4243 |
-
"learning_rate": 3.19396847155586e-07,
|
4244 |
-
"loss": 2.0748,
|
4245 |
-
"step": 604
|
4246 |
-
},
|
4247 |
-
{
|
4248 |
-
"epoch": 1.1796246648793565,
|
4249 |
-
"grad_norm": 0.4654727280139923,
|
4250 |
-
"learning_rate": 3.1905414667580534e-07,
|
4251 |
-
"loss": 1.9297,
|
4252 |
-
"step": 605
|
4253 |
-
},
|
4254 |
-
{
|
4255 |
-
"epoch": 1.1815744577138678,
|
4256 |
-
"grad_norm": 0.45213672518730164,
|
4257 |
-
"learning_rate": 3.1871144619602463e-07,
|
4258 |
-
"loss": 2.1087,
|
4259 |
-
"step": 606
|
4260 |
-
},
|
4261 |
-
{
|
4262 |
-
"epoch": 1.1835242505483792,
|
4263 |
-
"grad_norm": 0.45228397846221924,
|
4264 |
-
"learning_rate": 3.18368745716244e-07,
|
4265 |
-
"loss": 2.0961,
|
4266 |
-
"step": 607
|
4267 |
-
},
|
4268 |
-
{
|
4269 |
-
"epoch": 1.1854740433828905,
|
4270 |
-
"grad_norm": 0.4470541477203369,
|
4271 |
-
"learning_rate": 3.180260452364633e-07,
|
4272 |
-
"loss": 2.0073,
|
4273 |
-
"step": 608
|
4274 |
-
},
|
4275 |
-
{
|
4276 |
-
"epoch": 1.1874238362174019,
|
4277 |
-
"grad_norm": 0.4324132204055786,
|
4278 |
-
"learning_rate": 3.176833447566826e-07,
|
4279 |
-
"loss": 2.0334,
|
4280 |
-
"step": 609
|
4281 |
-
},
|
4282 |
-
{
|
4283 |
-
"epoch": 1.1893736290519132,
|
4284 |
-
"grad_norm": 0.47044241428375244,
|
4285 |
-
"learning_rate": 3.1734064427690197e-07,
|
4286 |
-
"loss": 2.1086,
|
4287 |
-
"step": 610
|
4288 |
-
},
|
4289 |
-
{
|
4290 |
-
"epoch": 1.1913234218864246,
|
4291 |
-
"grad_norm": 0.43018707633018494,
|
4292 |
-
"learning_rate": 3.169979437971213e-07,
|
4293 |
-
"loss": 2.0289,
|
4294 |
-
"step": 611
|
4295 |
-
},
|
4296 |
-
{
|
4297 |
-
"epoch": 1.193273214720936,
|
4298 |
-
"grad_norm": 0.44133853912353516,
|
4299 |
-
"learning_rate": 3.166552433173406e-07,
|
4300 |
-
"loss": 2.0333,
|
4301 |
-
"step": 612
|
4302 |
-
},
|
4303 |
-
{
|
4304 |
-
"epoch": 1.1952230075554473,
|
4305 |
-
"grad_norm": 0.45557719469070435,
|
4306 |
-
"learning_rate": 3.1631254283755996e-07,
|
4307 |
-
"loss": 2.0399,
|
4308 |
-
"step": 613
|
4309 |
-
},
|
4310 |
-
{
|
4311 |
-
"epoch": 1.1971728003899587,
|
4312 |
-
"grad_norm": 0.4350452125072479,
|
4313 |
-
"learning_rate": 3.159698423577793e-07,
|
4314 |
-
"loss": 2.0224,
|
4315 |
-
"step": 614
|
4316 |
-
},
|
4317 |
-
{
|
4318 |
-
"epoch": 1.19912259322447,
|
4319 |
-
"grad_norm": 0.4687999188899994,
|
4320 |
-
"learning_rate": 3.156271418779986e-07,
|
4321 |
-
"loss": 2.0228,
|
4322 |
-
"step": 615
|
4323 |
-
},
|
4324 |
-
{
|
4325 |
-
"epoch": 1.2010723860589811,
|
4326 |
-
"grad_norm": 0.43684178590774536,
|
4327 |
-
"learning_rate": 3.1528444139821795e-07,
|
4328 |
-
"loss": 2.0776,
|
4329 |
-
"step": 616
|
4330 |
-
},
|
4331 |
-
{
|
4332 |
-
"epoch": 1.2030221788934925,
|
4333 |
-
"grad_norm": 0.45561161637306213,
|
4334 |
-
"learning_rate": 3.149417409184373e-07,
|
4335 |
-
"loss": 2.0022,
|
4336 |
-
"step": 617
|
4337 |
-
},
|
4338 |
-
{
|
4339 |
-
"epoch": 1.2049719717280039,
|
4340 |
-
"grad_norm": 0.4689810276031494,
|
4341 |
-
"learning_rate": 3.145990404386566e-07,
|
4342 |
-
"loss": 2.0173,
|
4343 |
-
"step": 618
|
4344 |
-
},
|
4345 |
-
{
|
4346 |
-
"epoch": 1.2069217645625152,
|
4347 |
-
"grad_norm": 0.4293496310710907,
|
4348 |
-
"learning_rate": 3.142563399588759e-07,
|
4349 |
-
"loss": 1.9824,
|
4350 |
-
"step": 619
|
4351 |
-
},
|
4352 |
-
{
|
4353 |
-
"epoch": 1.2088715573970266,
|
4354 |
-
"grad_norm": 0.4662802219390869,
|
4355 |
-
"learning_rate": 3.139136394790953e-07,
|
4356 |
-
"loss": 2.0784,
|
4357 |
-
"step": 620
|
4358 |
-
},
|
4359 |
-
{
|
4360 |
-
"epoch": 1.210821350231538,
|
4361 |
-
"grad_norm": 0.45310187339782715,
|
4362 |
-
"learning_rate": 3.135709389993146e-07,
|
4363 |
-
"loss": 1.9844,
|
4364 |
-
"step": 621
|
4365 |
-
},
|
4366 |
-
{
|
4367 |
-
"epoch": 1.2127711430660493,
|
4368 |
-
"grad_norm": 0.4419795870780945,
|
4369 |
-
"learning_rate": 3.1322823851953387e-07,
|
4370 |
-
"loss": 2.0515,
|
4371 |
-
"step": 622
|
4372 |
-
},
|
4373 |
-
{
|
4374 |
-
"epoch": 1.2147209359005606,
|
4375 |
-
"grad_norm": 0.4516865611076355,
|
4376 |
-
"learning_rate": 3.1288553803975327e-07,
|
4377 |
-
"loss": 2.0879,
|
4378 |
-
"step": 623
|
4379 |
-
},
|
4380 |
-
{
|
4381 |
-
"epoch": 1.216670728735072,
|
4382 |
-
"grad_norm": 0.46178489923477173,
|
4383 |
-
"learning_rate": 3.1254283755997257e-07,
|
4384 |
-
"loss": 2.0498,
|
4385 |
-
"step": 624
|
4386 |
-
},
|
4387 |
-
{
|
4388 |
-
"epoch": 1.2186205215695831,
|
4389 |
-
"grad_norm": 0.4678952097892761,
|
4390 |
-
"learning_rate": 3.1220013708019186e-07,
|
4391 |
-
"loss": 2.0408,
|
4392 |
-
"step": 625
|
4393 |
-
},
|
4394 |
-
{
|
4395 |
-
"epoch": 1.2205703144040945,
|
4396 |
-
"grad_norm": 0.4456236660480499,
|
4397 |
-
"learning_rate": 3.1185743660041126e-07,
|
4398 |
-
"loss": 1.9694,
|
4399 |
-
"step": 626
|
4400 |
-
},
|
4401 |
-
{
|
4402 |
-
"epoch": 1.2225201072386058,
|
4403 |
-
"grad_norm": 0.4397581219673157,
|
4404 |
-
"learning_rate": 3.1151473612063056e-07,
|
4405 |
-
"loss": 2.0048,
|
4406 |
-
"step": 627
|
4407 |
-
},
|
4408 |
-
{
|
4409 |
-
"epoch": 1.2244699000731172,
|
4410 |
-
"grad_norm": 0.4338027238845825,
|
4411 |
-
"learning_rate": 3.1117203564084985e-07,
|
4412 |
-
"loss": 2.0194,
|
4413 |
-
"step": 628
|
4414 |
-
},
|
4415 |
-
{
|
4416 |
-
"epoch": 1.2264196929076285,
|
4417 |
-
"grad_norm": 0.4413823187351227,
|
4418 |
-
"learning_rate": 3.108293351610692e-07,
|
4419 |
-
"loss": 2.025,
|
4420 |
-
"step": 629
|
4421 |
-
},
|
4422 |
-
{
|
4423 |
-
"epoch": 1.2283694857421399,
|
4424 |
-
"grad_norm": 0.43685299158096313,
|
4425 |
-
"learning_rate": 3.1048663468128855e-07,
|
4426 |
-
"loss": 2.0051,
|
4427 |
-
"step": 630
|
4428 |
-
},
|
4429 |
-
{
|
4430 |
-
"epoch": 1.2303192785766512,
|
4431 |
-
"grad_norm": 0.4644426107406616,
|
4432 |
-
"learning_rate": 3.1014393420150784e-07,
|
4433 |
-
"loss": 2.0313,
|
4434 |
-
"step": 631
|
4435 |
-
},
|
4436 |
-
{
|
4437 |
-
"epoch": 1.2322690714111626,
|
4438 |
-
"grad_norm": 0.4478755593299866,
|
4439 |
-
"learning_rate": 3.098012337217272e-07,
|
4440 |
-
"loss": 1.9669,
|
4441 |
-
"step": 632
|
4442 |
-
},
|
4443 |
-
{
|
4444 |
-
"epoch": 1.234218864245674,
|
4445 |
-
"grad_norm": 0.43452218174934387,
|
4446 |
-
"learning_rate": 3.0945853324194654e-07,
|
4447 |
-
"loss": 1.9927,
|
4448 |
-
"step": 633
|
4449 |
-
},
|
4450 |
-
{
|
4451 |
-
"epoch": 1.2361686570801853,
|
4452 |
-
"grad_norm": 0.4408141076564789,
|
4453 |
-
"learning_rate": 3.0911583276216583e-07,
|
4454 |
-
"loss": 2.136,
|
4455 |
-
"step": 634
|
4456 |
-
},
|
4457 |
-
{
|
4458 |
-
"epoch": 1.2381184499146967,
|
4459 |
-
"grad_norm": 0.42754924297332764,
|
4460 |
-
"learning_rate": 3.087731322823852e-07,
|
4461 |
-
"loss": 2.0247,
|
4462 |
-
"step": 635
|
4463 |
-
},
|
4464 |
-
{
|
4465 |
-
"epoch": 1.2400682427492078,
|
4466 |
-
"grad_norm": 0.4387798607349396,
|
4467 |
-
"learning_rate": 3.084304318026045e-07,
|
4468 |
-
"loss": 1.9643,
|
4469 |
-
"step": 636
|
4470 |
-
},
|
4471 |
-
{
|
4472 |
-
"epoch": 1.2420180355837191,
|
4473 |
-
"grad_norm": 0.46978920698165894,
|
4474 |
-
"learning_rate": 3.080877313228238e-07,
|
4475 |
-
"loss": 2.0776,
|
4476 |
-
"step": 637
|
4477 |
-
},
|
4478 |
-
{
|
4479 |
-
"epoch": 1.2439678284182305,
|
4480 |
-
"grad_norm": 0.41821563243865967,
|
4481 |
-
"learning_rate": 3.0774503084304317e-07,
|
4482 |
-
"loss": 2.0355,
|
4483 |
-
"step": 638
|
4484 |
-
},
|
4485 |
-
{
|
4486 |
-
"epoch": 1.2459176212527419,
|
4487 |
-
"grad_norm": 0.4664837419986725,
|
4488 |
-
"learning_rate": 3.074023303632625e-07,
|
4489 |
-
"loss": 2.0328,
|
4490 |
-
"step": 639
|
4491 |
-
},
|
4492 |
-
{
|
4493 |
-
"epoch": 1.2478674140872532,
|
4494 |
-
"grad_norm": 0.4467378258705139,
|
4495 |
-
"learning_rate": 3.070596298834818e-07,
|
4496 |
-
"loss": 2.0058,
|
4497 |
-
"step": 640
|
4498 |
-
},
|
4499 |
-
{
|
4500 |
-
"epoch": 1.2498172069217646,
|
4501 |
-
"grad_norm": 0.442058265209198,
|
4502 |
-
"learning_rate": 3.0671692940370116e-07,
|
4503 |
-
"loss": 2.0565,
|
4504 |
-
"step": 641
|
4505 |
-
},
|
4506 |
-
{
|
4507 |
-
"epoch": 1.251766999756276,
|
4508 |
-
"grad_norm": 0.4655166268348694,
|
4509 |
-
"learning_rate": 3.0637422892392045e-07,
|
4510 |
-
"loss": 2.0628,
|
4511 |
-
"step": 642
|
4512 |
-
},
|
4513 |
-
{
|
4514 |
-
"epoch": 1.2537167925907873,
|
4515 |
-
"grad_norm": 0.4388466477394104,
|
4516 |
-
"learning_rate": 3.060315284441398e-07,
|
4517 |
-
"loss": 2.0716,
|
4518 |
-
"step": 643
|
4519 |
-
},
|
4520 |
-
{
|
4521 |
-
"epoch": 1.2556665854252986,
|
4522 |
-
"grad_norm": 0.48705416917800903,
|
4523 |
-
"learning_rate": 3.0568882796435915e-07,
|
4524 |
-
"loss": 1.9872,
|
4525 |
-
"step": 644
|
4526 |
-
},
|
4527 |
-
{
|
4528 |
-
"epoch": 1.2576163782598098,
|
4529 |
-
"grad_norm": 0.4618842899799347,
|
4530 |
-
"learning_rate": 3.0534612748457844e-07,
|
4531 |
-
"loss": 2.0306,
|
4532 |
-
"step": 645
|
4533 |
-
},
|
4534 |
-
{
|
4535 |
-
"epoch": 1.2595661710943213,
|
4536 |
-
"grad_norm": 0.46533843874931335,
|
4537 |
-
"learning_rate": 3.050034270047978e-07,
|
4538 |
-
"loss": 2.0827,
|
4539 |
-
"step": 646
|
4540 |
-
},
|
4541 |
-
{
|
4542 |
-
"epoch": 1.2615159639288325,
|
4543 |
-
"grad_norm": 0.4898700714111328,
|
4544 |
-
"learning_rate": 3.0466072652501714e-07,
|
4545 |
-
"loss": 1.9585,
|
4546 |
-
"step": 647
|
4547 |
-
},
|
4548 |
-
{
|
4549 |
-
"epoch": 1.2634657567633438,
|
4550 |
-
"grad_norm": 0.4561532735824585,
|
4551 |
-
"learning_rate": 3.0431802604523643e-07,
|
4552 |
-
"loss": 2.0689,
|
4553 |
-
"step": 648
|
4554 |
-
},
|
4555 |
-
{
|
4556 |
-
"epoch": 1.2654155495978552,
|
4557 |
-
"grad_norm": 0.4628736078739166,
|
4558 |
-
"learning_rate": 3.039753255654558e-07,
|
4559 |
-
"loss": 2.0307,
|
4560 |
-
"step": 649
|
4561 |
-
},
|
4562 |
-
{
|
4563 |
-
"epoch": 1.2673653424323665,
|
4564 |
-
"grad_norm": 0.4475798010826111,
|
4565 |
-
"learning_rate": 3.036326250856751e-07,
|
4566 |
-
"loss": 2.0372,
|
4567 |
-
"step": 650
|
4568 |
-
},
|
4569 |
-
{
|
4570 |
-
"epoch": 1.269315135266878,
|
4571 |
-
"grad_norm": 0.44448035955429077,
|
4572 |
-
"learning_rate": 3.032899246058944e-07,
|
4573 |
-
"loss": 2.0334,
|
4574 |
-
"step": 651
|
4575 |
-
},
|
4576 |
-
{
|
4577 |
-
"epoch": 1.2712649281013892,
|
4578 |
-
"grad_norm": 0.4554859697818756,
|
4579 |
-
"learning_rate": 3.0294722412611377e-07,
|
4580 |
-
"loss": 2.0487,
|
4581 |
-
"step": 652
|
4582 |
-
},
|
4583 |
-
{
|
4584 |
-
"epoch": 1.2732147209359006,
|
4585 |
-
"grad_norm": 0.44150403141975403,
|
4586 |
-
"learning_rate": 3.026045236463331e-07,
|
4587 |
-
"loss": 2.085,
|
4588 |
-
"step": 653
|
4589 |
-
},
|
4590 |
-
{
|
4591 |
-
"epoch": 1.275164513770412,
|
4592 |
-
"grad_norm": 0.4476960301399231,
|
4593 |
-
"learning_rate": 3.022618231665524e-07,
|
4594 |
-
"loss": 1.9762,
|
4595 |
-
"step": 654
|
4596 |
-
},
|
4597 |
-
{
|
4598 |
-
"epoch": 1.2771143066049233,
|
4599 |
-
"grad_norm": 0.4773290753364563,
|
4600 |
-
"learning_rate": 3.019191226867717e-07,
|
4601 |
-
"loss": 2.0565,
|
4602 |
-
"step": 655
|
4603 |
-
},
|
4604 |
-
{
|
4605 |
-
"epoch": 1.2790640994394344,
|
4606 |
-
"grad_norm": 0.43788987398147583,
|
4607 |
-
"learning_rate": 3.015764222069911e-07,
|
4608 |
-
"loss": 2.0629,
|
4609 |
-
"step": 656
|
4610 |
-
},
|
4611 |
-
{
|
4612 |
-
"epoch": 1.281013892273946,
|
4613 |
-
"grad_norm": 0.4314157962799072,
|
4614 |
-
"learning_rate": 3.012337217272104e-07,
|
4615 |
-
"loss": 2.0554,
|
4616 |
-
"step": 657
|
4617 |
-
},
|
4618 |
-
{
|
4619 |
-
"epoch": 1.2829636851084572,
|
4620 |
-
"grad_norm": 0.45381680130958557,
|
4621 |
-
"learning_rate": 3.008910212474297e-07,
|
4622 |
-
"loss": 2.0514,
|
4623 |
-
"step": 658
|
4624 |
-
},
|
4625 |
-
{
|
4626 |
-
"epoch": 1.2849134779429685,
|
4627 |
-
"grad_norm": 0.47213441133499146,
|
4628 |
-
"learning_rate": 3.005483207676491e-07,
|
4629 |
-
"loss": 2.0267,
|
4630 |
-
"step": 659
|
4631 |
-
},
|
4632 |
-
{
|
4633 |
-
"epoch": 1.2868632707774799,
|
4634 |
-
"grad_norm": 0.4460486173629761,
|
4635 |
-
"learning_rate": 3.002056202878684e-07,
|
4636 |
-
"loss": 2.0717,
|
4637 |
-
"step": 660
|
4638 |
-
},
|
4639 |
-
{
|
4640 |
-
"epoch": 1.2888130636119912,
|
4641 |
-
"grad_norm": 0.452747642993927,
|
4642 |
-
"learning_rate": 2.998629198080877e-07,
|
4643 |
-
"loss": 2.0634,
|
4644 |
-
"step": 661
|
4645 |
-
},
|
4646 |
-
{
|
4647 |
-
"epoch": 1.2907628564465026,
|
4648 |
-
"grad_norm": 0.4495120942592621,
|
4649 |
-
"learning_rate": 2.995202193283071e-07,
|
4650 |
-
"loss": 2.042,
|
4651 |
-
"step": 662
|
4652 |
-
},
|
4653 |
-
{
|
4654 |
-
"epoch": 1.292712649281014,
|
4655 |
-
"grad_norm": 0.433224081993103,
|
4656 |
-
"learning_rate": 2.991775188485264e-07,
|
4657 |
-
"loss": 2.0565,
|
4658 |
-
"step": 663
|
4659 |
-
},
|
4660 |
-
{
|
4661 |
-
"epoch": 1.2946624421155253,
|
4662 |
-
"grad_norm": 0.4596520960330963,
|
4663 |
-
"learning_rate": 2.9883481836874567e-07,
|
4664 |
-
"loss": 2.0272,
|
4665 |
-
"step": 664
|
4666 |
-
},
|
4667 |
-
{
|
4668 |
-
"epoch": 1.2966122349500366,
|
4669 |
-
"grad_norm": 0.433887243270874,
|
4670 |
-
"learning_rate": 2.9849211788896507e-07,
|
4671 |
-
"loss": 1.965,
|
4672 |
-
"step": 665
|
4673 |
-
},
|
4674 |
-
{
|
4675 |
-
"epoch": 1.298562027784548,
|
4676 |
-
"grad_norm": 0.44755810499191284,
|
4677 |
-
"learning_rate": 2.9814941740918437e-07,
|
4678 |
-
"loss": 1.9915,
|
4679 |
-
"step": 666
|
4680 |
-
},
|
4681 |
-
{
|
4682 |
-
"epoch": 1.3005118206190591,
|
4683 |
-
"grad_norm": 0.48203861713409424,
|
4684 |
-
"learning_rate": 2.9780671692940366e-07,
|
4685 |
-
"loss": 2.0296,
|
4686 |
-
"step": 667
|
4687 |
-
},
|
4688 |
-
{
|
4689 |
-
"epoch": 1.3024616134535705,
|
4690 |
-
"grad_norm": 0.4314959943294525,
|
4691 |
-
"learning_rate": 2.97464016449623e-07,
|
4692 |
-
"loss": 2.0282,
|
4693 |
-
"step": 668
|
4694 |
-
},
|
4695 |
-
{
|
4696 |
-
"epoch": 1.3044114062880818,
|
4697 |
-
"grad_norm": 0.4476211369037628,
|
4698 |
-
"learning_rate": 2.9712131596984236e-07,
|
4699 |
-
"loss": 2.0348,
|
4700 |
-
"step": 669
|
4701 |
-
},
|
4702 |
-
{
|
4703 |
-
"epoch": 1.3063611991225932,
|
4704 |
-
"grad_norm": 0.45356854796409607,
|
4705 |
-
"learning_rate": 2.9677861549006165e-07,
|
4706 |
-
"loss": 2.0369,
|
4707 |
-
"step": 670
|
4708 |
-
},
|
4709 |
-
{
|
4710 |
-
"epoch": 1.3083109919571045,
|
4711 |
-
"grad_norm": 0.4637032747268677,
|
4712 |
-
"learning_rate": 2.96435915010281e-07,
|
4713 |
-
"loss": 2.1002,
|
4714 |
-
"step": 671
|
4715 |
-
},
|
4716 |
-
{
|
4717 |
-
"epoch": 1.310260784791616,
|
4718 |
-
"grad_norm": 0.4258365333080292,
|
4719 |
-
"learning_rate": 2.9609321453050035e-07,
|
4720 |
-
"loss": 2.0184,
|
4721 |
-
"step": 672
|
4722 |
-
},
|
4723 |
-
{
|
4724 |
-
"epoch": 1.3122105776261273,
|
4725 |
-
"grad_norm": 0.4571716785430908,
|
4726 |
-
"learning_rate": 2.9575051405071964e-07,
|
4727 |
-
"loss": 2.0711,
|
4728 |
-
"step": 673
|
4729 |
-
},
|
4730 |
-
{
|
4731 |
-
"epoch": 1.3141603704606386,
|
4732 |
-
"grad_norm": 0.4479144215583801,
|
4733 |
-
"learning_rate": 2.95407813570939e-07,
|
4734 |
-
"loss": 2.1037,
|
4735 |
-
"step": 674
|
4736 |
-
},
|
4737 |
-
{
|
4738 |
-
"epoch": 1.31611016329515,
|
4739 |
-
"grad_norm": 0.463773638010025,
|
4740 |
-
"learning_rate": 2.9506511309115834e-07,
|
4741 |
-
"loss": 2.087,
|
4742 |
-
"step": 675
|
4743 |
-
},
|
4744 |
-
{
|
4745 |
-
"epoch": 1.318059956129661,
|
4746 |
-
"grad_norm": 0.4595959782600403,
|
4747 |
-
"learning_rate": 2.9472241261137763e-07,
|
4748 |
-
"loss": 2.0246,
|
4749 |
-
"step": 676
|
4750 |
-
},
|
4751 |
-
{
|
4752 |
-
"epoch": 1.3200097489641727,
|
4753 |
-
"grad_norm": 0.41977226734161377,
|
4754 |
-
"learning_rate": 2.94379712131597e-07,
|
4755 |
-
"loss": 2.0132,
|
4756 |
-
"step": 677
|
4757 |
-
},
|
4758 |
-
{
|
4759 |
-
"epoch": 1.3219595417986838,
|
4760 |
-
"grad_norm": 0.4429217576980591,
|
4761 |
-
"learning_rate": 2.940370116518163e-07,
|
4762 |
-
"loss": 2.0414,
|
4763 |
-
"step": 678
|
4764 |
-
},
|
4765 |
-
{
|
4766 |
-
"epoch": 1.3239093346331952,
|
4767 |
-
"grad_norm": 0.46036285161972046,
|
4768 |
-
"learning_rate": 2.936943111720356e-07,
|
4769 |
-
"loss": 2.0474,
|
4770 |
-
"step": 679
|
4771 |
-
},
|
4772 |
-
{
|
4773 |
-
"epoch": 1.3258591274677065,
|
4774 |
-
"grad_norm": 0.4518478512763977,
|
4775 |
-
"learning_rate": 2.9335161069225497e-07,
|
4776 |
-
"loss": 1.991,
|
4777 |
-
"step": 680
|
4778 |
-
},
|
4779 |
-
{
|
4780 |
-
"epoch": 1.3278089203022179,
|
4781 |
-
"grad_norm": 0.4507528841495514,
|
4782 |
-
"learning_rate": 2.9300891021247426e-07,
|
4783 |
-
"loss": 2.0038,
|
4784 |
-
"step": 681
|
4785 |
-
},
|
4786 |
-
{
|
4787 |
-
"epoch": 1.3297587131367292,
|
4788 |
-
"grad_norm": 0.45446595549583435,
|
4789 |
-
"learning_rate": 2.926662097326936e-07,
|
4790 |
-
"loss": 1.9257,
|
4791 |
-
"step": 682
|
4792 |
-
},
|
4793 |
-
{
|
4794 |
-
"epoch": 1.3317085059712406,
|
4795 |
-
"grad_norm": 0.45073091983795166,
|
4796 |
-
"learning_rate": 2.9232350925291296e-07,
|
4797 |
-
"loss": 2.0667,
|
4798 |
-
"step": 683
|
4799 |
-
},
|
4800 |
-
{
|
4801 |
-
"epoch": 1.333658298805752,
|
4802 |
-
"grad_norm": 0.43848779797554016,
|
4803 |
-
"learning_rate": 2.9198080877313225e-07,
|
4804 |
-
"loss": 2.0127,
|
4805 |
-
"step": 684
|
4806 |
-
},
|
4807 |
-
{
|
4808 |
-
"epoch": 1.3356080916402633,
|
4809 |
-
"grad_norm": 0.44587504863739014,
|
4810 |
-
"learning_rate": 2.916381082933516e-07,
|
4811 |
-
"loss": 2.0694,
|
4812 |
-
"step": 685
|
4813 |
-
},
|
4814 |
-
{
|
4815 |
-
"epoch": 1.3375578844747746,
|
4816 |
-
"grad_norm": 0.46157652139663696,
|
4817 |
-
"learning_rate": 2.9129540781357095e-07,
|
4818 |
-
"loss": 2.112,
|
4819 |
-
"step": 686
|
4820 |
-
},
|
4821 |
-
{
|
4822 |
-
"epoch": 1.3395076773092858,
|
4823 |
-
"grad_norm": 0.461897075176239,
|
4824 |
-
"learning_rate": 2.9095270733379024e-07,
|
4825 |
-
"loss": 2.0431,
|
4826 |
-
"step": 687
|
4827 |
-
},
|
4828 |
-
{
|
4829 |
-
"epoch": 1.3414574701437973,
|
4830 |
-
"grad_norm": 0.42506590485572815,
|
4831 |
-
"learning_rate": 2.906100068540096e-07,
|
4832 |
-
"loss": 2.0612,
|
4833 |
-
"step": 688
|
4834 |
-
},
|
4835 |
-
{
|
4836 |
-
"epoch": 1.3434072629783085,
|
4837 |
-
"grad_norm": 0.43368127942085266,
|
4838 |
-
"learning_rate": 2.9026730637422894e-07,
|
4839 |
-
"loss": 2.0253,
|
4840 |
-
"step": 689
|
4841 |
-
},
|
4842 |
-
{
|
4843 |
-
"epoch": 1.3453570558128198,
|
4844 |
-
"grad_norm": 0.4484082758426666,
|
4845 |
-
"learning_rate": 2.8992460589444823e-07,
|
4846 |
-
"loss": 1.9962,
|
4847 |
-
"step": 690
|
4848 |
-
},
|
4849 |
-
{
|
4850 |
-
"epoch": 1.3473068486473312,
|
4851 |
-
"grad_norm": 0.44570791721343994,
|
4852 |
-
"learning_rate": 2.895819054146676e-07,
|
4853 |
-
"loss": 2.018,
|
4854 |
-
"step": 691
|
4855 |
-
},
|
4856 |
-
{
|
4857 |
-
"epoch": 1.3492566414818425,
|
4858 |
-
"grad_norm": 0.4472144842147827,
|
4859 |
-
"learning_rate": 2.892392049348869e-07,
|
4860 |
-
"loss": 2.0254,
|
4861 |
-
"step": 692
|
4862 |
-
},
|
4863 |
-
{
|
4864 |
-
"epoch": 1.351206434316354,
|
4865 |
-
"grad_norm": 0.4680030047893524,
|
4866 |
-
"learning_rate": 2.888965044551062e-07,
|
4867 |
-
"loss": 2.1265,
|
4868 |
-
"step": 693
|
4869 |
-
},
|
4870 |
-
{
|
4871 |
-
"epoch": 1.3531562271508653,
|
4872 |
-
"grad_norm": 0.44323253631591797,
|
4873 |
-
"learning_rate": 2.885538039753255e-07,
|
4874 |
-
"loss": 2.0222,
|
4875 |
-
"step": 694
|
4876 |
-
},
|
4877 |
-
{
|
4878 |
-
"epoch": 1.3551060199853766,
|
4879 |
-
"grad_norm": 0.4732964038848877,
|
4880 |
-
"learning_rate": 2.882111034955449e-07,
|
4881 |
-
"loss": 2.0219,
|
4882 |
-
"step": 695
|
4883 |
-
},
|
4884 |
-
{
|
4885 |
-
"epoch": 1.357055812819888,
|
4886 |
-
"grad_norm": 0.4392209053039551,
|
4887 |
-
"learning_rate": 2.878684030157642e-07,
|
4888 |
-
"loss": 1.9841,
|
4889 |
-
"step": 696
|
4890 |
-
},
|
4891 |
-
{
|
4892 |
-
"epoch": 1.3590056056543993,
|
4893 |
-
"grad_norm": 0.46177539229393005,
|
4894 |
-
"learning_rate": 2.875257025359835e-07,
|
4895 |
-
"loss": 2.0461,
|
4896 |
-
"step": 697
|
4897 |
-
},
|
4898 |
-
{
|
4899 |
-
"epoch": 1.3609553984889105,
|
4900 |
-
"grad_norm": 0.4625999927520752,
|
4901 |
-
"learning_rate": 2.871830020562029e-07,
|
4902 |
-
"loss": 2.0137,
|
4903 |
-
"step": 698
|
4904 |
-
},
|
4905 |
-
{
|
4906 |
-
"epoch": 1.3629051913234218,
|
4907 |
-
"grad_norm": 0.43552806973457336,
|
4908 |
-
"learning_rate": 2.868403015764222e-07,
|
4909 |
-
"loss": 2.0408,
|
4910 |
-
"step": 699
|
4911 |
-
},
|
4912 |
-
{
|
4913 |
-
"epoch": 1.3648549841579332,
|
4914 |
-
"grad_norm": 0.47674480080604553,
|
4915 |
-
"learning_rate": 2.864976010966415e-07,
|
4916 |
-
"loss": 2.0021,
|
4917 |
-
"step": 700
|
4918 |
-
},
|
4919 |
-
{
|
4920 |
-
"epoch": 1.3668047769924445,
|
4921 |
-
"grad_norm": 0.46479421854019165,
|
4922 |
-
"learning_rate": 2.861549006168609e-07,
|
4923 |
-
"loss": 1.9898,
|
4924 |
-
"step": 701
|
4925 |
-
},
|
4926 |
-
{
|
4927 |
-
"epoch": 1.3687545698269559,
|
4928 |
-
"grad_norm": 0.4399622976779938,
|
4929 |
-
"learning_rate": 2.858122001370802e-07,
|
4930 |
-
"loss": 1.9638,
|
4931 |
-
"step": 702
|
4932 |
-
},
|
4933 |
-
{
|
4934 |
-
"epoch": 1.3707043626614672,
|
4935 |
-
"grad_norm": 0.442557692527771,
|
4936 |
-
"learning_rate": 2.854694996572995e-07,
|
4937 |
-
"loss": 2.0099,
|
4938 |
-
"step": 703
|
4939 |
-
},
|
4940 |
-
{
|
4941 |
-
"epoch": 1.3726541554959786,
|
4942 |
-
"grad_norm": 0.4601743817329407,
|
4943 |
-
"learning_rate": 2.851267991775189e-07,
|
4944 |
-
"loss": 2.057,
|
4945 |
-
"step": 704
|
4946 |
-
},
|
4947 |
-
{
|
4948 |
-
"epoch": 1.37460394833049,
|
4949 |
-
"grad_norm": 0.4959220290184021,
|
4950 |
-
"learning_rate": 2.847840986977382e-07,
|
4951 |
-
"loss": 2.092,
|
4952 |
-
"step": 705
|
4953 |
-
},
|
4954 |
-
{
|
4955 |
-
"epoch": 1.3765537411650013,
|
4956 |
-
"grad_norm": 0.40172404050827026,
|
4957 |
-
"learning_rate": 2.8444139821795747e-07,
|
4958 |
-
"loss": 2.0074,
|
4959 |
-
"step": 706
|
4960 |
-
},
|
4961 |
-
{
|
4962 |
-
"epoch": 1.3785035339995124,
|
4963 |
-
"grad_norm": 0.4572814404964447,
|
4964 |
-
"learning_rate": 2.840986977381768e-07,
|
4965 |
-
"loss": 1.9777,
|
4966 |
-
"step": 707
|
4967 |
-
},
|
4968 |
-
{
|
4969 |
-
"epoch": 1.380453326834024,
|
4970 |
-
"grad_norm": 0.4464624524116516,
|
4971 |
-
"learning_rate": 2.8375599725839617e-07,
|
4972 |
-
"loss": 2.0183,
|
4973 |
-
"step": 708
|
4974 |
-
},
|
4975 |
-
{
|
4976 |
-
"epoch": 1.3824031196685351,
|
4977 |
-
"grad_norm": 0.4498922526836395,
|
4978 |
-
"learning_rate": 2.8341329677861546e-07,
|
4979 |
-
"loss": 2.0975,
|
4980 |
-
"step": 709
|
4981 |
-
},
|
4982 |
-
{
|
4983 |
-
"epoch": 1.3843529125030465,
|
4984 |
-
"grad_norm": 0.4430985748767853,
|
4985 |
-
"learning_rate": 2.830705962988348e-07,
|
4986 |
-
"loss": 2.027,
|
4987 |
-
"step": 710
|
4988 |
-
},
|
4989 |
-
{
|
4990 |
-
"epoch": 1.3863027053375578,
|
4991 |
-
"grad_norm": 0.4422641694545746,
|
4992 |
-
"learning_rate": 2.8272789581905416e-07,
|
4993 |
-
"loss": 2.0625,
|
4994 |
-
"step": 711
|
4995 |
-
},
|
4996 |
-
{
|
4997 |
-
"epoch": 1.3882524981720692,
|
4998 |
-
"grad_norm": 0.46121206879615784,
|
4999 |
-
"learning_rate": 2.8238519533927345e-07,
|
5000 |
-
"loss": 2.0135,
|
5001 |
-
"step": 712
|
5002 |
-
},
|
5003 |
-
{
|
5004 |
-
"epoch": 1.3902022910065805,
|
5005 |
-
"grad_norm": 0.4685353934764862,
|
5006 |
-
"learning_rate": 2.820424948594928e-07,
|
5007 |
-
"loss": 2.071,
|
5008 |
-
"step": 713
|
5009 |
-
},
|
5010 |
-
{
|
5011 |
-
"epoch": 1.392152083841092,
|
5012 |
-
"grad_norm": 0.43733134865760803,
|
5013 |
-
"learning_rate": 2.8169979437971215e-07,
|
5014 |
-
"loss": 2.0531,
|
5015 |
-
"step": 714
|
5016 |
-
},
|
5017 |
-
{
|
5018 |
-
"epoch": 1.3941018766756033,
|
5019 |
-
"grad_norm": 0.4479463994503021,
|
5020 |
-
"learning_rate": 2.8135709389993144e-07,
|
5021 |
-
"loss": 2.0192,
|
5022 |
-
"step": 715
|
5023 |
-
},
|
5024 |
-
{
|
5025 |
-
"epoch": 1.3960516695101146,
|
5026 |
-
"grad_norm": 0.4477840065956116,
|
5027 |
-
"learning_rate": 2.810143934201508e-07,
|
5028 |
-
"loss": 2.0408,
|
5029 |
-
"step": 716
|
5030 |
-
},
|
5031 |
-
{
|
5032 |
-
"epoch": 1.398001462344626,
|
5033 |
-
"grad_norm": 0.44232964515686035,
|
5034 |
-
"learning_rate": 2.8067169294037014e-07,
|
5035 |
-
"loss": 2.0992,
|
5036 |
-
"step": 717
|
5037 |
-
},
|
5038 |
-
{
|
5039 |
-
"epoch": 1.399951255179137,
|
5040 |
-
"grad_norm": 0.4573095142841339,
|
5041 |
-
"learning_rate": 2.8032899246058943e-07,
|
5042 |
-
"loss": 1.9958,
|
5043 |
-
"step": 718
|
5044 |
-
},
|
5045 |
-
{
|
5046 |
-
"epoch": 1.4019010480136487,
|
5047 |
-
"grad_norm": 0.4734794497489929,
|
5048 |
-
"learning_rate": 2.799862919808088e-07,
|
5049 |
-
"loss": 2.0268,
|
5050 |
-
"step": 719
|
5051 |
-
},
|
5052 |
-
{
|
5053 |
-
"epoch": 1.4038508408481598,
|
5054 |
-
"grad_norm": 0.4753987193107605,
|
5055 |
-
"learning_rate": 2.7964359150102807e-07,
|
5056 |
-
"loss": 2.0436,
|
5057 |
-
"step": 720
|
5058 |
-
},
|
5059 |
-
{
|
5060 |
-
"epoch": 1.4058006336826712,
|
5061 |
-
"grad_norm": 0.4515923261642456,
|
5062 |
-
"learning_rate": 2.793008910212474e-07,
|
5063 |
-
"loss": 2.0018,
|
5064 |
-
"step": 721
|
5065 |
-
},
|
5066 |
-
{
|
5067 |
-
"epoch": 1.4077504265171825,
|
5068 |
-
"grad_norm": 0.45925289392471313,
|
5069 |
-
"learning_rate": 2.7895819054146677e-07,
|
5070 |
-
"loss": 2.0454,
|
5071 |
-
"step": 722
|
5072 |
-
},
|
5073 |
-
{
|
5074 |
-
"epoch": 1.4097002193516939,
|
5075 |
-
"grad_norm": 0.4684261083602905,
|
5076 |
-
"learning_rate": 2.7861549006168606e-07,
|
5077 |
-
"loss": 2.0355,
|
5078 |
-
"step": 723
|
5079 |
-
},
|
5080 |
-
{
|
5081 |
-
"epoch": 1.4116500121862052,
|
5082 |
-
"grad_norm": 0.4723130464553833,
|
5083 |
-
"learning_rate": 2.782727895819054e-07,
|
5084 |
-
"loss": 2.0189,
|
5085 |
-
"step": 724
|
5086 |
-
},
|
5087 |
-
{
|
5088 |
-
"epoch": 1.4135998050207166,
|
5089 |
-
"grad_norm": 0.43946054577827454,
|
5090 |
-
"learning_rate": 2.7793008910212476e-07,
|
5091 |
-
"loss": 2.0165,
|
5092 |
-
"step": 725
|
5093 |
-
},
|
5094 |
-
{
|
5095 |
-
"epoch": 1.415549597855228,
|
5096 |
-
"grad_norm": 0.45172879099845886,
|
5097 |
-
"learning_rate": 2.7758738862234405e-07,
|
5098 |
-
"loss": 1.9966,
|
5099 |
-
"step": 726
|
5100 |
-
},
|
5101 |
-
{
|
5102 |
-
"epoch": 1.4174993906897393,
|
5103 |
-
"grad_norm": 0.4361145496368408,
|
5104 |
-
"learning_rate": 2.772446881425634e-07,
|
5105 |
-
"loss": 1.982,
|
5106 |
-
"step": 727
|
5107 |
-
},
|
5108 |
-
{
|
5109 |
-
"epoch": 1.4194491835242506,
|
5110 |
-
"grad_norm": 0.4422454237937927,
|
5111 |
-
"learning_rate": 2.7690198766278275e-07,
|
5112 |
-
"loss": 2.0032,
|
5113 |
-
"step": 728
|
5114 |
-
},
|
5115 |
-
{
|
5116 |
-
"epoch": 1.4213989763587618,
|
5117 |
-
"grad_norm": 0.4438495934009552,
|
5118 |
-
"learning_rate": 2.7655928718300204e-07,
|
5119 |
-
"loss": 2.0198,
|
5120 |
-
"step": 729
|
5121 |
-
},
|
5122 |
-
{
|
5123 |
-
"epoch": 1.4233487691932731,
|
5124 |
-
"grad_norm": 0.4422749876976013,
|
5125 |
-
"learning_rate": 2.762165867032214e-07,
|
5126 |
-
"loss": 1.992,
|
5127 |
-
"step": 730
|
5128 |
-
},
|
5129 |
-
{
|
5130 |
-
"epoch": 1.4252985620277845,
|
5131 |
-
"grad_norm": 0.4652174115180969,
|
5132 |
-
"learning_rate": 2.7587388622344074e-07,
|
5133 |
-
"loss": 2.0345,
|
5134 |
-
"step": 731
|
5135 |
-
},
|
5136 |
-
{
|
5137 |
-
"epoch": 1.4272483548622958,
|
5138 |
-
"grad_norm": 0.46277597546577454,
|
5139 |
-
"learning_rate": 2.7553118574366003e-07,
|
5140 |
-
"loss": 2.0406,
|
5141 |
-
"step": 732
|
5142 |
-
},
|
5143 |
-
{
|
5144 |
-
"epoch": 1.4291981476968072,
|
5145 |
-
"grad_norm": 0.45579442381858826,
|
5146 |
-
"learning_rate": 2.751884852638793e-07,
|
5147 |
-
"loss": 2.0671,
|
5148 |
-
"step": 733
|
5149 |
-
},
|
5150 |
-
{
|
5151 |
-
"epoch": 1.4311479405313186,
|
5152 |
-
"grad_norm": 0.43527230620384216,
|
5153 |
-
"learning_rate": 2.748457847840987e-07,
|
5154 |
-
"loss": 2.0433,
|
5155 |
-
"step": 734
|
5156 |
-
},
|
5157 |
-
{
|
5158 |
-
"epoch": 1.43309773336583,
|
5159 |
-
"grad_norm": 0.4699551463127136,
|
5160 |
-
"learning_rate": 2.74503084304318e-07,
|
5161 |
-
"loss": 2.0366,
|
5162 |
-
"step": 735
|
5163 |
-
},
|
5164 |
-
{
|
5165 |
-
"epoch": 1.4350475262003413,
|
5166 |
-
"grad_norm": 0.4446089565753937,
|
5167 |
-
"learning_rate": 2.741603838245373e-07,
|
5168 |
-
"loss": 1.9986,
|
5169 |
-
"step": 736
|
5170 |
-
},
|
5171 |
-
{
|
5172 |
-
"epoch": 1.4369973190348526,
|
5173 |
-
"grad_norm": 0.4645906686782837,
|
5174 |
-
"learning_rate": 2.738176833447567e-07,
|
5175 |
-
"loss": 2.1331,
|
5176 |
-
"step": 737
|
5177 |
-
},
|
5178 |
-
{
|
5179 |
-
"epoch": 1.4389471118693637,
|
5180 |
-
"grad_norm": 0.46871501207351685,
|
5181 |
-
"learning_rate": 2.73474982864976e-07,
|
5182 |
-
"loss": 2.0402,
|
5183 |
-
"step": 738
|
5184 |
-
},
|
5185 |
-
{
|
5186 |
-
"epoch": 1.4408969047038753,
|
5187 |
-
"grad_norm": 0.4507101774215698,
|
5188 |
-
"learning_rate": 2.731322823851953e-07,
|
5189 |
-
"loss": 2.0027,
|
5190 |
-
"step": 739
|
5191 |
-
},
|
5192 |
-
{
|
5193 |
-
"epoch": 1.4428466975383865,
|
5194 |
-
"grad_norm": 0.4642309546470642,
|
5195 |
-
"learning_rate": 2.727895819054147e-07,
|
5196 |
-
"loss": 2.0613,
|
5197 |
-
"step": 740
|
5198 |
-
},
|
5199 |
-
{
|
5200 |
-
"epoch": 1.4447964903728978,
|
5201 |
-
"grad_norm": 0.4762292206287384,
|
5202 |
-
"learning_rate": 2.72446881425634e-07,
|
5203 |
-
"loss": 2.0315,
|
5204 |
-
"step": 741
|
5205 |
-
},
|
5206 |
-
{
|
5207 |
-
"epoch": 1.4467462832074092,
|
5208 |
-
"grad_norm": 0.4549463391304016,
|
5209 |
-
"learning_rate": 2.721041809458533e-07,
|
5210 |
-
"loss": 2.0492,
|
5211 |
-
"step": 742
|
5212 |
-
},
|
5213 |
-
{
|
5214 |
-
"epoch": 1.4486960760419205,
|
5215 |
-
"grad_norm": 0.4566596448421478,
|
5216 |
-
"learning_rate": 2.717614804660727e-07,
|
5217 |
-
"loss": 1.9571,
|
5218 |
-
"step": 743
|
5219 |
-
},
|
5220 |
-
{
|
5221 |
-
"epoch": 1.4506458688764319,
|
5222 |
-
"grad_norm": 0.4666212797164917,
|
5223 |
-
"learning_rate": 2.71418779986292e-07,
|
5224 |
-
"loss": 1.9897,
|
5225 |
-
"step": 744
|
5226 |
-
},
|
5227 |
-
{
|
5228 |
-
"epoch": 1.4525956617109432,
|
5229 |
-
"grad_norm": 0.45651644468307495,
|
5230 |
-
"learning_rate": 2.710760795065113e-07,
|
5231 |
-
"loss": 2.0471,
|
5232 |
-
"step": 745
|
5233 |
-
},
|
5234 |
-
{
|
5235 |
-
"epoch": 1.4545454545454546,
|
5236 |
-
"grad_norm": 0.43935099244117737,
|
5237 |
-
"learning_rate": 2.707333790267306e-07,
|
5238 |
-
"loss": 1.9525,
|
5239 |
-
"step": 746
|
5240 |
-
},
|
5241 |
-
{
|
5242 |
-
"epoch": 1.456495247379966,
|
5243 |
-
"grad_norm": 0.4813799560070038,
|
5244 |
-
"learning_rate": 2.7039067854695e-07,
|
5245 |
-
"loss": 2.0396,
|
5246 |
-
"step": 747
|
5247 |
-
},
|
5248 |
-
{
|
5249 |
-
"epoch": 1.4584450402144773,
|
5250 |
-
"grad_norm": 0.4743799567222595,
|
5251 |
-
"learning_rate": 2.7004797806716927e-07,
|
5252 |
-
"loss": 2.0824,
|
5253 |
-
"step": 748
|
5254 |
-
},
|
5255 |
-
{
|
5256 |
-
"epoch": 1.4603948330489884,
|
5257 |
-
"grad_norm": 0.4927983283996582,
|
5258 |
-
"learning_rate": 2.6970527758738857e-07,
|
5259 |
-
"loss": 2.0257,
|
5260 |
-
"step": 749
|
5261 |
-
},
|
5262 |
-
{
|
5263 |
-
"epoch": 1.4623446258835,
|
5264 |
-
"grad_norm": 0.4711035192012787,
|
5265 |
-
"learning_rate": 2.6936257710760797e-07,
|
5266 |
-
"loss": 2.0487,
|
5267 |
-
"step": 750
|
5268 |
-
},
|
5269 |
-
{
|
5270 |
-
"epoch": 1.4642944187180111,
|
5271 |
-
"grad_norm": 0.4515864849090576,
|
5272 |
-
"learning_rate": 2.6901987662782726e-07,
|
5273 |
-
"loss": 2.0244,
|
5274 |
-
"step": 751
|
5275 |
-
},
|
5276 |
-
{
|
5277 |
-
"epoch": 1.4662442115525225,
|
5278 |
-
"grad_norm": 0.46076542139053345,
|
5279 |
-
"learning_rate": 2.6867717614804656e-07,
|
5280 |
-
"loss": 2.07,
|
5281 |
-
"step": 752
|
5282 |
-
},
|
5283 |
-
{
|
5284 |
-
"epoch": 1.4681940043870338,
|
5285 |
-
"grad_norm": 0.44762691855430603,
|
5286 |
-
"learning_rate": 2.6833447566826596e-07,
|
5287 |
-
"loss": 2.0297,
|
5288 |
-
"step": 753
|
5289 |
-
},
|
5290 |
-
{
|
5291 |
-
"epoch": 1.4701437972215452,
|
5292 |
-
"grad_norm": 0.4801499843597412,
|
5293 |
-
"learning_rate": 2.6799177518848525e-07,
|
5294 |
-
"loss": 2.0683,
|
5295 |
-
"step": 754
|
5296 |
-
},
|
5297 |
-
{
|
5298 |
-
"epoch": 1.4720935900560566,
|
5299 |
-
"grad_norm": 0.45053598284721375,
|
5300 |
-
"learning_rate": 2.6764907470870455e-07,
|
5301 |
-
"loss": 1.9783,
|
5302 |
-
"step": 755
|
5303 |
-
},
|
5304 |
-
{
|
5305 |
-
"epoch": 1.474043382890568,
|
5306 |
-
"grad_norm": 0.45730066299438477,
|
5307 |
-
"learning_rate": 2.673063742289239e-07,
|
5308 |
-
"loss": 2.0548,
|
5309 |
-
"step": 756
|
5310 |
-
},
|
5311 |
-
{
|
5312 |
-
"epoch": 1.4759931757250793,
|
5313 |
-
"grad_norm": 0.4543995261192322,
|
5314 |
-
"learning_rate": 2.6696367374914324e-07,
|
5315 |
-
"loss": 2.0306,
|
5316 |
-
"step": 757
|
5317 |
-
},
|
5318 |
-
{
|
5319 |
-
"epoch": 1.4779429685595906,
|
5320 |
-
"grad_norm": 0.4372531473636627,
|
5321 |
-
"learning_rate": 2.6662097326936254e-07,
|
5322 |
-
"loss": 2.0164,
|
5323 |
-
"step": 758
|
5324 |
-
},
|
5325 |
-
{
|
5326 |
-
"epoch": 1.479892761394102,
|
5327 |
-
"grad_norm": 0.44617414474487305,
|
5328 |
-
"learning_rate": 2.662782727895819e-07,
|
5329 |
-
"loss": 1.9891,
|
5330 |
-
"step": 759
|
5331 |
-
},
|
5332 |
-
{
|
5333 |
-
"epoch": 1.481842554228613,
|
5334 |
-
"grad_norm": 0.4605617821216583,
|
5335 |
-
"learning_rate": 2.6593557230980123e-07,
|
5336 |
-
"loss": 2.01,
|
5337 |
-
"step": 760
|
5338 |
-
},
|
5339 |
-
{
|
5340 |
-
"epoch": 1.4837923470631245,
|
5341 |
-
"grad_norm": 0.4638999402523041,
|
5342 |
-
"learning_rate": 2.655928718300205e-07,
|
5343 |
-
"loss": 2.0685,
|
5344 |
-
"step": 761
|
5345 |
-
},
|
5346 |
-
{
|
5347 |
-
"epoch": 1.4857421398976358,
|
5348 |
-
"grad_norm": 0.4548538327217102,
|
5349 |
-
"learning_rate": 2.6525017135023987e-07,
|
5350 |
-
"loss": 2.0665,
|
5351 |
-
"step": 762
|
5352 |
-
},
|
5353 |
-
{
|
5354 |
-
"epoch": 1.4876919327321472,
|
5355 |
-
"grad_norm": 0.44948044419288635,
|
5356 |
-
"learning_rate": 2.649074708704592e-07,
|
5357 |
-
"loss": 1.9921,
|
5358 |
-
"step": 763
|
5359 |
-
},
|
5360 |
-
{
|
5361 |
-
"epoch": 1.4896417255666585,
|
5362 |
-
"grad_norm": 0.4577581286430359,
|
5363 |
-
"learning_rate": 2.645647703906785e-07,
|
5364 |
-
"loss": 2.0392,
|
5365 |
-
"step": 764
|
5366 |
-
},
|
5367 |
-
{
|
5368 |
-
"epoch": 1.4915915184011699,
|
5369 |
-
"grad_norm": 0.4821256101131439,
|
5370 |
-
"learning_rate": 2.6422206991089786e-07,
|
5371 |
-
"loss": 2.1304,
|
5372 |
-
"step": 765
|
5373 |
-
},
|
5374 |
-
{
|
5375 |
-
"epoch": 1.4935413112356812,
|
5376 |
-
"grad_norm": 0.48839786648750305,
|
5377 |
-
"learning_rate": 2.638793694311172e-07,
|
5378 |
-
"loss": 2.0773,
|
5379 |
-
"step": 766
|
5380 |
-
},
|
5381 |
-
{
|
5382 |
-
"epoch": 1.4954911040701926,
|
5383 |
-
"grad_norm": 0.43702590465545654,
|
5384 |
-
"learning_rate": 2.635366689513365e-07,
|
5385 |
-
"loss": 2.02,
|
5386 |
-
"step": 767
|
5387 |
-
},
|
5388 |
-
{
|
5389 |
-
"epoch": 1.497440896904704,
|
5390 |
-
"grad_norm": 0.45477136969566345,
|
5391 |
-
"learning_rate": 2.6319396847155585e-07,
|
5392 |
-
"loss": 1.9962,
|
5393 |
-
"step": 768
|
5394 |
-
},
|
5395 |
-
{
|
5396 |
-
"epoch": 1.499390689739215,
|
5397 |
-
"grad_norm": 0.47229456901550293,
|
5398 |
-
"learning_rate": 2.6285126799177515e-07,
|
5399 |
-
"loss": 2.0281,
|
5400 |
-
"step": 769
|
5401 |
-
},
|
5402 |
-
{
|
5403 |
-
"epoch": 1.5013404825737267,
|
5404 |
-
"grad_norm": 0.4817400276660919,
|
5405 |
-
"learning_rate": 2.625085675119945e-07,
|
5406 |
-
"loss": 2.1009,
|
5407 |
-
"step": 770
|
5408 |
-
},
|
5409 |
-
{
|
5410 |
-
"epoch": 1.5032902754082378,
|
5411 |
-
"grad_norm": 0.4645569324493408,
|
5412 |
-
"learning_rate": 2.6216586703221384e-07,
|
5413 |
-
"loss": 2.083,
|
5414 |
-
"step": 771
|
5415 |
-
},
|
5416 |
-
{
|
5417 |
-
"epoch": 1.5052400682427494,
|
5418 |
-
"grad_norm": 0.44810667634010315,
|
5419 |
-
"learning_rate": 2.6182316655243314e-07,
|
5420 |
-
"loss": 2.09,
|
5421 |
-
"step": 772
|
5422 |
-
},
|
5423 |
-
{
|
5424 |
-
"epoch": 1.5071898610772605,
|
5425 |
-
"grad_norm": 0.44432902336120605,
|
5426 |
-
"learning_rate": 2.614804660726525e-07,
|
5427 |
-
"loss": 2.0126,
|
5428 |
-
"step": 773
|
5429 |
-
},
|
5430 |
-
{
|
5431 |
-
"epoch": 1.5091396539117719,
|
5432 |
-
"grad_norm": 0.4630286991596222,
|
5433 |
-
"learning_rate": 2.6113776559287183e-07,
|
5434 |
-
"loss": 2.0136,
|
5435 |
-
"step": 774
|
5436 |
-
},
|
5437 |
-
{
|
5438 |
-
"epoch": 1.5110894467462832,
|
5439 |
-
"grad_norm": 0.44443148374557495,
|
5440 |
-
"learning_rate": 2.607950651130911e-07,
|
5441 |
-
"loss": 1.9979,
|
5442 |
-
"step": 775
|
5443 |
-
},
|
5444 |
-
{
|
5445 |
-
"epoch": 1.5130392395807946,
|
5446 |
-
"grad_norm": 0.44903403520584106,
|
5447 |
-
"learning_rate": 2.6045236463331047e-07,
|
5448 |
-
"loss": 1.9788,
|
5449 |
-
"step": 776
|
5450 |
-
},
|
5451 |
-
{
|
5452 |
-
"epoch": 1.514989032415306,
|
5453 |
-
"grad_norm": 0.45394134521484375,
|
5454 |
-
"learning_rate": 2.601096641535298e-07,
|
5455 |
-
"loss": 1.9529,
|
5456 |
-
"step": 777
|
5457 |
-
},
|
5458 |
-
{
|
5459 |
-
"epoch": 1.516938825249817,
|
5460 |
-
"grad_norm": 0.46713778376579285,
|
5461 |
-
"learning_rate": 2.597669636737491e-07,
|
5462 |
-
"loss": 2.0212,
|
5463 |
-
"step": 778
|
5464 |
-
},
|
5465 |
-
{
|
5466 |
-
"epoch": 1.5188886180843286,
|
5467 |
-
"grad_norm": 0.45262840390205383,
|
5468 |
-
"learning_rate": 2.5942426319396846e-07,
|
5469 |
-
"loss": 2.0723,
|
5470 |
-
"step": 779
|
5471 |
-
},
|
5472 |
-
{
|
5473 |
-
"epoch": 1.5208384109188398,
|
5474 |
-
"grad_norm": 0.4648626446723938,
|
5475 |
-
"learning_rate": 2.590815627141878e-07,
|
5476 |
-
"loss": 2.0046,
|
5477 |
-
"step": 780
|
5478 |
-
},
|
5479 |
-
{
|
5480 |
-
"epoch": 1.5227882037533513,
|
5481 |
-
"grad_norm": 0.4754423201084137,
|
5482 |
-
"learning_rate": 2.587388622344071e-07,
|
5483 |
-
"loss": 2.0434,
|
5484 |
-
"step": 781
|
5485 |
-
},
|
5486 |
-
{
|
5487 |
-
"epoch": 1.5247379965878625,
|
5488 |
-
"grad_norm": 0.4271760880947113,
|
5489 |
-
"learning_rate": 2.583961617546264e-07,
|
5490 |
-
"loss": 2.0843,
|
5491 |
-
"step": 782
|
5492 |
-
},
|
5493 |
-
{
|
5494 |
-
"epoch": 1.5266877894223738,
|
5495 |
-
"grad_norm": 0.48139727115631104,
|
5496 |
-
"learning_rate": 2.580534612748458e-07,
|
5497 |
-
"loss": 2.098,
|
5498 |
-
"step": 783
|
5499 |
-
},
|
5500 |
-
{
|
5501 |
-
"epoch": 1.5286375822568852,
|
5502 |
-
"grad_norm": 0.473366379737854,
|
5503 |
-
"learning_rate": 2.577107607950651e-07,
|
5504 |
-
"loss": 2.0422,
|
5505 |
-
"step": 784
|
5506 |
-
},
|
5507 |
-
{
|
5508 |
-
"epoch": 1.5305873750913965,
|
5509 |
-
"grad_norm": 0.4580918848514557,
|
5510 |
-
"learning_rate": 2.573680603152844e-07,
|
5511 |
-
"loss": 2.006,
|
5512 |
-
"step": 785
|
5513 |
-
},
|
5514 |
-
{
|
5515 |
-
"epoch": 1.5325371679259079,
|
5516 |
-
"grad_norm": 0.4635441303253174,
|
5517 |
-
"learning_rate": 2.570253598355038e-07,
|
5518 |
-
"loss": 1.9736,
|
5519 |
-
"step": 786
|
5520 |
-
},
|
5521 |
-
{
|
5522 |
-
"epoch": 1.5344869607604192,
|
5523 |
-
"grad_norm": 0.4621422290802002,
|
5524 |
-
"learning_rate": 2.566826593557231e-07,
|
5525 |
-
"loss": 2.1078,
|
5526 |
-
"step": 787
|
5527 |
-
},
|
5528 |
-
{
|
5529 |
-
"epoch": 1.5364367535949306,
|
5530 |
-
"grad_norm": 0.4151935279369354,
|
5531 |
-
"learning_rate": 2.563399588759424e-07,
|
5532 |
-
"loss": 2.0092,
|
5533 |
-
"step": 788
|
5534 |
-
},
|
5535 |
-
{
|
5536 |
-
"epoch": 1.5383865464294417,
|
5537 |
-
"grad_norm": 0.4793336093425751,
|
5538 |
-
"learning_rate": 2.559972583961618e-07,
|
5539 |
-
"loss": 2.0173,
|
5540 |
-
"step": 789
|
5541 |
-
},
|
5542 |
-
{
|
5543 |
-
"epoch": 1.5403363392639533,
|
5544 |
-
"grad_norm": 0.4768364429473877,
|
5545 |
-
"learning_rate": 2.5565455791638107e-07,
|
5546 |
-
"loss": 2.0813,
|
5547 |
-
"step": 790
|
5548 |
-
},
|
5549 |
-
{
|
5550 |
-
"epoch": 1.5422861320984644,
|
5551 |
-
"grad_norm": 0.452411949634552,
|
5552 |
-
"learning_rate": 2.5531185743660037e-07,
|
5553 |
-
"loss": 2.0527,
|
5554 |
-
"step": 791
|
5555 |
-
},
|
5556 |
-
{
|
5557 |
-
"epoch": 1.544235924932976,
|
5558 |
-
"grad_norm": 0.44334676861763,
|
5559 |
-
"learning_rate": 2.5496915695681977e-07,
|
5560 |
-
"loss": 1.9701,
|
5561 |
-
"step": 792
|
5562 |
-
},
|
5563 |
-
{
|
5564 |
-
"epoch": 1.5461857177674871,
|
5565 |
-
"grad_norm": 0.4465942978858948,
|
5566 |
-
"learning_rate": 2.5462645647703906e-07,
|
5567 |
-
"loss": 1.9905,
|
5568 |
-
"step": 793
|
5569 |
-
},
|
5570 |
-
{
|
5571 |
-
"epoch": 1.5481355106019985,
|
5572 |
-
"grad_norm": 0.4681743085384369,
|
5573 |
-
"learning_rate": 2.5428375599725836e-07,
|
5574 |
-
"loss": 2.0654,
|
5575 |
-
"step": 794
|
5576 |
-
},
|
5577 |
-
{
|
5578 |
-
"epoch": 1.5500853034365099,
|
5579 |
-
"grad_norm": 0.46780961751937866,
|
5580 |
-
"learning_rate": 2.539410555174777e-07,
|
5581 |
-
"loss": 2.0336,
|
5582 |
-
"step": 795
|
5583 |
-
},
|
5584 |
-
{
|
5585 |
-
"epoch": 1.5520350962710212,
|
5586 |
-
"grad_norm": 0.44133254885673523,
|
5587 |
-
"learning_rate": 2.5359835503769705e-07,
|
5588 |
-
"loss": 1.9668,
|
5589 |
-
"step": 796
|
5590 |
-
},
|
5591 |
-
{
|
5592 |
-
"epoch": 1.5539848891055326,
|
5593 |
-
"grad_norm": 0.45011645555496216,
|
5594 |
-
"learning_rate": 2.5325565455791635e-07,
|
5595 |
-
"loss": 2.0099,
|
5596 |
-
"step": 797
|
5597 |
-
},
|
5598 |
-
{
|
5599 |
-
"epoch": 1.555934681940044,
|
5600 |
-
"grad_norm": 0.41162246465682983,
|
5601 |
-
"learning_rate": 2.529129540781357e-07,
|
5602 |
-
"loss": 1.9684,
|
5603 |
-
"step": 798
|
5604 |
-
},
|
5605 |
-
{
|
5606 |
-
"epoch": 1.5578844747745553,
|
5607 |
-
"grad_norm": 0.438760906457901,
|
5608 |
-
"learning_rate": 2.5257025359835504e-07,
|
5609 |
-
"loss": 1.9934,
|
5610 |
-
"step": 799
|
5611 |
-
},
|
5612 |
-
{
|
5613 |
-
"epoch": 1.5598342676090664,
|
5614 |
-
"grad_norm": 0.45921608805656433,
|
5615 |
-
"learning_rate": 2.5222755311857434e-07,
|
5616 |
-
"loss": 2.0447,
|
5617 |
-
"step": 800
|
5618 |
-
},
|
5619 |
-
{
|
5620 |
-
"epoch": 1.561784060443578,
|
5621 |
-
"grad_norm": 0.4474433958530426,
|
5622 |
-
"learning_rate": 2.518848526387937e-07,
|
5623 |
-
"loss": 2.0508,
|
5624 |
-
"step": 801
|
5625 |
-
},
|
5626 |
-
{
|
5627 |
-
"epoch": 1.5637338532780891,
|
5628 |
-
"grad_norm": 0.42901015281677246,
|
5629 |
-
"learning_rate": 2.5154215215901303e-07,
|
5630 |
-
"loss": 2.0607,
|
5631 |
-
"step": 802
|
5632 |
-
},
|
5633 |
-
{
|
5634 |
-
"epoch": 1.5656836461126007,
|
5635 |
-
"grad_norm": 0.4604319632053375,
|
5636 |
-
"learning_rate": 2.511994516792323e-07,
|
5637 |
-
"loss": 2.0142,
|
5638 |
-
"step": 803
|
5639 |
-
},
|
5640 |
-
{
|
5641 |
-
"epoch": 1.5676334389471118,
|
5642 |
-
"grad_norm": 0.4305102527141571,
|
5643 |
-
"learning_rate": 2.5085675119945167e-07,
|
5644 |
-
"loss": 1.9828,
|
5645 |
-
"step": 804
|
5646 |
-
},
|
5647 |
-
{
|
5648 |
-
"epoch": 1.5695832317816232,
|
5649 |
-
"grad_norm": 0.4656990170478821,
|
5650 |
-
"learning_rate": 2.50514050719671e-07,
|
5651 |
-
"loss": 2.0302,
|
5652 |
-
"step": 805
|
5653 |
-
},
|
5654 |
-
{
|
5655 |
-
"epoch": 1.5715330246161345,
|
5656 |
-
"grad_norm": 0.4602496325969696,
|
5657 |
-
"learning_rate": 2.501713502398903e-07,
|
5658 |
-
"loss": 2.0412,
|
5659 |
-
"step": 806
|
5660 |
-
},
|
5661 |
-
{
|
5662 |
-
"epoch": 1.5734828174506459,
|
5663 |
-
"grad_norm": 0.4626891314983368,
|
5664 |
-
"learning_rate": 2.4982864976010966e-07,
|
5665 |
-
"loss": 2.0513,
|
5666 |
-
"step": 807
|
5667 |
-
},
|
5668 |
-
{
|
5669 |
-
"epoch": 1.5754326102851572,
|
5670 |
-
"grad_norm": 0.4671951234340668,
|
5671 |
-
"learning_rate": 2.4948594928032896e-07,
|
5672 |
-
"loss": 2.003,
|
5673 |
-
"step": 808
|
5674 |
-
},
|
5675 |
-
{
|
5676 |
-
"epoch": 1.5773824031196684,
|
5677 |
-
"grad_norm": 0.4399751126766205,
|
5678 |
-
"learning_rate": 2.491432488005483e-07,
|
5679 |
-
"loss": 2.0532,
|
5680 |
-
"step": 809
|
5681 |
-
},
|
5682 |
-
{
|
5683 |
-
"epoch": 1.57933219595418,
|
5684 |
-
"grad_norm": 0.4228038191795349,
|
5685 |
-
"learning_rate": 2.4880054832076765e-07,
|
5686 |
-
"loss": 2.0078,
|
5687 |
-
"step": 810
|
5688 |
-
},
|
5689 |
-
{
|
5690 |
-
"epoch": 1.581281988788691,
|
5691 |
-
"grad_norm": 0.4445479214191437,
|
5692 |
-
"learning_rate": 2.4845784784098695e-07,
|
5693 |
-
"loss": 2.0142,
|
5694 |
-
"step": 811
|
5695 |
-
},
|
5696 |
-
{
|
5697 |
-
"epoch": 1.5832317816232027,
|
5698 |
-
"grad_norm": 0.4397488534450531,
|
5699 |
-
"learning_rate": 2.481151473612063e-07,
|
5700 |
-
"loss": 2.0468,
|
5701 |
-
"step": 812
|
5702 |
-
},
|
5703 |
-
{
|
5704 |
-
"epoch": 1.5851815744577138,
|
5705 |
-
"grad_norm": 0.48187440633773804,
|
5706 |
-
"learning_rate": 2.4777244688142564e-07,
|
5707 |
-
"loss": 2.0444,
|
5708 |
-
"step": 813
|
5709 |
-
},
|
5710 |
-
{
|
5711 |
-
"epoch": 1.5871313672922251,
|
5712 |
-
"grad_norm": 0.4355807304382324,
|
5713 |
-
"learning_rate": 2.4742974640164494e-07,
|
5714 |
-
"loss": 1.9955,
|
5715 |
-
"step": 814
|
5716 |
-
},
|
5717 |
-
{
|
5718 |
-
"epoch": 1.5890811601267365,
|
5719 |
-
"grad_norm": 0.4219972491264343,
|
5720 |
-
"learning_rate": 2.470870459218643e-07,
|
5721 |
-
"loss": 1.9971,
|
5722 |
-
"step": 815
|
5723 |
-
},
|
5724 |
-
{
|
5725 |
-
"epoch": 1.5910309529612479,
|
5726 |
-
"grad_norm": 0.44700267910957336,
|
5727 |
-
"learning_rate": 2.4674434544208363e-07,
|
5728 |
-
"loss": 2.0297,
|
5729 |
-
"step": 816
|
5730 |
-
},
|
5731 |
-
{
|
5732 |
-
"epoch": 1.5929807457957592,
|
5733 |
-
"grad_norm": 0.45433923602104187,
|
5734 |
-
"learning_rate": 2.464016449623029e-07,
|
5735 |
-
"loss": 2.0064,
|
5736 |
-
"step": 817
|
5737 |
-
},
|
5738 |
-
{
|
5739 |
-
"epoch": 1.5949305386302706,
|
5740 |
-
"grad_norm": 0.4188825488090515,
|
5741 |
-
"learning_rate": 2.4605894448252227e-07,
|
5742 |
-
"loss": 2.0236,
|
5743 |
-
"step": 818
|
5744 |
-
},
|
5745 |
-
{
|
5746 |
-
"epoch": 1.596880331464782,
|
5747 |
-
"grad_norm": 0.4635048508644104,
|
5748 |
-
"learning_rate": 2.457162440027416e-07,
|
5749 |
-
"loss": 2.0652,
|
5750 |
-
"step": 819
|
5751 |
-
},
|
5752 |
-
{
|
5753 |
-
"epoch": 1.598830124299293,
|
5754 |
-
"grad_norm": 0.4555036127567291,
|
5755 |
-
"learning_rate": 2.453735435229609e-07,
|
5756 |
-
"loss": 2.079,
|
5757 |
-
"step": 820
|
5758 |
-
},
|
5759 |
-
{
|
5760 |
-
"epoch": 1.6007799171338046,
|
5761 |
-
"grad_norm": 0.45152541995048523,
|
5762 |
-
"learning_rate": 2.4503084304318026e-07,
|
5763 |
-
"loss": 1.9724,
|
5764 |
-
"step": 821
|
5765 |
-
},
|
5766 |
-
{
|
5767 |
-
"epoch": 1.6027297099683158,
|
5768 |
-
"grad_norm": 0.4355667233467102,
|
5769 |
-
"learning_rate": 2.446881425633996e-07,
|
5770 |
-
"loss": 2.0444,
|
5771 |
-
"step": 822
|
5772 |
-
},
|
5773 |
-
{
|
5774 |
-
"epoch": 1.6046795028028273,
|
5775 |
-
"grad_norm": 0.42853429913520813,
|
5776 |
-
"learning_rate": 2.443454420836189e-07,
|
5777 |
-
"loss": 1.9451,
|
5778 |
-
"step": 823
|
5779 |
-
},
|
5780 |
-
{
|
5781 |
-
"epoch": 1.6066292956373385,
|
5782 |
-
"grad_norm": 0.4546351134777069,
|
5783 |
-
"learning_rate": 2.4400274160383825e-07,
|
5784 |
-
"loss": 2.015,
|
5785 |
-
"step": 824
|
5786 |
-
},
|
5787 |
-
{
|
5788 |
-
"epoch": 1.6085790884718498,
|
5789 |
-
"grad_norm": 0.45015424489974976,
|
5790 |
-
"learning_rate": 2.4366004112405755e-07,
|
5791 |
-
"loss": 2.0171,
|
5792 |
-
"step": 825
|
5793 |
-
},
|
5794 |
-
{
|
5795 |
-
"epoch": 1.6105288813063612,
|
5796 |
-
"grad_norm": 0.446065217256546,
|
5797 |
-
"learning_rate": 2.433173406442769e-07,
|
5798 |
-
"loss": 2.0085,
|
5799 |
-
"step": 826
|
5800 |
-
},
|
5801 |
-
{
|
5802 |
-
"epoch": 1.6124786741408725,
|
5803 |
-
"grad_norm": 0.46771183609962463,
|
5804 |
-
"learning_rate": 2.4297464016449624e-07,
|
5805 |
-
"loss": 1.9844,
|
5806 |
-
"step": 827
|
5807 |
-
},
|
5808 |
-
{
|
5809 |
-
"epoch": 1.614428466975384,
|
5810 |
-
"grad_norm": 0.4590853452682495,
|
5811 |
-
"learning_rate": 2.4263193968471554e-07,
|
5812 |
-
"loss": 2.0031,
|
5813 |
-
"step": 828
|
5814 |
-
},
|
5815 |
-
{
|
5816 |
-
"epoch": 1.6163782598098952,
|
5817 |
-
"grad_norm": 0.4465842545032501,
|
5818 |
-
"learning_rate": 2.422892392049349e-07,
|
5819 |
-
"loss": 2.0344,
|
5820 |
-
"step": 829
|
5821 |
-
},
|
5822 |
-
{
|
5823 |
-
"epoch": 1.6183280526444066,
|
5824 |
-
"grad_norm": 0.40251830220222473,
|
5825 |
-
"learning_rate": 2.419465387251542e-07,
|
5826 |
-
"loss": 2.0129,
|
5827 |
-
"step": 830
|
5828 |
-
},
|
5829 |
-
{
|
5830 |
-
"epoch": 1.6202778454789177,
|
5831 |
-
"grad_norm": 0.45284631848335266,
|
5832 |
-
"learning_rate": 2.416038382453735e-07,
|
5833 |
-
"loss": 2.0354,
|
5834 |
-
"step": 831
|
5835 |
-
},
|
5836 |
-
{
|
5837 |
-
"epoch": 1.6222276383134293,
|
5838 |
-
"grad_norm": 0.4733079969882965,
|
5839 |
-
"learning_rate": 2.4126113776559287e-07,
|
5840 |
-
"loss": 1.993,
|
5841 |
-
"step": 832
|
5842 |
-
},
|
5843 |
-
{
|
5844 |
-
"epoch": 1.6241774311479404,
|
5845 |
-
"grad_norm": 0.4264031946659088,
|
5846 |
-
"learning_rate": 2.4091843728581217e-07,
|
5847 |
-
"loss": 2.007,
|
5848 |
-
"step": 833
|
5849 |
-
},
|
5850 |
-
{
|
5851 |
-
"epoch": 1.626127223982452,
|
5852 |
-
"grad_norm": 0.46400555968284607,
|
5853 |
-
"learning_rate": 2.405757368060315e-07,
|
5854 |
-
"loss": 1.9825,
|
5855 |
-
"step": 834
|
5856 |
-
},
|
5857 |
-
{
|
5858 |
-
"epoch": 1.6280770168169632,
|
5859 |
-
"grad_norm": 0.4408418834209442,
|
5860 |
-
"learning_rate": 2.4023303632625086e-07,
|
5861 |
-
"loss": 2.0199,
|
5862 |
-
"step": 835
|
5863 |
-
},
|
5864 |
-
{
|
5865 |
-
"epoch": 1.6300268096514745,
|
5866 |
-
"grad_norm": 0.4353219270706177,
|
5867 |
-
"learning_rate": 2.3989033584647016e-07,
|
5868 |
-
"loss": 1.9767,
|
5869 |
-
"step": 836
|
5870 |
-
},
|
5871 |
-
{
|
5872 |
-
"epoch": 1.6319766024859859,
|
5873 |
-
"grad_norm": 0.47256654500961304,
|
5874 |
-
"learning_rate": 2.395476353666895e-07,
|
5875 |
-
"loss": 2.0708,
|
5876 |
-
"step": 837
|
5877 |
-
},
|
5878 |
-
{
|
5879 |
-
"epoch": 1.6339263953204972,
|
5880 |
-
"grad_norm": 0.44208547472953796,
|
5881 |
-
"learning_rate": 2.392049348869088e-07,
|
5882 |
-
"loss": 2.0518,
|
5883 |
-
"step": 838
|
5884 |
-
},
|
5885 |
-
{
|
5886 |
-
"epoch": 1.6358761881550086,
|
5887 |
-
"grad_norm": 0.4937672019004822,
|
5888 |
-
"learning_rate": 2.3886223440712815e-07,
|
5889 |
-
"loss": 2.043,
|
5890 |
-
"step": 839
|
5891 |
-
},
|
5892 |
-
{
|
5893 |
-
"epoch": 1.6378259809895197,
|
5894 |
-
"grad_norm": 0.46095776557922363,
|
5895 |
-
"learning_rate": 2.385195339273475e-07,
|
5896 |
-
"loss": 2.0421,
|
5897 |
-
"step": 840
|
5898 |
-
},
|
5899 |
-
{
|
5900 |
-
"epoch": 1.6397757738240313,
|
5901 |
-
"grad_norm": 0.4658643901348114,
|
5902 |
-
"learning_rate": 2.3817683344756682e-07,
|
5903 |
-
"loss": 2.0225,
|
5904 |
-
"step": 841
|
5905 |
-
},
|
5906 |
-
{
|
5907 |
-
"epoch": 1.6417255666585424,
|
5908 |
-
"grad_norm": 0.4451207220554352,
|
5909 |
-
"learning_rate": 2.3783413296778616e-07,
|
5910 |
-
"loss": 2.0244,
|
5911 |
-
"step": 842
|
5912 |
-
},
|
5913 |
-
{
|
5914 |
-
"epoch": 1.643675359493054,
|
5915 |
-
"grad_norm": 0.43841567635536194,
|
5916 |
-
"learning_rate": 2.3749143248800546e-07,
|
5917 |
-
"loss": 1.9797,
|
5918 |
-
"step": 843
|
5919 |
-
},
|
5920 |
-
{
|
5921 |
-
"epoch": 1.6456251523275651,
|
5922 |
-
"grad_norm": 0.45495790243148804,
|
5923 |
-
"learning_rate": 2.371487320082248e-07,
|
5924 |
-
"loss": 2.039,
|
5925 |
-
"step": 844
|
5926 |
-
},
|
5927 |
-
{
|
5928 |
-
"epoch": 1.6475749451620765,
|
5929 |
-
"grad_norm": 0.4694961607456207,
|
5930 |
-
"learning_rate": 2.3680603152844415e-07,
|
5931 |
-
"loss": 2.0232,
|
5932 |
-
"step": 845
|
5933 |
-
},
|
5934 |
-
{
|
5935 |
-
"epoch": 1.6495247379965878,
|
5936 |
-
"grad_norm": 0.4593546986579895,
|
5937 |
-
"learning_rate": 2.3646333104866345e-07,
|
5938 |
-
"loss": 2.0495,
|
5939 |
-
"step": 846
|
5940 |
-
},
|
5941 |
-
{
|
5942 |
-
"epoch": 1.6514745308310992,
|
5943 |
-
"grad_norm": 0.4738862216472626,
|
5944 |
-
"learning_rate": 2.361206305688828e-07,
|
5945 |
-
"loss": 2.0105,
|
5946 |
-
"step": 847
|
5947 |
-
},
|
5948 |
-
{
|
5949 |
-
"epoch": 1.6534243236656105,
|
5950 |
-
"grad_norm": 0.45088139176368713,
|
5951 |
-
"learning_rate": 2.357779300891021e-07,
|
5952 |
-
"loss": 2.0418,
|
5953 |
-
"step": 848
|
5954 |
-
},
|
5955 |
-
{
|
5956 |
-
"epoch": 1.655374116500122,
|
5957 |
-
"grad_norm": 0.4501790702342987,
|
5958 |
-
"learning_rate": 2.3543522960932144e-07,
|
5959 |
-
"loss": 2.0531,
|
5960 |
-
"step": 849
|
5961 |
-
},
|
5962 |
-
{
|
5963 |
-
"epoch": 1.6573239093346332,
|
5964 |
-
"grad_norm": 0.47187909483909607,
|
5965 |
-
"learning_rate": 2.3509252912954078e-07,
|
5966 |
-
"loss": 1.9907,
|
5967 |
-
"step": 850
|
5968 |
-
},
|
5969 |
-
{
|
5970 |
-
"epoch": 1.6592737021691444,
|
5971 |
-
"grad_norm": 0.46769675612449646,
|
5972 |
-
"learning_rate": 2.3474982864976008e-07,
|
5973 |
-
"loss": 2.0145,
|
5974 |
-
"step": 851
|
5975 |
-
},
|
5976 |
-
{
|
5977 |
-
"epoch": 1.661223495003656,
|
5978 |
-
"grad_norm": 0.44854676723480225,
|
5979 |
-
"learning_rate": 2.3440712816997943e-07,
|
5980 |
-
"loss": 2.0381,
|
5981 |
-
"step": 852
|
5982 |
-
},
|
5983 |
-
{
|
5984 |
-
"epoch": 1.663173287838167,
|
5985 |
-
"grad_norm": 0.4576641023159027,
|
5986 |
-
"learning_rate": 2.3406442769019877e-07,
|
5987 |
-
"loss": 1.9722,
|
5988 |
-
"step": 853
|
5989 |
-
},
|
5990 |
-
{
|
5991 |
-
"epoch": 1.6651230806726787,
|
5992 |
-
"grad_norm": 0.4568294584751129,
|
5993 |
-
"learning_rate": 2.3372172721041807e-07,
|
5994 |
-
"loss": 1.9744,
|
5995 |
-
"step": 854
|
5996 |
-
},
|
5997 |
-
{
|
5998 |
-
"epoch": 1.6670728735071898,
|
5999 |
-
"grad_norm": 0.4591883718967438,
|
6000 |
-
"learning_rate": 2.3337902673063742e-07,
|
6001 |
-
"loss": 1.9666,
|
6002 |
-
"step": 855
|
6003 |
-
},
|
6004 |
-
{
|
6005 |
-
"epoch": 1.6690226663417012,
|
6006 |
-
"grad_norm": 0.44672197103500366,
|
6007 |
-
"learning_rate": 2.3303632625085674e-07,
|
6008 |
-
"loss": 1.9944,
|
6009 |
-
"step": 856
|
6010 |
-
},
|
6011 |
-
{
|
6012 |
-
"epoch": 1.6709724591762125,
|
6013 |
-
"grad_norm": 0.4896506667137146,
|
6014 |
-
"learning_rate": 2.3269362577107606e-07,
|
6015 |
-
"loss": 2.0492,
|
6016 |
-
"step": 857
|
6017 |
-
},
|
6018 |
-
{
|
6019 |
-
"epoch": 1.6729222520107239,
|
6020 |
-
"grad_norm": 0.4453061521053314,
|
6021 |
-
"learning_rate": 2.323509252912954e-07,
|
6022 |
-
"loss": 1.9757,
|
6023 |
-
"step": 858
|
6024 |
-
},
|
6025 |
-
{
|
6026 |
-
"epoch": 1.6748720448452352,
|
6027 |
-
"grad_norm": 0.4569021761417389,
|
6028 |
-
"learning_rate": 2.3200822481151473e-07,
|
6029 |
-
"loss": 2.0523,
|
6030 |
-
"step": 859
|
6031 |
-
},
|
6032 |
-
{
|
6033 |
-
"epoch": 1.6768218376797466,
|
6034 |
-
"grad_norm": 0.4553905427455902,
|
6035 |
-
"learning_rate": 2.3166552433173405e-07,
|
6036 |
-
"loss": 2.0189,
|
6037 |
-
"step": 860
|
6038 |
-
},
|
6039 |
-
{
|
6040 |
-
"epoch": 1.678771630514258,
|
6041 |
-
"grad_norm": 0.4560829699039459,
|
6042 |
-
"learning_rate": 2.3132282385195337e-07,
|
6043 |
-
"loss": 2.0833,
|
6044 |
-
"step": 861
|
6045 |
-
},
|
6046 |
-
{
|
6047 |
-
"epoch": 1.680721423348769,
|
6048 |
-
"grad_norm": 0.4487151503562927,
|
6049 |
-
"learning_rate": 2.3098012337217272e-07,
|
6050 |
-
"loss": 1.9806,
|
6051 |
-
"step": 862
|
6052 |
-
},
|
6053 |
-
{
|
6054 |
-
"epoch": 1.6826712161832806,
|
6055 |
-
"grad_norm": 0.440891832113266,
|
6056 |
-
"learning_rate": 2.3063742289239204e-07,
|
6057 |
-
"loss": 1.9989,
|
6058 |
-
"step": 863
|
6059 |
-
},
|
6060 |
-
{
|
6061 |
-
"epoch": 1.6846210090177918,
|
6062 |
-
"grad_norm": 0.469881534576416,
|
6063 |
-
"learning_rate": 2.3029472241261136e-07,
|
6064 |
-
"loss": 2.0626,
|
6065 |
-
"step": 864
|
6066 |
-
},
|
6067 |
-
{
|
6068 |
-
"epoch": 1.6865708018523033,
|
6069 |
-
"grad_norm": 0.43621349334716797,
|
6070 |
-
"learning_rate": 2.299520219328307e-07,
|
6071 |
-
"loss": 2.063,
|
6072 |
-
"step": 865
|
6073 |
-
},
|
6074 |
-
{
|
6075 |
-
"epoch": 1.6885205946868145,
|
6076 |
-
"grad_norm": 0.45750436186790466,
|
6077 |
-
"learning_rate": 2.2960932145305003e-07,
|
6078 |
-
"loss": 2.0164,
|
6079 |
-
"step": 866
|
6080 |
-
},
|
6081 |
-
{
|
6082 |
-
"epoch": 1.6904703875213258,
|
6083 |
-
"grad_norm": 0.46832090616226196,
|
6084 |
-
"learning_rate": 2.2926662097326935e-07,
|
6085 |
-
"loss": 2.0459,
|
6086 |
-
"step": 867
|
6087 |
-
},
|
6088 |
-
{
|
6089 |
-
"epoch": 1.6924201803558372,
|
6090 |
-
"grad_norm": 0.4424852728843689,
|
6091 |
-
"learning_rate": 2.289239204934887e-07,
|
6092 |
-
"loss": 2.0148,
|
6093 |
-
"step": 868
|
6094 |
-
},
|
6095 |
-
{
|
6096 |
-
"epoch": 1.6943699731903485,
|
6097 |
-
"grad_norm": 0.4639265239238739,
|
6098 |
-
"learning_rate": 2.28581220013708e-07,
|
6099 |
-
"loss": 2.0453,
|
6100 |
-
"step": 869
|
6101 |
-
},
|
6102 |
-
{
|
6103 |
-
"epoch": 1.69631976602486,
|
6104 |
-
"grad_norm": 0.42720574140548706,
|
6105 |
-
"learning_rate": 2.2823851953392734e-07,
|
6106 |
-
"loss": 2.0164,
|
6107 |
-
"step": 870
|
6108 |
-
},
|
6109 |
-
{
|
6110 |
-
"epoch": 1.698269558859371,
|
6111 |
-
"grad_norm": 0.46615973114967346,
|
6112 |
-
"learning_rate": 2.2789581905414668e-07,
|
6113 |
-
"loss": 2.0235,
|
6114 |
-
"step": 871
|
6115 |
-
},
|
6116 |
-
{
|
6117 |
-
"epoch": 1.7002193516938826,
|
6118 |
-
"grad_norm": 0.46956273913383484,
|
6119 |
-
"learning_rate": 2.2755311857436598e-07,
|
6120 |
-
"loss": 2.0668,
|
6121 |
-
"step": 872
|
6122 |
-
},
|
6123 |
-
{
|
6124 |
-
"epoch": 1.7021691445283937,
|
6125 |
-
"grad_norm": 0.45590096712112427,
|
6126 |
-
"learning_rate": 2.2721041809458533e-07,
|
6127 |
-
"loss": 2.0767,
|
6128 |
-
"step": 873
|
6129 |
-
},
|
6130 |
-
{
|
6131 |
-
"epoch": 1.7041189373629053,
|
6132 |
-
"grad_norm": 0.4419032037258148,
|
6133 |
-
"learning_rate": 2.2686771761480465e-07,
|
6134 |
-
"loss": 2.0298,
|
6135 |
-
"step": 874
|
6136 |
-
},
|
6137 |
-
{
|
6138 |
-
"epoch": 1.7060687301974164,
|
6139 |
-
"grad_norm": 0.48438993096351624,
|
6140 |
-
"learning_rate": 2.2652501713502397e-07,
|
6141 |
-
"loss": 2.0881,
|
6142 |
-
"step": 875
|
6143 |
-
},
|
6144 |
-
{
|
6145 |
-
"epoch": 1.7080185230319278,
|
6146 |
-
"grad_norm": 0.4674246609210968,
|
6147 |
-
"learning_rate": 2.2618231665524332e-07,
|
6148 |
-
"loss": 1.9858,
|
6149 |
-
"step": 876
|
6150 |
-
},
|
6151 |
-
{
|
6152 |
-
"epoch": 1.7099683158664392,
|
6153 |
-
"grad_norm": 0.4731968641281128,
|
6154 |
-
"learning_rate": 2.2583961617546264e-07,
|
6155 |
-
"loss": 2.0684,
|
6156 |
-
"step": 877
|
6157 |
-
},
|
6158 |
-
{
|
6159 |
-
"epoch": 1.7119181087009505,
|
6160 |
-
"grad_norm": 0.44370540976524353,
|
6161 |
-
"learning_rate": 2.2549691569568196e-07,
|
6162 |
-
"loss": 2.0222,
|
6163 |
-
"step": 878
|
6164 |
-
},
|
6165 |
-
{
|
6166 |
-
"epoch": 1.7138679015354619,
|
6167 |
-
"grad_norm": 0.43057727813720703,
|
6168 |
-
"learning_rate": 2.251542152159013e-07,
|
6169 |
-
"loss": 2.0054,
|
6170 |
-
"step": 879
|
6171 |
-
},
|
6172 |
-
{
|
6173 |
-
"epoch": 1.7158176943699732,
|
6174 |
-
"grad_norm": 0.4575825035572052,
|
6175 |
-
"learning_rate": 2.2481151473612063e-07,
|
6176 |
-
"loss": 2.0194,
|
6177 |
-
"step": 880
|
6178 |
-
},
|
6179 |
-
{
|
6180 |
-
"epoch": 1.7177674872044846,
|
6181 |
-
"grad_norm": 0.46100616455078125,
|
6182 |
-
"learning_rate": 2.2446881425633995e-07,
|
6183 |
-
"loss": 2.0362,
|
6184 |
-
"step": 881
|
6185 |
-
},
|
6186 |
-
{
|
6187 |
-
"epoch": 1.7197172800389957,
|
6188 |
-
"grad_norm": 0.46780040860176086,
|
6189 |
-
"learning_rate": 2.2412611377655927e-07,
|
6190 |
-
"loss": 2.0458,
|
6191 |
-
"step": 882
|
6192 |
-
},
|
6193 |
-
{
|
6194 |
-
"epoch": 1.7216670728735073,
|
6195 |
-
"grad_norm": 0.4316709339618683,
|
6196 |
-
"learning_rate": 2.2378341329677862e-07,
|
6197 |
-
"loss": 2.0401,
|
6198 |
-
"step": 883
|
6199 |
-
},
|
6200 |
-
{
|
6201 |
-
"epoch": 1.7236168657080184,
|
6202 |
-
"grad_norm": 0.43883568048477173,
|
6203 |
-
"learning_rate": 2.2344071281699794e-07,
|
6204 |
-
"loss": 2.0407,
|
6205 |
-
"step": 884
|
6206 |
-
},
|
6207 |
-
{
|
6208 |
-
"epoch": 1.72556665854253,
|
6209 |
-
"grad_norm": 0.44989317655563354,
|
6210 |
-
"learning_rate": 2.2309801233721726e-07,
|
6211 |
-
"loss": 2.0253,
|
6212 |
-
"step": 885
|
6213 |
-
},
|
6214 |
-
{
|
6215 |
-
"epoch": 1.7275164513770411,
|
6216 |
-
"grad_norm": 0.4468737840652466,
|
6217 |
-
"learning_rate": 2.227553118574366e-07,
|
6218 |
-
"loss": 2.0336,
|
6219 |
-
"step": 886
|
6220 |
-
},
|
6221 |
-
{
|
6222 |
-
"epoch": 1.7294662442115525,
|
6223 |
-
"grad_norm": 0.45126405358314514,
|
6224 |
-
"learning_rate": 2.224126113776559e-07,
|
6225 |
-
"loss": 2.0259,
|
6226 |
-
"step": 887
|
6227 |
-
},
|
6228 |
-
{
|
6229 |
-
"epoch": 1.7314160370460638,
|
6230 |
-
"grad_norm": 0.43270209431648254,
|
6231 |
-
"learning_rate": 2.2206991089787525e-07,
|
6232 |
-
"loss": 2.0071,
|
6233 |
-
"step": 888
|
6234 |
-
},
|
6235 |
-
{
|
6236 |
-
"epoch": 1.7333658298805752,
|
6237 |
-
"grad_norm": 0.4503726363182068,
|
6238 |
-
"learning_rate": 2.217272104180946e-07,
|
6239 |
-
"loss": 2.1025,
|
6240 |
-
"step": 889
|
6241 |
-
},
|
6242 |
-
{
|
6243 |
-
"epoch": 1.7353156227150865,
|
6244 |
-
"grad_norm": 0.44900792837142944,
|
6245 |
-
"learning_rate": 2.213845099383139e-07,
|
6246 |
-
"loss": 1.9883,
|
6247 |
-
"step": 890
|
6248 |
-
},
|
6249 |
-
{
|
6250 |
-
"epoch": 1.737265415549598,
|
6251 |
-
"grad_norm": 0.4531221091747284,
|
6252 |
-
"learning_rate": 2.2104180945853324e-07,
|
6253 |
-
"loss": 2.0095,
|
6254 |
-
"step": 891
|
6255 |
-
},
|
6256 |
-
{
|
6257 |
-
"epoch": 1.7392152083841093,
|
6258 |
-
"grad_norm": 0.46359124779701233,
|
6259 |
-
"learning_rate": 2.2069910897875258e-07,
|
6260 |
-
"loss": 2.003,
|
6261 |
-
"step": 892
|
6262 |
-
},
|
6263 |
-
{
|
6264 |
-
"epoch": 1.7411650012186204,
|
6265 |
-
"grad_norm": 0.4506163001060486,
|
6266 |
-
"learning_rate": 2.2035640849897188e-07,
|
6267 |
-
"loss": 1.9438,
|
6268 |
-
"step": 893
|
6269 |
-
},
|
6270 |
-
{
|
6271 |
-
"epoch": 1.743114794053132,
|
6272 |
-
"grad_norm": 0.4618943929672241,
|
6273 |
-
"learning_rate": 2.2001370801919123e-07,
|
6274 |
-
"loss": 2.0772,
|
6275 |
-
"step": 894
|
6276 |
-
},
|
6277 |
-
{
|
6278 |
-
"epoch": 1.745064586887643,
|
6279 |
-
"grad_norm": 0.4341379404067993,
|
6280 |
-
"learning_rate": 2.1967100753941055e-07,
|
6281 |
-
"loss": 1.9443,
|
6282 |
-
"step": 895
|
6283 |
-
},
|
6284 |
-
{
|
6285 |
-
"epoch": 1.7470143797221547,
|
6286 |
-
"grad_norm": 0.4800126254558563,
|
6287 |
-
"learning_rate": 2.1932830705962987e-07,
|
6288 |
-
"loss": 1.9994,
|
6289 |
-
"step": 896
|
6290 |
-
},
|
6291 |
-
{
|
6292 |
-
"epoch": 1.7489641725566658,
|
6293 |
-
"grad_norm": 0.45474764704704285,
|
6294 |
-
"learning_rate": 2.1898560657984922e-07,
|
6295 |
-
"loss": 2.0635,
|
6296 |
-
"step": 897
|
6297 |
-
},
|
6298 |
-
{
|
6299 |
-
"epoch": 1.7509139653911772,
|
6300 |
-
"grad_norm": 0.44301092624664307,
|
6301 |
-
"learning_rate": 2.1864290610006854e-07,
|
6302 |
-
"loss": 1.9752,
|
6303 |
-
"step": 898
|
6304 |
-
},
|
6305 |
-
{
|
6306 |
-
"epoch": 1.7528637582256885,
|
6307 |
-
"grad_norm": 0.4428479075431824,
|
6308 |
-
"learning_rate": 2.1830020562028786e-07,
|
6309 |
-
"loss": 1.9371,
|
6310 |
-
"step": 899
|
6311 |
-
},
|
6312 |
-
{
|
6313 |
-
"epoch": 1.7548135510601999,
|
6314 |
-
"grad_norm": 0.4576126039028168,
|
6315 |
-
"learning_rate": 2.1795750514050718e-07,
|
6316 |
-
"loss": 2.063,
|
6317 |
-
"step": 900
|
6318 |
-
},
|
6319 |
-
{
|
6320 |
-
"epoch": 1.7567633438947112,
|
6321 |
-
"grad_norm": 0.47722387313842773,
|
6322 |
-
"learning_rate": 2.1761480466072653e-07,
|
6323 |
-
"loss": 2.0743,
|
6324 |
-
"step": 901
|
6325 |
-
},
|
6326 |
-
{
|
6327 |
-
"epoch": 1.7587131367292224,
|
6328 |
-
"grad_norm": 0.4575481712818146,
|
6329 |
-
"learning_rate": 2.1727210418094585e-07,
|
6330 |
-
"loss": 1.9873,
|
6331 |
-
"step": 902
|
6332 |
-
},
|
6333 |
-
{
|
6334 |
-
"epoch": 1.760662929563734,
|
6335 |
-
"grad_norm": 0.4340214729309082,
|
6336 |
-
"learning_rate": 2.1692940370116517e-07,
|
6337 |
-
"loss": 1.9459,
|
6338 |
-
"step": 903
|
6339 |
-
},
|
6340 |
-
{
|
6341 |
-
"epoch": 1.762612722398245,
|
6342 |
-
"grad_norm": 0.41616639494895935,
|
6343 |
-
"learning_rate": 2.1658670322138452e-07,
|
6344 |
-
"loss": 1.9505,
|
6345 |
-
"step": 904
|
6346 |
-
},
|
6347 |
-
{
|
6348 |
-
"epoch": 1.7645625152327566,
|
6349 |
-
"grad_norm": 0.472650408744812,
|
6350 |
-
"learning_rate": 2.162440027416038e-07,
|
6351 |
-
"loss": 2.0594,
|
6352 |
-
"step": 905
|
6353 |
-
},
|
6354 |
-
{
|
6355 |
-
"epoch": 1.7665123080672678,
|
6356 |
-
"grad_norm": 0.4756447374820709,
|
6357 |
-
"learning_rate": 2.1590130226182316e-07,
|
6358 |
-
"loss": 1.9695,
|
6359 |
-
"step": 906
|
6360 |
-
},
|
6361 |
-
{
|
6362 |
-
"epoch": 1.7684621009017791,
|
6363 |
-
"grad_norm": 0.44738152623176575,
|
6364 |
-
"learning_rate": 2.155586017820425e-07,
|
6365 |
-
"loss": 2.0771,
|
6366 |
-
"step": 907
|
6367 |
-
},
|
6368 |
-
{
|
6369 |
-
"epoch": 1.7704118937362905,
|
6370 |
-
"grad_norm": 0.4602157771587372,
|
6371 |
-
"learning_rate": 2.152159013022618e-07,
|
6372 |
-
"loss": 2.0813,
|
6373 |
-
"step": 908
|
6374 |
-
},
|
6375 |
-
{
|
6376 |
-
"epoch": 1.7723616865708018,
|
6377 |
-
"grad_norm": 0.46765050292015076,
|
6378 |
-
"learning_rate": 2.1487320082248115e-07,
|
6379 |
-
"loss": 2.0801,
|
6380 |
-
"step": 909
|
6381 |
-
},
|
6382 |
-
{
|
6383 |
-
"epoch": 1.7743114794053132,
|
6384 |
-
"grad_norm": 0.4703747034072876,
|
6385 |
-
"learning_rate": 2.145305003427005e-07,
|
6386 |
-
"loss": 2.0093,
|
6387 |
-
"step": 910
|
6388 |
-
},
|
6389 |
-
{
|
6390 |
-
"epoch": 1.7762612722398246,
|
6391 |
-
"grad_norm": 0.48457059264183044,
|
6392 |
-
"learning_rate": 2.141877998629198e-07,
|
6393 |
-
"loss": 2.0528,
|
6394 |
-
"step": 911
|
6395 |
-
},
|
6396 |
-
{
|
6397 |
-
"epoch": 1.778211065074336,
|
6398 |
-
"grad_norm": 0.478710412979126,
|
6399 |
-
"learning_rate": 2.1384509938313914e-07,
|
6400 |
-
"loss": 2.1099,
|
6401 |
-
"step": 912
|
6402 |
-
},
|
6403 |
-
{
|
6404 |
-
"epoch": 1.780160857908847,
|
6405 |
-
"grad_norm": 0.4458109438419342,
|
6406 |
-
"learning_rate": 2.1350239890335843e-07,
|
6407 |
-
"loss": 2.0592,
|
6408 |
-
"step": 913
|
6409 |
-
},
|
6410 |
-
{
|
6411 |
-
"epoch": 1.7821106507433586,
|
6412 |
-
"grad_norm": 0.4474625885486603,
|
6413 |
-
"learning_rate": 2.1315969842357778e-07,
|
6414 |
-
"loss": 2.0055,
|
6415 |
-
"step": 914
|
6416 |
-
},
|
6417 |
-
{
|
6418 |
-
"epoch": 1.7840604435778697,
|
6419 |
-
"grad_norm": 0.4586813151836395,
|
6420 |
-
"learning_rate": 2.1281699794379713e-07,
|
6421 |
-
"loss": 2.0131,
|
6422 |
-
"step": 915
|
6423 |
-
},
|
6424 |
-
{
|
6425 |
-
"epoch": 1.7860102364123813,
|
6426 |
-
"grad_norm": 0.45083218812942505,
|
6427 |
-
"learning_rate": 2.1247429746401642e-07,
|
6428 |
-
"loss": 2.0437,
|
6429 |
-
"step": 916
|
6430 |
-
},
|
6431 |
-
{
|
6432 |
-
"epoch": 1.7879600292468925,
|
6433 |
-
"grad_norm": 0.44078171253204346,
|
6434 |
-
"learning_rate": 2.1213159698423577e-07,
|
6435 |
-
"loss": 1.9792,
|
6436 |
-
"step": 917
|
6437 |
-
},
|
6438 |
-
{
|
6439 |
-
"epoch": 1.7899098220814038,
|
6440 |
-
"grad_norm": 0.4346940219402313,
|
6441 |
-
"learning_rate": 2.117888965044551e-07,
|
6442 |
-
"loss": 1.9933,
|
6443 |
-
"step": 918
|
6444 |
-
},
|
6445 |
-
{
|
6446 |
-
"epoch": 1.7918596149159152,
|
6447 |
-
"grad_norm": 0.45846906304359436,
|
6448 |
-
"learning_rate": 2.114461960246744e-07,
|
6449 |
-
"loss": 1.9682,
|
6450 |
-
"step": 919
|
6451 |
-
},
|
6452 |
-
{
|
6453 |
-
"epoch": 1.7938094077504265,
|
6454 |
-
"grad_norm": 0.4335155785083771,
|
6455 |
-
"learning_rate": 2.1110349554489376e-07,
|
6456 |
-
"loss": 2.03,
|
6457 |
-
"step": 920
|
6458 |
-
},
|
6459 |
-
{
|
6460 |
-
"epoch": 1.7957592005849379,
|
6461 |
-
"grad_norm": 0.4618023633956909,
|
6462 |
-
"learning_rate": 2.1076079506511308e-07,
|
6463 |
-
"loss": 2.0966,
|
6464 |
-
"step": 921
|
6465 |
-
},
|
6466 |
-
{
|
6467 |
-
"epoch": 1.7977089934194492,
|
6468 |
-
"grad_norm": 0.46044906973838806,
|
6469 |
-
"learning_rate": 2.104180945853324e-07,
|
6470 |
-
"loss": 2.0873,
|
6471 |
-
"step": 922
|
6472 |
-
},
|
6473 |
-
{
|
6474 |
-
"epoch": 1.7996587862539606,
|
6475 |
-
"grad_norm": 0.4635170102119446,
|
6476 |
-
"learning_rate": 2.1007539410555175e-07,
|
6477 |
-
"loss": 1.9897,
|
6478 |
-
"step": 923
|
6479 |
-
},
|
6480 |
-
{
|
6481 |
-
"epoch": 1.8016085790884717,
|
6482 |
-
"grad_norm": 0.4335494637489319,
|
6483 |
-
"learning_rate": 2.0973269362577107e-07,
|
6484 |
-
"loss": 2.0228,
|
6485 |
-
"step": 924
|
6486 |
-
},
|
6487 |
-
{
|
6488 |
-
"epoch": 1.8035583719229833,
|
6489 |
-
"grad_norm": 0.44605642557144165,
|
6490 |
-
"learning_rate": 2.093899931459904e-07,
|
6491 |
-
"loss": 2.0561,
|
6492 |
-
"step": 925
|
6493 |
-
},
|
6494 |
-
{
|
6495 |
-
"epoch": 1.8055081647574944,
|
6496 |
-
"grad_norm": 0.4611765146255493,
|
6497 |
-
"learning_rate": 2.090472926662097e-07,
|
6498 |
-
"loss": 2.0329,
|
6499 |
-
"step": 926
|
6500 |
-
},
|
6501 |
-
{
|
6502 |
-
"epoch": 1.807457957592006,
|
6503 |
-
"grad_norm": 0.443036288022995,
|
6504 |
-
"learning_rate": 2.0870459218642906e-07,
|
6505 |
-
"loss": 1.9565,
|
6506 |
-
"step": 927
|
6507 |
-
},
|
6508 |
-
{
|
6509 |
-
"epoch": 1.8094077504265171,
|
6510 |
-
"grad_norm": 0.4552265405654907,
|
6511 |
-
"learning_rate": 2.0836189170664838e-07,
|
6512 |
-
"loss": 2.0842,
|
6513 |
-
"step": 928
|
6514 |
-
},
|
6515 |
-
{
|
6516 |
-
"epoch": 1.8113575432610285,
|
6517 |
-
"grad_norm": 0.41511160135269165,
|
6518 |
-
"learning_rate": 2.080191912268677e-07,
|
6519 |
-
"loss": 2.0043,
|
6520 |
-
"step": 929
|
6521 |
-
},
|
6522 |
-
{
|
6523 |
-
"epoch": 1.8133073360955398,
|
6524 |
-
"grad_norm": 0.44421470165252686,
|
6525 |
-
"learning_rate": 2.0767649074708705e-07,
|
6526 |
-
"loss": 2.0433,
|
6527 |
-
"step": 930
|
6528 |
-
},
|
6529 |
-
{
|
6530 |
-
"epoch": 1.8152571289300512,
|
6531 |
-
"grad_norm": 0.43709036707878113,
|
6532 |
-
"learning_rate": 2.0733379026730634e-07,
|
6533 |
-
"loss": 2.0405,
|
6534 |
-
"step": 931
|
6535 |
-
},
|
6536 |
-
{
|
6537 |
-
"epoch": 1.8172069217645626,
|
6538 |
-
"grad_norm": 0.429074227809906,
|
6539 |
-
"learning_rate": 2.069910897875257e-07,
|
6540 |
-
"loss": 1.964,
|
6541 |
-
"step": 932
|
6542 |
-
},
|
6543 |
-
{
|
6544 |
-
"epoch": 1.8191567145990737,
|
6545 |
-
"grad_norm": 0.4392930269241333,
|
6546 |
-
"learning_rate": 2.0664838930774504e-07,
|
6547 |
-
"loss": 1.9819,
|
6548 |
-
"step": 933
|
6549 |
-
},
|
6550 |
-
{
|
6551 |
-
"epoch": 1.8211065074335853,
|
6552 |
-
"grad_norm": 0.41590166091918945,
|
6553 |
-
"learning_rate": 2.0630568882796433e-07,
|
6554 |
-
"loss": 1.9821,
|
6555 |
-
"step": 934
|
6556 |
-
},
|
6557 |
-
{
|
6558 |
-
"epoch": 1.8230563002680964,
|
6559 |
-
"grad_norm": 0.445362389087677,
|
6560 |
-
"learning_rate": 2.0596298834818368e-07,
|
6561 |
-
"loss": 2.092,
|
6562 |
-
"step": 935
|
6563 |
-
},
|
6564 |
-
{
|
6565 |
-
"epoch": 1.825006093102608,
|
6566 |
-
"grad_norm": 0.43674713373184204,
|
6567 |
-
"learning_rate": 2.0562028786840303e-07,
|
6568 |
-
"loss": 2.0371,
|
6569 |
-
"step": 936
|
6570 |
-
},
|
6571 |
-
{
|
6572 |
-
"epoch": 1.826955885937119,
|
6573 |
-
"grad_norm": 0.4520663022994995,
|
6574 |
-
"learning_rate": 2.0527758738862232e-07,
|
6575 |
-
"loss": 2.0329,
|
6576 |
-
"step": 937
|
6577 |
-
},
|
6578 |
-
{
|
6579 |
-
"epoch": 1.8289056787716305,
|
6580 |
-
"grad_norm": 0.4744395613670349,
|
6581 |
-
"learning_rate": 2.0493488690884167e-07,
|
6582 |
-
"loss": 2.0828,
|
6583 |
-
"step": 938
|
6584 |
-
},
|
6585 |
-
{
|
6586 |
-
"epoch": 1.8308554716061418,
|
6587 |
-
"grad_norm": 0.45714208483695984,
|
6588 |
-
"learning_rate": 2.04592186429061e-07,
|
6589 |
-
"loss": 2.017,
|
6590 |
-
"step": 939
|
6591 |
-
},
|
6592 |
-
{
|
6593 |
-
"epoch": 1.8328052644406532,
|
6594 |
-
"grad_norm": 0.4604392647743225,
|
6595 |
-
"learning_rate": 2.042494859492803e-07,
|
6596 |
-
"loss": 1.9813,
|
6597 |
-
"step": 940
|
6598 |
-
},
|
6599 |
-
{
|
6600 |
-
"epoch": 1.8347550572751645,
|
6601 |
-
"grad_norm": 0.43890222907066345,
|
6602 |
-
"learning_rate": 2.0390678546949966e-07,
|
6603 |
-
"loss": 1.9902,
|
6604 |
-
"step": 941
|
6605 |
-
},
|
6606 |
-
{
|
6607 |
-
"epoch": 1.8367048501096759,
|
6608 |
-
"grad_norm": 0.44383513927459717,
|
6609 |
-
"learning_rate": 2.0356408498971898e-07,
|
6610 |
-
"loss": 2.0434,
|
6611 |
-
"step": 942
|
6612 |
-
},
|
6613 |
-
{
|
6614 |
-
"epoch": 1.8386546429441872,
|
6615 |
-
"grad_norm": 0.43706512451171875,
|
6616 |
-
"learning_rate": 2.032213845099383e-07,
|
6617 |
-
"loss": 2.052,
|
6618 |
-
"step": 943
|
6619 |
-
},
|
6620 |
-
{
|
6621 |
-
"epoch": 1.8406044357786984,
|
6622 |
-
"grad_norm": 0.427843302488327,
|
6623 |
-
"learning_rate": 2.0287868403015762e-07,
|
6624 |
-
"loss": 1.8841,
|
6625 |
-
"step": 944
|
6626 |
-
},
|
6627 |
-
{
|
6628 |
-
"epoch": 1.84255422861321,
|
6629 |
-
"grad_norm": 0.4639602601528168,
|
6630 |
-
"learning_rate": 2.0253598355037697e-07,
|
6631 |
-
"loss": 2.0831,
|
6632 |
-
"step": 945
|
6633 |
-
},
|
6634 |
-
{
|
6635 |
-
"epoch": 1.844504021447721,
|
6636 |
-
"grad_norm": 0.44139614701271057,
|
6637 |
-
"learning_rate": 2.021932830705963e-07,
|
6638 |
-
"loss": 1.9867,
|
6639 |
-
"step": 946
|
6640 |
-
},
|
6641 |
-
{
|
6642 |
-
"epoch": 1.8464538142822327,
|
6643 |
-
"grad_norm": 0.4408351182937622,
|
6644 |
-
"learning_rate": 2.018505825908156e-07,
|
6645 |
-
"loss": 2.0199,
|
6646 |
-
"step": 947
|
6647 |
-
},
|
6648 |
-
{
|
6649 |
-
"epoch": 1.8484036071167438,
|
6650 |
-
"grad_norm": 0.49647897481918335,
|
6651 |
-
"learning_rate": 2.0150788211103496e-07,
|
6652 |
-
"loss": 2.0877,
|
6653 |
-
"step": 948
|
6654 |
-
},
|
6655 |
-
{
|
6656 |
-
"epoch": 1.8503533999512551,
|
6657 |
-
"grad_norm": 0.46033725142478943,
|
6658 |
-
"learning_rate": 2.0116518163125428e-07,
|
6659 |
-
"loss": 2.0584,
|
6660 |
-
"step": 949
|
6661 |
-
},
|
6662 |
-
{
|
6663 |
-
"epoch": 1.8523031927857665,
|
6664 |
-
"grad_norm": 0.4471881687641144,
|
6665 |
-
"learning_rate": 2.008224811514736e-07,
|
6666 |
-
"loss": 1.9694,
|
6667 |
-
"step": 950
|
6668 |
-
},
|
6669 |
-
{
|
6670 |
-
"epoch": 1.8542529856202778,
|
6671 |
-
"grad_norm": 0.435660183429718,
|
6672 |
-
"learning_rate": 2.0047978067169295e-07,
|
6673 |
-
"loss": 2.0025,
|
6674 |
-
"step": 951
|
6675 |
-
},
|
6676 |
-
{
|
6677 |
-
"epoch": 1.8562027784547892,
|
6678 |
-
"grad_norm": 0.4504587650299072,
|
6679 |
-
"learning_rate": 2.0013708019191224e-07,
|
6680 |
-
"loss": 2.0403,
|
6681 |
-
"step": 952
|
6682 |
-
},
|
6683 |
-
{
|
6684 |
-
"epoch": 1.8581525712893006,
|
6685 |
-
"grad_norm": 0.446451336145401,
|
6686 |
-
"learning_rate": 1.997943797121316e-07,
|
6687 |
-
"loss": 1.9817,
|
6688 |
-
"step": 953
|
6689 |
-
},
|
6690 |
-
{
|
6691 |
-
"epoch": 1.860102364123812,
|
6692 |
-
"grad_norm": 0.46191105246543884,
|
6693 |
-
"learning_rate": 1.9945167923235094e-07,
|
6694 |
-
"loss": 2.0329,
|
6695 |
-
"step": 954
|
6696 |
-
},
|
6697 |
-
{
|
6698 |
-
"epoch": 1.862052156958323,
|
6699 |
-
"grad_norm": 0.4477747976779938,
|
6700 |
-
"learning_rate": 1.9910897875257023e-07,
|
6701 |
-
"loss": 2.0113,
|
6702 |
-
"step": 955
|
6703 |
-
},
|
6704 |
-
{
|
6705 |
-
"epoch": 1.8640019497928346,
|
6706 |
-
"grad_norm": 0.46400219202041626,
|
6707 |
-
"learning_rate": 1.9876627827278958e-07,
|
6708 |
-
"loss": 2.0142,
|
6709 |
-
"step": 956
|
6710 |
-
},
|
6711 |
-
{
|
6712 |
-
"epoch": 1.8659517426273458,
|
6713 |
-
"grad_norm": 0.45763564109802246,
|
6714 |
-
"learning_rate": 1.984235777930089e-07,
|
6715 |
-
"loss": 2.0555,
|
6716 |
-
"step": 957
|
6717 |
-
},
|
6718 |
-
{
|
6719 |
-
"epoch": 1.8679015354618573,
|
6720 |
-
"grad_norm": 0.4603627920150757,
|
6721 |
-
"learning_rate": 1.9808087731322822e-07,
|
6722 |
-
"loss": 2.0022,
|
6723 |
-
"step": 958
|
6724 |
-
},
|
6725 |
-
{
|
6726 |
-
"epoch": 1.8698513282963685,
|
6727 |
-
"grad_norm": 0.5134696364402771,
|
6728 |
-
"learning_rate": 1.9773817683344757e-07,
|
6729 |
-
"loss": 2.0396,
|
6730 |
-
"step": 959
|
6731 |
-
},
|
6732 |
-
{
|
6733 |
-
"epoch": 1.8718011211308798,
|
6734 |
-
"grad_norm": 0.46097123622894287,
|
6735 |
-
"learning_rate": 1.973954763536669e-07,
|
6736 |
-
"loss": 2.0887,
|
6737 |
-
"step": 960
|
6738 |
-
},
|
6739 |
-
{
|
6740 |
-
"epoch": 1.8737509139653912,
|
6741 |
-
"grad_norm": 0.45269545912742615,
|
6742 |
-
"learning_rate": 1.970527758738862e-07,
|
6743 |
-
"loss": 2.0184,
|
6744 |
-
"step": 961
|
6745 |
-
},
|
6746 |
-
{
|
6747 |
-
"epoch": 1.8757007067999025,
|
6748 |
-
"grad_norm": 0.463885635137558,
|
6749 |
-
"learning_rate": 1.9671007539410553e-07,
|
6750 |
-
"loss": 2.0701,
|
6751 |
-
"step": 962
|
6752 |
-
},
|
6753 |
-
{
|
6754 |
-
"epoch": 1.8776504996344139,
|
6755 |
-
"grad_norm": 0.4765574634075165,
|
6756 |
-
"learning_rate": 1.9636737491432488e-07,
|
6757 |
-
"loss": 1.9951,
|
6758 |
-
"step": 963
|
6759 |
-
},
|
6760 |
-
{
|
6761 |
-
"epoch": 1.879600292468925,
|
6762 |
-
"grad_norm": 0.48183631896972656,
|
6763 |
-
"learning_rate": 1.960246744345442e-07,
|
6764 |
-
"loss": 2.0723,
|
6765 |
-
"step": 964
|
6766 |
-
},
|
6767 |
-
{
|
6768 |
-
"epoch": 1.8815500853034366,
|
6769 |
-
"grad_norm": 0.44266360998153687,
|
6770 |
-
"learning_rate": 1.9568197395476352e-07,
|
6771 |
-
"loss": 2.0134,
|
6772 |
-
"step": 965
|
6773 |
-
},
|
6774 |
-
{
|
6775 |
-
"epoch": 1.8834998781379477,
|
6776 |
-
"grad_norm": 0.4508133828639984,
|
6777 |
-
"learning_rate": 1.9533927347498287e-07,
|
6778 |
-
"loss": 1.9951,
|
6779 |
-
"step": 966
|
6780 |
-
},
|
6781 |
-
{
|
6782 |
-
"epoch": 1.8854496709724593,
|
6783 |
-
"grad_norm": 0.4255620539188385,
|
6784 |
-
"learning_rate": 1.949965729952022e-07,
|
6785 |
-
"loss": 1.9663,
|
6786 |
-
"step": 967
|
6787 |
-
},
|
6788 |
-
{
|
6789 |
-
"epoch": 1.8873994638069704,
|
6790 |
-
"grad_norm": 0.45423394441604614,
|
6791 |
-
"learning_rate": 1.946538725154215e-07,
|
6792 |
-
"loss": 2.0072,
|
6793 |
-
"step": 968
|
6794 |
-
},
|
6795 |
-
{
|
6796 |
-
"epoch": 1.8893492566414818,
|
6797 |
-
"grad_norm": 0.4226663112640381,
|
6798 |
-
"learning_rate": 1.9431117203564086e-07,
|
6799 |
-
"loss": 1.9598,
|
6800 |
-
"step": 969
|
6801 |
-
},
|
6802 |
-
{
|
6803 |
-
"epoch": 1.8912990494759931,
|
6804 |
-
"grad_norm": 0.47366762161254883,
|
6805 |
-
"learning_rate": 1.9396847155586015e-07,
|
6806 |
-
"loss": 1.9927,
|
6807 |
-
"step": 970
|
6808 |
-
},
|
6809 |
-
{
|
6810 |
-
"epoch": 1.8932488423105045,
|
6811 |
-
"grad_norm": 0.44758790731430054,
|
6812 |
-
"learning_rate": 1.936257710760795e-07,
|
6813 |
-
"loss": 1.9628,
|
6814 |
-
"step": 971
|
6815 |
-
},
|
6816 |
-
{
|
6817 |
-
"epoch": 1.8951986351450159,
|
6818 |
-
"grad_norm": 0.48197463154792786,
|
6819 |
-
"learning_rate": 1.9328307059629885e-07,
|
6820 |
-
"loss": 2.1004,
|
6821 |
-
"step": 972
|
6822 |
-
},
|
6823 |
-
{
|
6824 |
-
"epoch": 1.8971484279795272,
|
6825 |
-
"grad_norm": 0.4538448750972748,
|
6826 |
-
"learning_rate": 1.9294037011651814e-07,
|
6827 |
-
"loss": 2.0199,
|
6828 |
-
"step": 973
|
6829 |
-
},
|
6830 |
-
{
|
6831 |
-
"epoch": 1.8990982208140386,
|
6832 |
-
"grad_norm": 0.47362738847732544,
|
6833 |
-
"learning_rate": 1.925976696367375e-07,
|
6834 |
-
"loss": 2.0746,
|
6835 |
-
"step": 974
|
6836 |
-
},
|
6837 |
-
{
|
6838 |
-
"epoch": 1.9010480136485497,
|
6839 |
-
"grad_norm": 0.47095638513565063,
|
6840 |
-
"learning_rate": 1.922549691569568e-07,
|
6841 |
-
"loss": 1.9897,
|
6842 |
-
"step": 975
|
6843 |
-
},
|
6844 |
-
{
|
6845 |
-
"epoch": 1.9029978064830613,
|
6846 |
-
"grad_norm": 0.4763641059398651,
|
6847 |
-
"learning_rate": 1.9191226867717613e-07,
|
6848 |
-
"loss": 2.0156,
|
6849 |
-
"step": 976
|
6850 |
-
},
|
6851 |
-
{
|
6852 |
-
"epoch": 1.9049475993175724,
|
6853 |
-
"grad_norm": 0.4224942922592163,
|
6854 |
-
"learning_rate": 1.9156956819739548e-07,
|
6855 |
-
"loss": 2.0114,
|
6856 |
-
"step": 977
|
6857 |
-
},
|
6858 |
-
{
|
6859 |
-
"epoch": 1.906897392152084,
|
6860 |
-
"grad_norm": 0.44930440187454224,
|
6861 |
-
"learning_rate": 1.912268677176148e-07,
|
6862 |
-
"loss": 2.0121,
|
6863 |
-
"step": 978
|
6864 |
-
},
|
6865 |
-
{
|
6866 |
-
"epoch": 1.9088471849865951,
|
6867 |
-
"grad_norm": 0.45916110277175903,
|
6868 |
-
"learning_rate": 1.9088416723783412e-07,
|
6869 |
-
"loss": 2.0053,
|
6870 |
-
"step": 979
|
6871 |
-
},
|
6872 |
-
{
|
6873 |
-
"epoch": 1.9107969778211065,
|
6874 |
-
"grad_norm": 0.42759600281715393,
|
6875 |
-
"learning_rate": 1.9054146675805347e-07,
|
6876 |
-
"loss": 2.0109,
|
6877 |
-
"step": 980
|
6878 |
-
},
|
6879 |
-
{
|
6880 |
-
"epoch": 1.9127467706556178,
|
6881 |
-
"grad_norm": 0.49347975850105286,
|
6882 |
-
"learning_rate": 1.901987662782728e-07,
|
6883 |
-
"loss": 2.0657,
|
6884 |
-
"step": 981
|
6885 |
-
},
|
6886 |
-
{
|
6887 |
-
"epoch": 1.9146965634901292,
|
6888 |
-
"grad_norm": 0.4315294027328491,
|
6889 |
-
"learning_rate": 1.898560657984921e-07,
|
6890 |
-
"loss": 1.9473,
|
6891 |
-
"step": 982
|
6892 |
-
},
|
6893 |
-
{
|
6894 |
-
"epoch": 1.9166463563246405,
|
6895 |
-
"grad_norm": 0.42915600538253784,
|
6896 |
-
"learning_rate": 1.8951336531871143e-07,
|
6897 |
-
"loss": 1.9958,
|
6898 |
-
"step": 983
|
6899 |
-
},
|
6900 |
-
{
|
6901 |
-
"epoch": 1.9185961491591519,
|
6902 |
-
"grad_norm": 0.48152124881744385,
|
6903 |
-
"learning_rate": 1.8917066483893078e-07,
|
6904 |
-
"loss": 2.0815,
|
6905 |
-
"step": 984
|
6906 |
-
},
|
6907 |
-
{
|
6908 |
-
"epoch": 1.9205459419936632,
|
6909 |
-
"grad_norm": 0.44423532485961914,
|
6910 |
-
"learning_rate": 1.888279643591501e-07,
|
6911 |
-
"loss": 2.0227,
|
6912 |
-
"step": 985
|
6913 |
-
},
|
6914 |
-
{
|
6915 |
-
"epoch": 1.9224957348281744,
|
6916 |
-
"grad_norm": 0.4499359130859375,
|
6917 |
-
"learning_rate": 1.8848526387936942e-07,
|
6918 |
-
"loss": 1.961,
|
6919 |
-
"step": 986
|
6920 |
-
},
|
6921 |
-
{
|
6922 |
-
"epoch": 1.924445527662686,
|
6923 |
-
"grad_norm": 0.4560549855232239,
|
6924 |
-
"learning_rate": 1.8814256339958877e-07,
|
6925 |
-
"loss": 2.03,
|
6926 |
-
"step": 987
|
6927 |
-
},
|
6928 |
-
{
|
6929 |
-
"epoch": 1.926395320497197,
|
6930 |
-
"grad_norm": 0.48396381735801697,
|
6931 |
-
"learning_rate": 1.8779986291980806e-07,
|
6932 |
-
"loss": 1.985,
|
6933 |
-
"step": 988
|
6934 |
-
},
|
6935 |
-
{
|
6936 |
-
"epoch": 1.9283451133317087,
|
6937 |
-
"grad_norm": 0.456910103559494,
|
6938 |
-
"learning_rate": 1.874571624400274e-07,
|
6939 |
-
"loss": 1.9802,
|
6940 |
-
"step": 989
|
6941 |
-
},
|
6942 |
-
{
|
6943 |
-
"epoch": 1.9302949061662198,
|
6944 |
-
"grad_norm": 0.46041303873062134,
|
6945 |
-
"learning_rate": 1.8711446196024676e-07,
|
6946 |
-
"loss": 1.9507,
|
6947 |
-
"step": 990
|
6948 |
-
},
|
6949 |
-
{
|
6950 |
-
"epoch": 1.9322446990007311,
|
6951 |
-
"grad_norm": 0.4496663510799408,
|
6952 |
-
"learning_rate": 1.8677176148046605e-07,
|
6953 |
-
"loss": 2.0329,
|
6954 |
-
"step": 991
|
6955 |
-
},
|
6956 |
-
{
|
6957 |
-
"epoch": 1.9341944918352425,
|
6958 |
-
"grad_norm": 0.4381345212459564,
|
6959 |
-
"learning_rate": 1.864290610006854e-07,
|
6960 |
-
"loss": 1.9643,
|
6961 |
-
"step": 992
|
6962 |
-
},
|
6963 |
-
{
|
6964 |
-
"epoch": 1.9361442846697539,
|
6965 |
-
"grad_norm": 0.43699464201927185,
|
6966 |
-
"learning_rate": 1.8608636052090475e-07,
|
6967 |
-
"loss": 2.026,
|
6968 |
-
"step": 993
|
6969 |
-
},
|
6970 |
-
{
|
6971 |
-
"epoch": 1.9380940775042652,
|
6972 |
-
"grad_norm": 0.4496040344238281,
|
6973 |
-
"learning_rate": 1.8574366004112404e-07,
|
6974 |
-
"loss": 1.9318,
|
6975 |
-
"step": 994
|
6976 |
-
},
|
6977 |
-
{
|
6978 |
-
"epoch": 1.9400438703387763,
|
6979 |
-
"grad_norm": 0.45028945803642273,
|
6980 |
-
"learning_rate": 1.854009595613434e-07,
|
6981 |
-
"loss": 2.0254,
|
6982 |
-
"step": 995
|
6983 |
-
},
|
6984 |
-
{
|
6985 |
-
"epoch": 1.941993663173288,
|
6986 |
-
"grad_norm": 0.46241873502731323,
|
6987 |
-
"learning_rate": 1.8505825908156268e-07,
|
6988 |
-
"loss": 2.0224,
|
6989 |
-
"step": 996
|
6990 |
-
},
|
6991 |
-
{
|
6992 |
-
"epoch": 1.943943456007799,
|
6993 |
-
"grad_norm": 0.4494277238845825,
|
6994 |
-
"learning_rate": 1.8471555860178203e-07,
|
6995 |
-
"loss": 2.0734,
|
6996 |
-
"step": 997
|
6997 |
-
},
|
6998 |
-
{
|
6999 |
-
"epoch": 1.9458932488423106,
|
7000 |
-
"grad_norm": 0.44225579500198364,
|
7001 |
-
"learning_rate": 1.8437285812200138e-07,
|
7002 |
-
"loss": 2.0548,
|
7003 |
-
"step": 998
|
7004 |
-
},
|
7005 |
-
{
|
7006 |
-
"epoch": 1.9478430416768218,
|
7007 |
-
"grad_norm": 0.4850820004940033,
|
7008 |
-
"learning_rate": 1.8403015764222067e-07,
|
7009 |
-
"loss": 1.9961,
|
7010 |
-
"step": 999
|
7011 |
-
},
|
7012 |
-
{
|
7013 |
-
"epoch": 1.9497928345113331,
|
7014 |
-
"grad_norm": 0.46442610025405884,
|
7015 |
-
"learning_rate": 1.8368745716244002e-07,
|
7016 |
-
"loss": 1.9777,
|
7017 |
-
"step": 1000
|
7018 |
-
},
|
7019 |
-
{
|
7020 |
-
"epoch": 1.9517426273458445,
|
7021 |
-
"grad_norm": 0.457109272480011,
|
7022 |
-
"learning_rate": 1.8334475668265934e-07,
|
7023 |
-
"loss": 2.0949,
|
7024 |
-
"step": 1001
|
7025 |
-
},
|
7026 |
-
{
|
7027 |
-
"epoch": 1.9536924201803558,
|
7028 |
-
"grad_norm": 0.4514349699020386,
|
7029 |
-
"learning_rate": 1.8300205620287866e-07,
|
7030 |
-
"loss": 2.0933,
|
7031 |
-
"step": 1002
|
7032 |
-
},
|
7033 |
-
{
|
7034 |
-
"epoch": 1.9556422130148672,
|
7035 |
-
"grad_norm": 0.4601777195930481,
|
7036 |
-
"learning_rate": 1.82659355723098e-07,
|
7037 |
-
"loss": 1.9975,
|
7038 |
-
"step": 1003
|
7039 |
-
},
|
7040 |
-
{
|
7041 |
-
"epoch": 1.9575920058493785,
|
7042 |
-
"grad_norm": 0.4604569673538208,
|
7043 |
-
"learning_rate": 1.8231665524331733e-07,
|
7044 |
-
"loss": 2.0364,
|
7045 |
-
"step": 1004
|
7046 |
-
},
|
7047 |
-
{
|
7048 |
-
"epoch": 1.95954179868389,
|
7049 |
-
"grad_norm": 0.4434170424938202,
|
7050 |
-
"learning_rate": 1.8197395476353665e-07,
|
7051 |
-
"loss": 1.9835,
|
7052 |
-
"step": 1005
|
7053 |
-
},
|
7054 |
-
{
|
7055 |
-
"epoch": 1.961491591518401,
|
7056 |
-
"grad_norm": 0.45063334703445435,
|
7057 |
-
"learning_rate": 1.81631254283756e-07,
|
7058 |
-
"loss": 1.9904,
|
7059 |
-
"step": 1006
|
7060 |
-
},
|
7061 |
-
{
|
7062 |
-
"epoch": 1.9634413843529126,
|
7063 |
-
"grad_norm": 0.45276153087615967,
|
7064 |
-
"learning_rate": 1.8128855380397532e-07,
|
7065 |
-
"loss": 2.021,
|
7066 |
-
"step": 1007
|
7067 |
-
},
|
7068 |
-
{
|
7069 |
-
"epoch": 1.9653911771874237,
|
7070 |
-
"grad_norm": 0.44774502515792847,
|
7071 |
-
"learning_rate": 1.8094585332419464e-07,
|
7072 |
-
"loss": 2.0024,
|
7073 |
-
"step": 1008
|
7074 |
-
},
|
7075 |
-
{
|
7076 |
-
"epoch": 1.9673409700219353,
|
7077 |
-
"grad_norm": 0.43734362721443176,
|
7078 |
-
"learning_rate": 1.8060315284441396e-07,
|
7079 |
-
"loss": 2.0261,
|
7080 |
-
"step": 1009
|
7081 |
-
},
|
7082 |
-
{
|
7083 |
-
"epoch": 1.9692907628564464,
|
7084 |
-
"grad_norm": 0.45293501019477844,
|
7085 |
-
"learning_rate": 1.802604523646333e-07,
|
7086 |
-
"loss": 2.0781,
|
7087 |
-
"step": 1010
|
7088 |
-
},
|
7089 |
-
{
|
7090 |
-
"epoch": 1.9712405556909578,
|
7091 |
-
"grad_norm": 0.4538004994392395,
|
7092 |
-
"learning_rate": 1.7991775188485263e-07,
|
7093 |
-
"loss": 2.0081,
|
7094 |
-
"step": 1011
|
7095 |
-
},
|
7096 |
-
{
|
7097 |
-
"epoch": 1.9731903485254692,
|
7098 |
-
"grad_norm": 0.45042964816093445,
|
7099 |
-
"learning_rate": 1.7957505140507195e-07,
|
7100 |
-
"loss": 2.0121,
|
7101 |
-
"step": 1012
|
7102 |
-
},
|
7103 |
-
{
|
7104 |
-
"epoch": 1.9751401413599805,
|
7105 |
-
"grad_norm": 0.4721399247646332,
|
7106 |
-
"learning_rate": 1.792323509252913e-07,
|
7107 |
-
"loss": 2.0071,
|
7108 |
-
"step": 1013
|
7109 |
-
},
|
7110 |
-
{
|
7111 |
-
"epoch": 1.9770899341944919,
|
7112 |
-
"grad_norm": 0.4297287166118622,
|
7113 |
-
"learning_rate": 1.788896504455106e-07,
|
7114 |
-
"loss": 2.0213,
|
7115 |
-
"step": 1014
|
7116 |
-
},
|
7117 |
-
{
|
7118 |
-
"epoch": 1.9790397270290032,
|
7119 |
-
"grad_norm": 0.4454828202724457,
|
7120 |
-
"learning_rate": 1.7854694996572994e-07,
|
7121 |
-
"loss": 2.0093,
|
7122 |
-
"step": 1015
|
7123 |
-
},
|
7124 |
-
{
|
7125 |
-
"epoch": 1.9809895198635146,
|
7126 |
-
"grad_norm": 0.4550788700580597,
|
7127 |
-
"learning_rate": 1.782042494859493e-07,
|
7128 |
-
"loss": 2.0599,
|
7129 |
-
"step": 1016
|
7130 |
-
},
|
7131 |
-
{
|
7132 |
-
"epoch": 1.9829393126980257,
|
7133 |
-
"grad_norm": 0.44854849576950073,
|
7134 |
-
"learning_rate": 1.7786154900616858e-07,
|
7135 |
-
"loss": 2.0262,
|
7136 |
-
"step": 1017
|
7137 |
-
},
|
7138 |
-
{
|
7139 |
-
"epoch": 1.9848891055325373,
|
7140 |
-
"grad_norm": 0.4477459192276001,
|
7141 |
-
"learning_rate": 1.7751884852638793e-07,
|
7142 |
-
"loss": 1.9533,
|
7143 |
-
"step": 1018
|
7144 |
-
},
|
7145 |
-
{
|
7146 |
-
"epoch": 1.9868388983670484,
|
7147 |
-
"grad_norm": 0.43663471937179565,
|
7148 |
-
"learning_rate": 1.7717614804660728e-07,
|
7149 |
-
"loss": 2.0122,
|
7150 |
-
"step": 1019
|
7151 |
-
},
|
7152 |
-
{
|
7153 |
-
"epoch": 1.98878869120156,
|
7154 |
-
"grad_norm": 0.45281800627708435,
|
7155 |
-
"learning_rate": 1.7683344756682657e-07,
|
7156 |
-
"loss": 2.0711,
|
7157 |
-
"step": 1020
|
7158 |
-
},
|
7159 |
-
{
|
7160 |
-
"epoch": 1.9907384840360711,
|
7161 |
-
"grad_norm": 0.44143861532211304,
|
7162 |
-
"learning_rate": 1.7649074708704592e-07,
|
7163 |
-
"loss": 2.0198,
|
7164 |
-
"step": 1021
|
7165 |
-
},
|
7166 |
-
{
|
7167 |
-
"epoch": 1.9926882768705825,
|
7168 |
-
"grad_norm": 0.4464763402938843,
|
7169 |
-
"learning_rate": 1.7614804660726524e-07,
|
7170 |
-
"loss": 2.0117,
|
7171 |
-
"step": 1022
|
7172 |
-
},
|
7173 |
-
{
|
7174 |
-
"epoch": 1.9946380697050938,
|
7175 |
-
"grad_norm": 0.42707762122154236,
|
7176 |
-
"learning_rate": 1.7580534612748456e-07,
|
7177 |
-
"loss": 1.9629,
|
7178 |
-
"step": 1023
|
7179 |
-
},
|
7180 |
-
{
|
7181 |
-
"epoch": 1.9965878625396052,
|
7182 |
-
"grad_norm": 0.4683617949485779,
|
7183 |
-
"learning_rate": 1.754626456477039e-07,
|
7184 |
-
"loss": 2.0467,
|
7185 |
-
"step": 1024
|
7186 |
-
},
|
7187 |
-
{
|
7188 |
-
"epoch": 1.9985376553741165,
|
7189 |
-
"grad_norm": 0.4215565025806427,
|
7190 |
-
"learning_rate": 1.7511994516792323e-07,
|
7191 |
-
"loss": 1.9545,
|
7192 |
-
"step": 1025
|
7193 |
-
},
|
7194 |
-
{
|
7195 |
-
"epoch": 1.9985376553741165,
|
7196 |
-
"eval_loss": 2.0196783542633057,
|
7197 |
-
"eval_runtime": 480.5583,
|
7198 |
-
"eval_samples_per_second": 1.294,
|
7199 |
-
"eval_steps_per_second": 0.325,
|
7200 |
-
"step": 1025
|
7201 |
}
|
7202 |
],
|
7203 |
"logging_steps": 1,
|
@@ -7217,7 +3618,7 @@
|
|
7217 |
"attributes": {}
|
7218 |
}
|
7219 |
},
|
7220 |
-
"total_flos":
|
7221 |
"train_batch_size": 2,
|
7222 |
"trial_name": null,
|
7223 |
"trial_params": null
|
|
|
1 |
{
|
2 |
+
"best_metric": 2.046032667160034,
|
3 |
+
"best_model_checkpoint": "/home/sunggeunan/data/ICL/outputs/lora/SKIML-ICL_mrqa_nq_v3/Meta-Llama-3-8B-Instruct-unanswerable-5Q-0U-0C-qa_first/checkpoint-512",
|
4 |
+
"epoch": 0.9982939312698026,
|
5 |
"eval_steps": 500,
|
6 |
+
"global_step": 512,
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
|
|
3599 |
"eval_samples_per_second": 1.293,
|
3600 |
"eval_steps_per_second": 0.324,
|
3601 |
"step": 512
|
3602 | }
3603 | ],
3604 | "logging_steps": 1,

3618 | "attributes": {}
3619 | }
3620 | },
3621 | + "total_flos": 1.5142103496146289e+18,
3622 | "train_batch_size": 2,
3623 | "trial_name": null,
3624 | "trial_params": null
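
For reference, the updated trainer_state.json can be inspected directly after downloading the repository. The snippet below is a minimal sketch, assuming a local copy of the file at ./trainer_state.json (the path is illustrative); it only reads fields that appear in this diff (best_metric, best_model_checkpoint, epoch, global_step, total_flos, train_batch_size) plus the per-step log entries, which the Hugging Face Trainer stores under log_history.

import json

# Minimal sketch: assumes a local copy of the updated trainer_state.json (path is illustrative).
with open("trainer_state.json") as f:
    state = json.load(f)

# Summary fields visible in the diff above.
print("best_metric:", state["best_metric"])                      # 2.046032667160034
print("best_model_checkpoint:", state["best_model_checkpoint"])  # .../checkpoint-512
print("epoch:", state["epoch"], "global_step:", state["global_step"])
print("total_flos:", state["total_flos"], "train_batch_size:", state["train_batch_size"])

# The per-step loss/learning_rate/eval entries added and removed by this commit
# live in the log_history list; show the last few as a sanity check.
for entry in state.get("log_history", [])[-3:]:
    print(entry)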