|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"global_step": 791, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0, |
|
"loss": 1.5654, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0, |
|
"loss": 1.5907, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0, |
|
"loss": 1.5874, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.562, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.5541, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.567, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.5696, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.6244, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.5748, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.5526, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 0, |
|
"loss": 1.552, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0, |
|
"loss": 1.5895, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0, |
|
"loss": 1.5527, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0, |
|
"loss": 1.5988, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0, |
|
"loss": 1.5558, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0, |
|
"loss": 1.5579, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0, |
|
"loss": 1.5671, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.0, |
|
"loss": 1.5648, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.581044635020827e-06, |
|
"loss": 1.6027, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.675821459916692e-06, |
|
"loss": 1.589, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.162089270041654e-06, |
|
"loss": 1.5901, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.31492814710052e-06, |
|
"loss": 1.5896, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.256866094937518e-06, |
|
"loss": 1.5966, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0053263282233697e-05, |
|
"loss": 1.5472, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0743133905062482e-05, |
|
"loss": 1.557, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1351642919833384e-05, |
|
"loss": 1.5989, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1895972782121347e-05, |
|
"loss": 1.5863, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2388379038142511e-05, |
|
"loss": 1.5452, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2837910729958346e-05, |
|
"loss": 1.5958, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.325143979986714e-05, |
|
"loss": 1.6016, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3634307917254523e-05, |
|
"loss": 1.5522, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.399074960701721e-05, |
|
"loss": 1.5394, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4324178540083309e-05, |
|
"loss": 1.547, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4637386878506516e-05, |
|
"loss": 1.5934, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4932687554854209e-05, |
|
"loss": 1.6129, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5212018031974513e-05, |
|
"loss": 1.554, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5477017417142176e-05, |
|
"loss": 1.5764, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5729084742150388e-05, |
|
"loss": 1.5516, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.596942367316334e-05, |
|
"loss": 1.6008, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6199077273922286e-05, |
|
"loss": 1.5685, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6418955364979175e-05, |
|
"loss": 1.5862, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.662985629420104e-05, |
|
"loss": 1.5953, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6832484434887967e-05, |
|
"loss": 1.5745, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.7027464379750072e-05, |
|
"loss": 1.5314, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.721535255227535e-05, |
|
"loss": 1.5197, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.7396646779634733e-05, |
|
"loss": 1.5728, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.757179424203804e-05, |
|
"loss": 1.5845, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.7741198118150898e-05, |
|
"loss": 1.5758, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.7905223175104135e-05, |
|
"loss": 1.5164, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.80642004980592e-05, |
|
"loss": 1.5868, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.8218431513527344e-05, |
|
"loss": 1.5559, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.8368191429334215e-05, |
|
"loss": 1.5845, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.8513732189875035e-05, |
|
"loss": 1.5432, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.8655285026376745e-05, |
|
"loss": 1.5356, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.8793062666995343e-05, |
|
"loss": 1.5105, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.892726125978383e-05, |
|
"loss": 1.5472, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9058062052163002e-05, |
|
"loss": 1.5493, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9185632862982668e-05, |
|
"loss": 1.5358, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9310129377171218e-05, |
|
"loss": 1.5605, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9431696288028554e-05, |
|
"loss": 1.5145, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9550468308184164e-05, |
|
"loss": 1.5266, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.96665710669339e-05, |
|
"loss": 1.4972, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9780121908943112e-05, |
|
"loss": 1.5375, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.98912306070467e-05, |
|
"loss": 1.4777, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2e-05, |
|
"loss": 1.5253, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2e-05, |
|
"loss": 1.4702, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9986962190352023e-05, |
|
"loss": 1.5171, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9973924380704045e-05, |
|
"loss": 1.5054, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9960886571056063e-05, |
|
"loss": 1.494, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9947848761408084e-05, |
|
"loss": 1.5168, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9934810951760106e-05, |
|
"loss": 1.4954, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9921773142112127e-05, |
|
"loss": 1.521, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.990873533246415e-05, |
|
"loss": 1.4507, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9895697522816167e-05, |
|
"loss": 1.4855, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.988265971316819e-05, |
|
"loss": 1.4561, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.986962190352021e-05, |
|
"loss": 1.4458, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9856584093872232e-05, |
|
"loss": 1.4755, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.984354628422425e-05, |
|
"loss": 1.4816, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9830508474576275e-05, |
|
"loss": 1.4276, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9817470664928293e-05, |
|
"loss": 1.432, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9804432855280314e-05, |
|
"loss": 1.4384, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9791395045632336e-05, |
|
"loss": 1.468, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9778357235984354e-05, |
|
"loss": 1.455, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.976531942633638e-05, |
|
"loss": 1.4219, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9752281616688397e-05, |
|
"loss": 1.4377, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.973924380704042e-05, |
|
"loss": 1.4341, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.972620599739244e-05, |
|
"loss": 1.4198, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9713168187744462e-05, |
|
"loss": 1.4469, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.970013037809648e-05, |
|
"loss": 1.4092, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.96870925684485e-05, |
|
"loss": 1.4312, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9674054758800523e-05, |
|
"loss": 1.4139, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9661016949152545e-05, |
|
"loss": 1.4419, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9647979139504566e-05, |
|
"loss": 1.4158, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9634941329856584e-05, |
|
"loss": 1.4456, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9621903520208606e-05, |
|
"loss": 1.4178, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9608865710560627e-05, |
|
"loss": 1.4132, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.959582790091265e-05, |
|
"loss": 1.4174, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.958279009126467e-05, |
|
"loss": 1.4387, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.956975228161669e-05, |
|
"loss": 1.3621, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9556714471968713e-05, |
|
"loss": 1.4186, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.954367666232073e-05, |
|
"loss": 1.3762, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9530638852672753e-05, |
|
"loss": 1.4232, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9517601043024775e-05, |
|
"loss": 1.3823, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9504563233376793e-05, |
|
"loss": 1.3579, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9491525423728814e-05, |
|
"loss": 1.3751, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9478487614080836e-05, |
|
"loss": 1.3787, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9465449804432857e-05, |
|
"loss": 1.3884, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9452411994784876e-05, |
|
"loss": 1.4022, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.94393741851369e-05, |
|
"loss": 1.3467, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.942633637548892e-05, |
|
"loss": 1.3571, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.941329856584094e-05, |
|
"loss": 1.4038, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.940026075619296e-05, |
|
"loss": 1.3687, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.938722294654498e-05, |
|
"loss": 1.3716, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9374185136897005e-05, |
|
"loss": 1.3608, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9361147327249023e-05, |
|
"loss": 1.3607, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9348109517601044e-05, |
|
"loss": 1.342, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9335071707953066e-05, |
|
"loss": 1.3776, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9322033898305087e-05, |
|
"loss": 1.3895, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.930899608865711e-05, |
|
"loss": 1.3678, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9295958279009127e-05, |
|
"loss": 1.3893, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.928292046936115e-05, |
|
"loss": 1.373, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.926988265971317e-05, |
|
"loss": 1.3845, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9256844850065192e-05, |
|
"loss": 1.3303, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.924380704041721e-05, |
|
"loss": 1.3756, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.923076923076923e-05, |
|
"loss": 1.3575, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9217731421121253e-05, |
|
"loss": 1.3395, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9204693611473274e-05, |
|
"loss": 1.3496, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9191655801825296e-05, |
|
"loss": 1.3474, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9178617992177314e-05, |
|
"loss": 1.3518, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.916558018252934e-05, |
|
"loss": 1.3422, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9152542372881357e-05, |
|
"loss": 1.3752, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.913950456323338e-05, |
|
"loss": 1.3889, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.91264667535854e-05, |
|
"loss": 1.354, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.911342894393742e-05, |
|
"loss": 1.3274, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.910039113428944e-05, |
|
"loss": 1.3337, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.908735332464146e-05, |
|
"loss": 1.3867, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9074315514993483e-05, |
|
"loss": 1.3417, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9061277705345505e-05, |
|
"loss": 1.3513, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9048239895697526e-05, |
|
"loss": 1.3544, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9035202086049544e-05, |
|
"loss": 1.3694, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9022164276401566e-05, |
|
"loss": 1.3424, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9009126466753587e-05, |
|
"loss": 1.3429, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8996088657105605e-05, |
|
"loss": 1.3328, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.898305084745763e-05, |
|
"loss": 1.3157, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.897001303780965e-05, |
|
"loss": 1.335, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.895697522816167e-05, |
|
"loss": 1.3044, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.894393741851369e-05, |
|
"loss": 1.3512, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8930899608865713e-05, |
|
"loss": 1.3273, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8917861799217735e-05, |
|
"loss": 1.3615, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8904823989569753e-05, |
|
"loss": 1.3544, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8891786179921774e-05, |
|
"loss": 1.3354, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8878748370273796e-05, |
|
"loss": 1.3845, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8865710560625817e-05, |
|
"loss": 1.3317, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8852672750977836e-05, |
|
"loss": 1.3263, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8839634941329857e-05, |
|
"loss": 1.3234, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.882659713168188e-05, |
|
"loss": 1.3269, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.88135593220339e-05, |
|
"loss": 1.2842, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.880052151238592e-05, |
|
"loss": 1.3395, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.878748370273794e-05, |
|
"loss": 1.312, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8774445893089965e-05, |
|
"loss": 1.2876, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8761408083441983e-05, |
|
"loss": 1.3389, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8748370273794004e-05, |
|
"loss": 1.3048, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8735332464146026e-05, |
|
"loss": 1.3628, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8722294654498044e-05, |
|
"loss": 1.303, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.870925684485007e-05, |
|
"loss": 1.3248, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8696219035202087e-05, |
|
"loss": 1.3054, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.868318122555411e-05, |
|
"loss": 1.3245, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.867014341590613e-05, |
|
"loss": 1.3425, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8657105606258152e-05, |
|
"loss": 1.3424, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.864406779661017e-05, |
|
"loss": 1.3203, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.863102998696219e-05, |
|
"loss": 1.3473, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8617992177314213e-05, |
|
"loss": 1.366, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.860495436766623e-05, |
|
"loss": 1.2986, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8591916558018256e-05, |
|
"loss": 1.3368, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8578878748370274e-05, |
|
"loss": 1.3269, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8565840938722296e-05, |
|
"loss": 1.2887, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8552803129074317e-05, |
|
"loss": 1.3384, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.853976531942634e-05, |
|
"loss": 1.3027, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.852672750977836e-05, |
|
"loss": 1.308, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.851368970013038e-05, |
|
"loss": 1.3005, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.85006518904824e-05, |
|
"loss": 1.3064, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.848761408083442e-05, |
|
"loss": 1.3042, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8474576271186443e-05, |
|
"loss": 1.2847, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 1.2906, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8448500651890483e-05, |
|
"loss": 1.3507, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8435462842242504e-05, |
|
"loss": 1.3068, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8422425032594526e-05, |
|
"loss": 1.3324, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8409387222946547e-05, |
|
"loss": 1.324, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8396349413298566e-05, |
|
"loss": 1.3074, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.838331160365059e-05, |
|
"loss": 1.3286, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.837027379400261e-05, |
|
"loss": 1.2745, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.835723598435463e-05, |
|
"loss": 1.25, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.834419817470665e-05, |
|
"loss": 1.3082, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.833116036505867e-05, |
|
"loss": 1.3041, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8318122555410695e-05, |
|
"loss": 1.2632, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8305084745762713e-05, |
|
"loss": 1.2894, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8292046936114734e-05, |
|
"loss": 1.292, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8279009126466756e-05, |
|
"loss": 1.3499, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8265971316818777e-05, |
|
"loss": 1.3208, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8252933507170796e-05, |
|
"loss": 1.3112, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8239895697522817e-05, |
|
"loss": 1.3215, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.822685788787484e-05, |
|
"loss": 1.3517, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.821382007822686e-05, |
|
"loss": 1.2925, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8200782268578882e-05, |
|
"loss": 1.3116, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.81877444589309e-05, |
|
"loss": 1.3151, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.817470664928292e-05, |
|
"loss": 1.2968, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8161668839634943e-05, |
|
"loss": 1.3361, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8148631029986964e-05, |
|
"loss": 1.2667, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.8135593220338986e-05, |
|
"loss": 1.3018, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.8122555410691004e-05, |
|
"loss": 1.3289, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.8109517601043026e-05, |
|
"loss": 1.2583, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.8096479791395047e-05, |
|
"loss": 1.3079, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.808344198174707e-05, |
|
"loss": 1.303, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.807040417209909e-05, |
|
"loss": 1.325, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.805736636245111e-05, |
|
"loss": 1.264, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.804432855280313e-05, |
|
"loss": 1.2833, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.803129074315515e-05, |
|
"loss": 1.2604, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.8018252933507173e-05, |
|
"loss": 1.3169, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.800521512385919e-05, |
|
"loss": 1.3234, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7992177314211213e-05, |
|
"loss": 1.2804, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7979139504563234e-05, |
|
"loss": 1.2938, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7966101694915256e-05, |
|
"loss": 1.3267, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7953063885267277e-05, |
|
"loss": 1.2988, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7940026075619295e-05, |
|
"loss": 1.34, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.792698826597132e-05, |
|
"loss": 1.2924, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.791395045632334e-05, |
|
"loss": 1.276, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.790091264667536e-05, |
|
"loss": 1.3103, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.788787483702738e-05, |
|
"loss": 1.3031, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.78748370273794e-05, |
|
"loss": 1.2872, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7861799217731425e-05, |
|
"loss": 1.301, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7848761408083443e-05, |
|
"loss": 1.239, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7835723598435464e-05, |
|
"loss": 1.3019, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7822685788787486e-05, |
|
"loss": 1.3147, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7809647979139507e-05, |
|
"loss": 1.2756, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7796610169491526e-05, |
|
"loss": 1.2791, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7783572359843547e-05, |
|
"loss": 1.2959, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.777053455019557e-05, |
|
"loss": 1.2891, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7757496740547587e-05, |
|
"loss": 1.2815, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.774445893089961e-05, |
|
"loss": 1.304, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.773142112125163e-05, |
|
"loss": 1.2603, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.771838331160365e-05, |
|
"loss": 1.3018, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7705345501955673e-05, |
|
"loss": 1.3156, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7692307692307694e-05, |
|
"loss": 1.262, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7679269882659716e-05, |
|
"loss": 1.2852, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7666232073011734e-05, |
|
"loss": 1.2615, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7653194263363756e-05, |
|
"loss": 1.3041, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7640156453715777e-05, |
|
"loss": 1.2882, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.76271186440678e-05, |
|
"loss": 1.2821, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.761408083441982e-05, |
|
"loss": 1.2638, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.760104302477184e-05, |
|
"loss": 1.3298, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.758800521512386e-05, |
|
"loss": 1.2759, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.757496740547588e-05, |
|
"loss": 1.2672, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7561929595827903e-05, |
|
"loss": 1.248, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.754889178617992e-05, |
|
"loss": 1.2805, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7535853976531946e-05, |
|
"loss": 1.2679, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7522816166883964e-05, |
|
"loss": 1.2874, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7509778357235986e-05, |
|
"loss": 1.2874, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7496740547588007e-05, |
|
"loss": 1.2979, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7483702737940025e-05, |
|
"loss": 1.2655, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.747066492829205e-05, |
|
"loss": 1.2812, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.745762711864407e-05, |
|
"loss": 1.261, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.744458930899609e-05, |
|
"loss": 1.3212, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.743155149934811e-05, |
|
"loss": 1.2789, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7418513689700133e-05, |
|
"loss": 1.3164, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.740547588005215e-05, |
|
"loss": 1.2963, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7392438070404173e-05, |
|
"loss": 1.2548, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7379400260756194e-05, |
|
"loss": 1.2684, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7366362451108216e-05, |
|
"loss": 1.286, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7353324641460237e-05, |
|
"loss": 1.3213, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7340286831812255e-05, |
|
"loss": 1.3036, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7327249022164277e-05, |
|
"loss": 1.2478, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.73142112125163e-05, |
|
"loss": 1.307, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.730117340286832e-05, |
|
"loss": 1.2959, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.728813559322034e-05, |
|
"loss": 1.238, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.727509778357236e-05, |
|
"loss": 1.2871, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.726205997392438e-05, |
|
"loss": 1.3219, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7249022164276403e-05, |
|
"loss": 1.283, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7235984354628424e-05, |
|
"loss": 1.2689, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7222946544980446e-05, |
|
"loss": 1.2626, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7209908735332464e-05, |
|
"loss": 1.2833, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.7196870925684486e-05, |
|
"loss": 1.2628, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.7183833116036507e-05, |
|
"loss": 1.2903, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.717079530638853e-05, |
|
"loss": 1.2909, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.7157757496740547e-05, |
|
"loss": 1.2816, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.714471968709257e-05, |
|
"loss": 1.3226, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.713168187744459e-05, |
|
"loss": 1.252, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.711864406779661e-05, |
|
"loss": 1.3066, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.7105606258148633e-05, |
|
"loss": 1.2614, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.709256844850065e-05, |
|
"loss": 1.2714, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.7079530638852676e-05, |
|
"loss": 1.2695, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.7066492829204694e-05, |
|
"loss": 1.2495, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.7053455019556716e-05, |
|
"loss": 1.3086, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.7040417209908737e-05, |
|
"loss": 1.2717, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.702737940026076e-05, |
|
"loss": 1.2803, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.7014341590612777e-05, |
|
"loss": 1.3202, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.70013037809648e-05, |
|
"loss": 1.2826, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.698826597131682e-05, |
|
"loss": 1.2496, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.697522816166884e-05, |
|
"loss": 1.2594, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6962190352020863e-05, |
|
"loss": 1.2823, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.694915254237288e-05, |
|
"loss": 1.2816, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6936114732724903e-05, |
|
"loss": 1.3027, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6923076923076924e-05, |
|
"loss": 1.2732, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6910039113428946e-05, |
|
"loss": 1.2601, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6897001303780967e-05, |
|
"loss": 1.2765, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6883963494132985e-05, |
|
"loss": 1.2701, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.687092568448501e-05, |
|
"loss": 1.3153, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.685788787483703e-05, |
|
"loss": 1.2693, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.684485006518905e-05, |
|
"loss": 1.2637, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.683181225554107e-05, |
|
"loss": 1.2845, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.681877444589309e-05, |
|
"loss": 1.2961, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.680573663624511e-05, |
|
"loss": 1.2662, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6792698826597133e-05, |
|
"loss": 1.2484, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6779661016949154e-05, |
|
"loss": 1.2079, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6766623207301176e-05, |
|
"loss": 1.2853, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6753585397653197e-05, |
|
"loss": 1.2921, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6740547588005215e-05, |
|
"loss": 1.2922, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6727509778357237e-05, |
|
"loss": 1.2723, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.671447196870926e-05, |
|
"loss": 1.2664, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6701434159061277e-05, |
|
"loss": 1.2556, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.66883963494133e-05, |
|
"loss": 1.2946, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.667535853976532e-05, |
|
"loss": 1.2504, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.666232073011734e-05, |
|
"loss": 1.2742, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6649282920469363e-05, |
|
"loss": 1.2996, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6636245110821384e-05, |
|
"loss": 1.2412, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6623207301173406e-05, |
|
"loss": 1.2687, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6610169491525424e-05, |
|
"loss": 1.2854, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6597131681877446e-05, |
|
"loss": 1.2737, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6584093872229467e-05, |
|
"loss": 1.2399, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.657105606258149e-05, |
|
"loss": 1.2695, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6558018252933507e-05, |
|
"loss": 1.264, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.654498044328553e-05, |
|
"loss": 1.2629, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.653194263363755e-05, |
|
"loss": 1.27, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.651890482398957e-05, |
|
"loss": 1.243, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6505867014341593e-05, |
|
"loss": 1.2568, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.649282920469361e-05, |
|
"loss": 1.2618, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6479791395045636e-05, |
|
"loss": 1.2542, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6466753585397654e-05, |
|
"loss": 1.2521, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6453715775749676e-05, |
|
"loss": 1.2804, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6440677966101697e-05, |
|
"loss": 1.2739, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6427640156453715e-05, |
|
"loss": 1.2427, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6414602346805737e-05, |
|
"loss": 1.287, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.640156453715776e-05, |
|
"loss": 1.2985, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.638852672750978e-05, |
|
"loss": 1.2667, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.63754889178618e-05, |
|
"loss": 1.257, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6362451108213823e-05, |
|
"loss": 1.2571, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.634941329856584e-05, |
|
"loss": 1.2734, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6336375488917863e-05, |
|
"loss": 1.2654, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6323337679269884e-05, |
|
"loss": 1.2743, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6310299869621902e-05, |
|
"loss": 1.2706, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6297262059973927e-05, |
|
"loss": 1.2262, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6284224250325945e-05, |
|
"loss": 1.2726, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6271186440677967e-05, |
|
"loss": 1.2593, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.625814863102999e-05, |
|
"loss": 1.2729, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.624511082138201e-05, |
|
"loss": 1.29, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.623207301173403e-05, |
|
"loss": 1.2191, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.621903520208605e-05, |
|
"loss": 1.2603, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.620599739243807e-05, |
|
"loss": 1.2557, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6192959582790093e-05, |
|
"loss": 1.2937, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6179921773142114e-05, |
|
"loss": 1.3005, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.6166883963494132e-05, |
|
"loss": 1.2596, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.6153846153846154e-05, |
|
"loss": 1.3081, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.6140808344198176e-05, |
|
"loss": 1.2283, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.6127770534550197e-05, |
|
"loss": 1.24, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.611473272490222e-05, |
|
"loss": 1.2529, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.6101694915254237e-05, |
|
"loss": 1.2649, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.608865710560626e-05, |
|
"loss": 1.2634, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.607561929595828e-05, |
|
"loss": 1.2498, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.60625814863103e-05, |
|
"loss": 1.2787, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.6049543676662323e-05, |
|
"loss": 1.319, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.603650586701434e-05, |
|
"loss": 1.2584, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.6023468057366366e-05, |
|
"loss": 1.2662, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.6010430247718384e-05, |
|
"loss": 1.2046, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5997392438070406e-05, |
|
"loss": 1.246, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5984354628422427e-05, |
|
"loss": 1.2365, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.597131681877445e-05, |
|
"loss": 1.2535, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5958279009126467e-05, |
|
"loss": 1.2272, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.594524119947849e-05, |
|
"loss": 1.1935, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.593220338983051e-05, |
|
"loss": 1.2494, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5919165580182528e-05, |
|
"loss": 1.2434, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5906127770534553e-05, |
|
"loss": 1.2443, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.589308996088657e-05, |
|
"loss": 1.2432, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5880052151238593e-05, |
|
"loss": 1.2986, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5867014341590614e-05, |
|
"loss": 1.2634, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5853976531942636e-05, |
|
"loss": 1.2103, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5840938722294657e-05, |
|
"loss": 1.2687, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5827900912646675e-05, |
|
"loss": 1.2755, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5814863102998697e-05, |
|
"loss": 1.2452, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.580182529335072e-05, |
|
"loss": 1.2461, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.578878748370274e-05, |
|
"loss": 1.2838, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.577574967405476e-05, |
|
"loss": 1.2122, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.576271186440678e-05, |
|
"loss": 1.2466, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.57496740547588e-05, |
|
"loss": 1.248, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5736636245110823e-05, |
|
"loss": 1.281, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5723598435462844e-05, |
|
"loss": 1.2549, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5710560625814862e-05, |
|
"loss": 1.2725, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5697522816166887e-05, |
|
"loss": 1.264, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5684485006518905e-05, |
|
"loss": 1.2319, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5671447196870927e-05, |
|
"loss": 1.2664, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.565840938722295e-05, |
|
"loss": 1.2634, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5645371577574967e-05, |
|
"loss": 1.2524, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.563233376792699e-05, |
|
"loss": 1.2482, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.561929595827901e-05, |
|
"loss": 1.2443, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.560625814863103e-05, |
|
"loss": 1.271, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5593220338983053e-05, |
|
"loss": 1.2687, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5580182529335074e-05, |
|
"loss": 1.2257, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5567144719687092e-05, |
|
"loss": 1.2828, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5554106910039114e-05, |
|
"loss": 1.2346, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5541069100391136e-05, |
|
"loss": 1.2562, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5528031290743157e-05, |
|
"loss": 1.2599, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.551499348109518e-05, |
|
"loss": 1.2144, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5501955671447197e-05, |
|
"loss": 1.3008, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5488917861799218e-05, |
|
"loss": 1.2646, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.547588005215124e-05, |
|
"loss": 1.2612, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.546284224250326e-05, |
|
"loss": 1.2371, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5449804432855283e-05, |
|
"loss": 1.2661, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.54367666232073e-05, |
|
"loss": 1.2742, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5423728813559326e-05, |
|
"loss": 1.2406, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5410691003911344e-05, |
|
"loss": 1.2693, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5397653194263366e-05, |
|
"loss": 1.2902, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 1.3, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5371577574967405e-05, |
|
"loss": 1.2482, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5358539765319427e-05, |
|
"loss": 1.2594, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.534550195567145e-05, |
|
"loss": 1.2493, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.533246414602347e-05, |
|
"loss": 1.2396, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5319426336375488e-05, |
|
"loss": 1.2704, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5306388526727513e-05, |
|
"loss": 1.2302, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.529335071707953e-05, |
|
"loss": 1.2756, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5280312907431553e-05, |
|
"loss": 1.2382, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5267275097783574e-05, |
|
"loss": 1.2432, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5254237288135594e-05, |
|
"loss": 1.2303, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5241199478487616e-05, |
|
"loss": 1.2335, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5228161668839635e-05, |
|
"loss": 1.2186, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5215123859191657e-05, |
|
"loss": 1.2338, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5202086049543678e-05, |
|
"loss": 1.2839, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5189048239895698e-05, |
|
"loss": 1.2556, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.517601043024772e-05, |
|
"loss": 1.2371, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5162972620599741e-05, |
|
"loss": 1.1931, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5149934810951761e-05, |
|
"loss": 1.2543, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.5136897001303783e-05, |
|
"loss": 1.2205, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5123859191655803e-05, |
|
"loss": 1.2842, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5110821382007822e-05, |
|
"loss": 1.2721, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5097783572359846e-05, |
|
"loss": 1.2308, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5084745762711865e-05, |
|
"loss": 1.2686, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5071707953063885e-05, |
|
"loss": 1.2467, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5058670143415907e-05, |
|
"loss": 1.2302, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.5045632333767928e-05, |
|
"loss": 1.2609, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.503259452411995e-05, |
|
"loss": 1.2659, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.501955671447197e-05, |
|
"loss": 1.228, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.500651890482399e-05, |
|
"loss": 1.2555, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4993481095176013e-05, |
|
"loss": 1.2301, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4980443285528033e-05, |
|
"loss": 1.2632, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4967405475880053e-05, |
|
"loss": 1.2665, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4954367666232074e-05, |
|
"loss": 1.2283, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4941329856584096e-05, |
|
"loss": 1.2106, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4928292046936117e-05, |
|
"loss": 1.2329, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4915254237288137e-05, |
|
"loss": 1.2777, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4902216427640157e-05, |
|
"loss": 1.2612, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.488917861799218e-05, |
|
"loss": 1.2745, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.48761408083442e-05, |
|
"loss": 1.2434, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.486310299869622e-05, |
|
"loss": 1.2522, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4850065189048241e-05, |
|
"loss": 1.2383, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4837027379400261e-05, |
|
"loss": 1.272, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4823989569752283e-05, |
|
"loss": 1.2487, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4810951760104304e-05, |
|
"loss": 1.2676, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4797913950456324e-05, |
|
"loss": 1.2345, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4784876140808346e-05, |
|
"loss": 1.224, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4771838331160367e-05, |
|
"loss": 1.2468, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4758800521512387e-05, |
|
"loss": 1.2506, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4745762711864408e-05, |
|
"loss": 1.2106, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4732724902216428e-05, |
|
"loss": 1.2383, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4719687092568448e-05, |
|
"loss": 1.2518, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4706649282920471e-05, |
|
"loss": 1.2449, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4693611473272491e-05, |
|
"loss": 1.2302, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4680573663624513e-05, |
|
"loss": 1.2455, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4667535853976533e-05, |
|
"loss": 1.2515, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4654498044328554e-05, |
|
"loss": 1.2377, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4641460234680576e-05, |
|
"loss": 1.2371, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4628422425032595e-05, |
|
"loss": 1.1954, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4615384615384615e-05, |
|
"loss": 1.2476, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4602346805736639e-05, |
|
"loss": 1.2271, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4589308996088658e-05, |
|
"loss": 1.2389, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4576271186440678e-05, |
|
"loss": 1.2675, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.45632333767927e-05, |
|
"loss": 1.2733, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.455019556714472e-05, |
|
"loss": 1.2128, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4537157757496743e-05, |
|
"loss": 1.2449, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4524119947848763e-05, |
|
"loss": 1.2703, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4511082138200782e-05, |
|
"loss": 1.2638, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4498044328552806e-05, |
|
"loss": 1.2646, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4485006518904826e-05, |
|
"loss": 1.2557, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4471968709256845e-05, |
|
"loss": 1.2294, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4458930899608867e-05, |
|
"loss": 1.2352, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4445893089960887e-05, |
|
"loss": 1.2557, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.443285528031291e-05, |
|
"loss": 1.2339, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.441981747066493e-05, |
|
"loss": 1.2534, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.440677966101695e-05, |
|
"loss": 1.2535, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4393741851368971e-05, |
|
"loss": 1.2371, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4380704041720993e-05, |
|
"loss": 1.2476, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4367666232073013e-05, |
|
"loss": 1.2487, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4354628422425034e-05, |
|
"loss": 1.23, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4341590612777054e-05, |
|
"loss": 1.2228, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4328552803129077e-05, |
|
"loss": 1.2144, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4315514993481097e-05, |
|
"loss": 1.1974, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4302477183833117e-05, |
|
"loss": 1.2505, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4289439374185138e-05, |
|
"loss": 1.2189, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4276401564537158e-05, |
|
"loss": 1.3004, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.426336375488918e-05, |
|
"loss": 1.2303, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4250325945241201e-05, |
|
"loss": 1.2507, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4237288135593221e-05, |
|
"loss": 1.2526, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4224250325945241e-05, |
|
"loss": 1.1986, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4211212516297264e-05, |
|
"loss": 1.256, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4198174706649284e-05, |
|
"loss": 1.2341, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4185136897001306e-05, |
|
"loss": 1.2296, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4172099087353325e-05, |
|
"loss": 1.222, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4159061277705345e-05, |
|
"loss": 1.2379, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4146023468057368e-05, |
|
"loss": 1.266, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4132985658409388e-05, |
|
"loss": 1.2318, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4119947848761408e-05, |
|
"loss": 1.224, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4106910039113431e-05, |
|
"loss": 1.2378, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4093872229465451e-05, |
|
"loss": 1.2502, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4080834419817473e-05, |
|
"loss": 1.2629, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4067796610169493e-05, |
|
"loss": 1.2478, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4054758800521512e-05, |
|
"loss": 1.2451, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4041720990873536e-05, |
|
"loss": 1.2341, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4028683181225555e-05, |
|
"loss": 1.2424, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4015645371577575e-05, |
|
"loss": 1.2479, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4002607561929597e-05, |
|
"loss": 1.2714, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3989569752281618e-05, |
|
"loss": 1.2075, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3976531942633638e-05, |
|
"loss": 1.2098, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.396349413298566e-05, |
|
"loss": 1.2498, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.395045632333768e-05, |
|
"loss": 1.2395, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3937418513689703e-05, |
|
"loss": 1.2181, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3924380704041723e-05, |
|
"loss": 1.2526, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3911342894393742e-05, |
|
"loss": 1.2103, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3898305084745764e-05, |
|
"loss": 1.2539, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3885267275097784e-05, |
|
"loss": 1.2194, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3872229465449805e-05, |
|
"loss": 1.2319, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3859191655801827e-05, |
|
"loss": 1.2878, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3846153846153847e-05, |
|
"loss": 1.2428, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.383311603650587e-05, |
|
"loss": 1.2076, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.382007822685789e-05, |
|
"loss": 1.2354, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.380704041720991e-05, |
|
"loss": 1.215, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3794002607561931e-05, |
|
"loss": 1.2285, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3780964797913951e-05, |
|
"loss": 1.2247, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3767926988265971e-05, |
|
"loss": 1.2616, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3754889178617994e-05, |
|
"loss": 1.2357, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3741851368970014e-05, |
|
"loss": 1.2456, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3728813559322034e-05, |
|
"loss": 1.2196, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3715775749674057e-05, |
|
"loss": 1.2515, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3702737940026077e-05, |
|
"loss": 1.2621, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3689700130378098e-05, |
|
"loss": 1.2527, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3676662320730118e-05, |
|
"loss": 1.2391, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3663624511082138e-05, |
|
"loss": 1.2236, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3650586701434161e-05, |
|
"loss": 1.2314, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3637548891786181e-05, |
|
"loss": 1.2709, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3624511082138201e-05, |
|
"loss": 1.2664, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3611473272490223e-05, |
|
"loss": 1.2439, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3598435462842244e-05, |
|
"loss": 1.2516, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3585397653194266e-05, |
|
"loss": 1.1953, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3572359843546285e-05, |
|
"loss": 1.2264, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3559322033898305e-05, |
|
"loss": 1.2375, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3546284224250328e-05, |
|
"loss": 1.2517, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3533246414602348e-05, |
|
"loss": 1.2524, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3520208604954368e-05, |
|
"loss": 1.2576, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.350717079530639e-05, |
|
"loss": 1.2208, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.349413298565841e-05, |
|
"loss": 1.2578, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3481095176010431e-05, |
|
"loss": 1.2454, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3468057366362453e-05, |
|
"loss": 1.2465, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3455019556714472e-05, |
|
"loss": 1.1848, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3441981747066496e-05, |
|
"loss": 1.2263, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3428943937418515e-05, |
|
"loss": 1.2611, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3415906127770535e-05, |
|
"loss": 1.267, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3402868318122557e-05, |
|
"loss": 1.2039, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3389830508474577e-05, |
|
"loss": 1.2028, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3376792698826597e-05, |
|
"loss": 1.2123, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.336375488917862e-05, |
|
"loss": 1.2323, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.335071707953064e-05, |
|
"loss": 1.2263, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3337679269882661e-05, |
|
"loss": 1.2428, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3324641460234683e-05, |
|
"loss": 1.2151, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3311603650586703e-05, |
|
"loss": 1.2126, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3298565840938724e-05, |
|
"loss": 1.2164, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3285528031290744e-05, |
|
"loss": 1.2487, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3272490221642764e-05, |
|
"loss": 1.2478, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3259452411994787e-05, |
|
"loss": 1.2043, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3246414602346807e-05, |
|
"loss": 1.24, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3233376792698827e-05, |
|
"loss": 1.2255, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3220338983050848e-05, |
|
"loss": 1.2319, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.320730117340287e-05, |
|
"loss": 1.2208, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3194263363754891e-05, |
|
"loss": 1.2074, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3181225554106911e-05, |
|
"loss": 1.2559, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3168187744458931e-05, |
|
"loss": 1.2375, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3155149934810954e-05, |
|
"loss": 1.2394, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3142112125162974e-05, |
|
"loss": 1.1995, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3129074315514994e-05, |
|
"loss": 1.2228, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3116036505867015e-05, |
|
"loss": 1.2177, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3102998696219035e-05, |
|
"loss": 1.212, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3089960886571058e-05, |
|
"loss": 1.2421, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3076923076923078e-05, |
|
"loss": 1.1732, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3063885267275098e-05, |
|
"loss": 1.2311, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.305084745762712e-05, |
|
"loss": 1.1901, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3037809647979141e-05, |
|
"loss": 1.2166, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3024771838331161e-05, |
|
"loss": 1.2255, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3011734028683183e-05, |
|
"loss": 1.2339, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2998696219035202e-05, |
|
"loss": 1.1895, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2985658409387226e-05, |
|
"loss": 1.249, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2972620599739245e-05, |
|
"loss": 1.2374, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2959582790091265e-05, |
|
"loss": 1.233, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2946544980443287e-05, |
|
"loss": 1.2328, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2933507170795307e-05, |
|
"loss": 1.2249, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2920469361147328e-05, |
|
"loss": 1.2308, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.290743155149935e-05, |
|
"loss": 1.2108, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.289439374185137e-05, |
|
"loss": 1.2081, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.288135593220339e-05, |
|
"loss": 1.1864, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2868318122555413e-05, |
|
"loss": 1.2281, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2855280312907432e-05, |
|
"loss": 1.2393, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2842242503259454e-05, |
|
"loss": 1.22, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2829204693611474e-05, |
|
"loss": 1.2476, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2816166883963494e-05, |
|
"loss": 1.2466, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2803129074315517e-05, |
|
"loss": 1.2136, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2790091264667537e-05, |
|
"loss": 1.2001, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2777053455019557e-05, |
|
"loss": 1.2329, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.276401564537158e-05, |
|
"loss": 1.2358, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.27509778357236e-05, |
|
"loss": 1.1984, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2737940026075621e-05, |
|
"loss": 1.2155, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2724902216427641e-05, |
|
"loss": 1.1827, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2711864406779661e-05, |
|
"loss": 1.2338, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2698826597131684e-05, |
|
"loss": 1.1949, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2685788787483704e-05, |
|
"loss": 1.257, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2672750977835724e-05, |
|
"loss": 1.2255, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2659713168187745e-05, |
|
"loss": 1.2471, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2646675358539767e-05, |
|
"loss": 1.2235, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2633637548891787e-05, |
|
"loss": 1.2341, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2620599739243808e-05, |
|
"loss": 1.27, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2607561929595828e-05, |
|
"loss": 1.2129, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2594524119947851e-05, |
|
"loss": 1.2241, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2581486310299871e-05, |
|
"loss": 1.2374, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2568448500651891e-05, |
|
"loss": 1.2211, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2555410691003912e-05, |
|
"loss": 1.2537, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2542372881355932e-05, |
|
"loss": 1.2599, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2529335071707954e-05, |
|
"loss": 1.209, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2516297262059975e-05, |
|
"loss": 1.2432, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2503259452411995e-05, |
|
"loss": 1.2017, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2490221642764018e-05, |
|
"loss": 1.2379, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2477183833116038e-05, |
|
"loss": 1.2214, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2464146023468058e-05, |
|
"loss": 1.2206, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.245110821382008e-05, |
|
"loss": 1.2076, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.24380704041721e-05, |
|
"loss": 1.2047, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.242503259452412e-05, |
|
"loss": 1.2461, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2411994784876143e-05, |
|
"loss": 1.2225, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2398956975228162e-05, |
|
"loss": 1.2384, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2385919165580182e-05, |
|
"loss": 1.2668, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2372881355932205e-05, |
|
"loss": 1.2402, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2359843546284225e-05, |
|
"loss": 1.2328, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2346805736636247e-05, |
|
"loss": 1.2308, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2333767926988267e-05, |
|
"loss": 1.2576, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2320730117340287e-05, |
|
"loss": 1.2299, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.230769230769231e-05, |
|
"loss": 1.2217, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.229465449804433e-05, |
|
"loss": 1.2507, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.228161668839635e-05, |
|
"loss": 1.2576, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2268578878748371e-05, |
|
"loss": 1.2118, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2255541069100392e-05, |
|
"loss": 1.251, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2242503259452414e-05, |
|
"loss": 1.2106, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2229465449804434e-05, |
|
"loss": 1.2234, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2216427640156454e-05, |
|
"loss": 1.1669, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2203389830508477e-05, |
|
"loss": 1.1932, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2190352020860497e-05, |
|
"loss": 1.2327, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2177314211212517e-05, |
|
"loss": 1.2292, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2164276401564538e-05, |
|
"loss": 1.2108, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2151238591916558e-05, |
|
"loss": 1.2222, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.213820078226858e-05, |
|
"loss": 1.2054, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2125162972620601e-05, |
|
"loss": 1.2456, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2112125162972621e-05, |
|
"loss": 1.2206, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2099087353324644e-05, |
|
"loss": 1.2288, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2086049543676664e-05, |
|
"loss": 1.2538, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2073011734028684e-05, |
|
"loss": 1.2056, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2059973924380705e-05, |
|
"loss": 1.1759, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2046936114732725e-05, |
|
"loss": 1.2075, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2033898305084745e-05, |
|
"loss": 1.2072, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2020860495436768e-05, |
|
"loss": 1.2133, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2007822685788788e-05, |
|
"loss": 1.2198, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.199478487614081e-05, |
|
"loss": 1.2267, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1981747066492831e-05, |
|
"loss": 1.2028, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1968709256844851e-05, |
|
"loss": 1.2196, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1955671447196873e-05, |
|
"loss": 1.1947, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1942633637548892e-05, |
|
"loss": 1.1884, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1929595827900912e-05, |
|
"loss": 1.2349, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1916558018252935e-05, |
|
"loss": 1.2426, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1903520208604955e-05, |
|
"loss": 1.217, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1890482398956975e-05, |
|
"loss": 1.207, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1877444589308997e-05, |
|
"loss": 1.2053, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1864406779661018e-05, |
|
"loss": 1.2238, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.185136897001304e-05, |
|
"loss": 1.2151, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.183833116036506e-05, |
|
"loss": 1.2343, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.182529335071708e-05, |
|
"loss": 1.2358, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1812255541069103e-05, |
|
"loss": 1.2286, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1799217731421122e-05, |
|
"loss": 1.1934, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1786179921773142e-05, |
|
"loss": 1.1842, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1773142112125164e-05, |
|
"loss": 1.2043, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1760104302477184e-05, |
|
"loss": 1.234, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1747066492829207e-05, |
|
"loss": 1.2546, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1734028683181227e-05, |
|
"loss": 1.1737, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1720990873533247e-05, |
|
"loss": 1.2408, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.170795306388527e-05, |
|
"loss": 1.2415, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.169491525423729e-05, |
|
"loss": 1.2068, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.168187744458931e-05, |
|
"loss": 1.2448, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1668839634941331e-05, |
|
"loss": 1.2314, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.165580182529335e-05, |
|
"loss": 1.2467, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1642764015645374e-05, |
|
"loss": 1.208, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1629726205997394e-05, |
|
"loss": 1.2262, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1616688396349414e-05, |
|
"loss": 1.1771, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1603650586701435e-05, |
|
"loss": 1.1926, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1590612777053457e-05, |
|
"loss": 1.2075, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1577574967405477e-05, |
|
"loss": 1.2258, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1564537157757498e-05, |
|
"loss": 1.2113, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1551499348109518e-05, |
|
"loss": 1.2128, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1538461538461538e-05, |
|
"loss": 1.2002, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1525423728813561e-05, |
|
"loss": 1.2115, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1512385919165581e-05, |
|
"loss": 1.1877, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1499348109517602e-05, |
|
"loss": 1.2522, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1486310299869622e-05, |
|
"loss": 1.2061, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1473272490221644e-05, |
|
"loss": 1.1938, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1460234680573665e-05, |
|
"loss": 1.1967, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1447196870925685e-05, |
|
"loss": 1.2426, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1434159061277705e-05, |
|
"loss": 1.1947, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1421121251629728e-05, |
|
"loss": 1.2333, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1408083441981748e-05, |
|
"loss": 1.1958, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.139504563233377e-05, |
|
"loss": 1.1732, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.138200782268579e-05, |
|
"loss": 1.2079, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.136897001303781e-05, |
|
"loss": 1.2202, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1355932203389833e-05, |
|
"loss": 1.1875, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1342894393741852e-05, |
|
"loss": 1.2174, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1329856584093872e-05, |
|
"loss": 1.1995, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1316818774445895e-05, |
|
"loss": 1.2087, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1303780964797915e-05, |
|
"loss": 1.2072, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1290743155149935e-05, |
|
"loss": 1.2104, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1277705345501957e-05, |
|
"loss": 1.192, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1264667535853976e-05, |
|
"loss": 1.2162, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1251629726206e-05, |
|
"loss": 1.1765, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.123859191655802e-05, |
|
"loss": 1.1801, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.122555410691004e-05, |
|
"loss": 1.2258, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1212516297262061e-05, |
|
"loss": 1.1937, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1199478487614082e-05, |
|
"loss": 1.2339, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1186440677966102e-05, |
|
"loss": 1.1935, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1173402868318124e-05, |
|
"loss": 1.2578, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1160365058670144e-05, |
|
"loss": 1.1941, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1147327249022167e-05, |
|
"loss": 1.2177, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1134289439374187e-05, |
|
"loss": 1.2064, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1121251629726207e-05, |
|
"loss": 1.2084, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1108213820078228e-05, |
|
"loss": 1.2031, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1095176010430248e-05, |
|
"loss": 1.2231, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.108213820078227e-05, |
|
"loss": 1.2166, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1069100391134291e-05, |
|
"loss": 1.2198, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1056062581486311e-05, |
|
"loss": 1.2103, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.104302477183833e-05, |
|
"loss": 1.2113, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1029986962190354e-05, |
|
"loss": 1.2184, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1016949152542374e-05, |
|
"loss": 1.2081, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.1003911342894395e-05, |
|
"loss": 1.2292, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0990873533246415e-05, |
|
"loss": 1.1931, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0977835723598435e-05, |
|
"loss": 1.179, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0964797913950458e-05, |
|
"loss": 1.2032, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0951760104302478e-05, |
|
"loss": 1.2179, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0938722294654498e-05, |
|
"loss": 1.2339, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.092568448500652e-05, |
|
"loss": 1.2264, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0912646675358541e-05, |
|
"loss": 1.2042, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0899608865710562e-05, |
|
"loss": 1.1958, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0886571056062582e-05, |
|
"loss": 1.2015, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0873533246414602e-05, |
|
"loss": 1.2152, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0860495436766625e-05, |
|
"loss": 1.2483, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0847457627118645e-05, |
|
"loss": 1.2361, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0834419817470665e-05, |
|
"loss": 1.2026, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0821382007822687e-05, |
|
"loss": 1.2201, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0808344198174706e-05, |
|
"loss": 1.2163, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0795306388526728e-05, |
|
"loss": 1.2191, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.078226857887875e-05, |
|
"loss": 1.2307, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.076923076923077e-05, |
|
"loss": 1.2061, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0756192959582793e-05, |
|
"loss": 1.2155, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0743155149934812e-05, |
|
"loss": 1.2186, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0730117340286832e-05, |
|
"loss": 1.2218, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0717079530638854e-05, |
|
"loss": 1.2332, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0704041720990874e-05, |
|
"loss": 1.1931, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0691003911342893e-05, |
|
"loss": 1.1971, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0677966101694917e-05, |
|
"loss": 1.2264, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0664928292046937e-05, |
|
"loss": 1.2499, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0651890482398958e-05, |
|
"loss": 1.2117, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.063885267275098e-05, |
|
"loss": 1.2285, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0625814863103e-05, |
|
"loss": 1.2216, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0612777053455021e-05, |
|
"loss": 1.2141, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.059973924380704e-05, |
|
"loss": 1.1837, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.058670143415906e-05, |
|
"loss": 1.1733, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0573663624511084e-05, |
|
"loss": 1.2407, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0560625814863104e-05, |
|
"loss": 1.198, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0547588005215125e-05, |
|
"loss": 1.3012, |
|
"step": 791 |
|
} |
|
], |
|
"max_steps": 1582, |
|
"num_train_epochs": 2, |
|
"total_flos": 8472934817464320.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|