{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 47724,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03143072667840081,
      "grad_norm": 12.45950698852539,
      "learning_rate": 4.947615455535999e-05,
      "loss": 3.2597,
      "step": 500
    },
    {
      "epoch": 0.06286145335680161,
      "grad_norm": 10.082228660583496,
      "learning_rate": 4.895230911071997e-05,
      "loss": 2.8456,
      "step": 1000
    },
    {
      "epoch": 0.09429218003520241,
      "grad_norm": 11.17715835571289,
      "learning_rate": 4.842846366607996e-05,
      "loss": 2.7087,
      "step": 1500
    },
    {
      "epoch": 0.12572290671360323,
      "grad_norm": 13.012718200683594,
      "learning_rate": 4.790461822143995e-05,
      "loss": 2.5935,
      "step": 2000
    },
    {
      "epoch": 0.15715363339200403,
      "grad_norm": 11.075544357299805,
      "learning_rate": 4.738077277679994e-05,
      "loss": 2.4862,
      "step": 2500
    },
    {
      "epoch": 0.18858436007040483,
      "grad_norm": 13.529458045959473,
      "learning_rate": 4.685692733215992e-05,
      "loss": 2.4535,
      "step": 3000
    },
    {
      "epoch": 0.22001508674880563,
      "grad_norm": 10.073129653930664,
      "learning_rate": 4.633308188751991e-05,
      "loss": 2.346,
      "step": 3500
    },
    {
      "epoch": 0.25144581342720645,
      "grad_norm": 11.378911018371582,
      "learning_rate": 4.580923644287989e-05,
      "loss": 2.3565,
      "step": 4000
    },
    {
      "epoch": 0.28287654010560725,
      "grad_norm": 10.559321403503418,
      "learning_rate": 4.528539099823988e-05,
      "loss": 2.2937,
      "step": 4500
    },
    {
      "epoch": 0.31430726678400805,
      "grad_norm": 10.023640632629395,
      "learning_rate": 4.476154555359987e-05,
      "loss": 2.2474,
      "step": 5000
    },
    {
      "epoch": 0.34573799346240885,
      "grad_norm": 9.890427589416504,
      "learning_rate": 4.423770010895986e-05,
      "loss": 2.1995,
      "step": 5500
    },
    {
      "epoch": 0.37716872014080965,
      "grad_norm": 11.694580078125,
      "learning_rate": 4.3713854664319845e-05,
      "loss": 2.2092,
      "step": 6000
    },
    {
      "epoch": 0.40859944681921045,
      "grad_norm": 10.519980430603027,
      "learning_rate": 4.319000921967983e-05,
      "loss": 2.1238,
      "step": 6500
    },
    {
      "epoch": 0.44003017349761125,
      "grad_norm": 9.350607872009277,
      "learning_rate": 4.2666163775039816e-05,
      "loss": 2.1271,
      "step": 7000
    },
    {
      "epoch": 0.47146090017601205,
      "grad_norm": 10.53451919555664,
      "learning_rate": 4.21423183303998e-05,
      "loss": 2.167,
      "step": 7500
    },
    {
      "epoch": 0.5028916268544129,
      "grad_norm": 11.468996047973633,
      "learning_rate": 4.1618472885759786e-05,
      "loss": 2.1129,
      "step": 8000
    },
    {
      "epoch": 0.5343223535328137,
      "grad_norm": 8.491691589355469,
      "learning_rate": 4.1094627441119775e-05,
      "loss": 2.0646,
      "step": 8500
    },
    {
      "epoch": 0.5657530802112145,
      "grad_norm": 20.68869400024414,
      "learning_rate": 4.0570781996479764e-05,
      "loss": 2.091,
      "step": 9000
    },
    {
      "epoch": 0.5971838068896153,
      "grad_norm": 8.865952491760254,
      "learning_rate": 4.0046936551839745e-05,
      "loss": 2.0602,
      "step": 9500
    },
    {
      "epoch": 0.6286145335680161,
      "grad_norm": 12.874096870422363,
      "learning_rate": 3.9523091107199734e-05,
      "loss": 2.0064,
      "step": 10000
    },
    {
      "epoch": 0.6600452602464169,
      "grad_norm": 9.40224838256836,
      "learning_rate": 3.8999245662559716e-05,
      "loss": 2.0428,
      "step": 10500
    },
    {
      "epoch": 0.6914759869248177,
      "grad_norm": 10.570882797241211,
      "learning_rate": 3.8475400217919705e-05,
      "loss": 2.0254,
      "step": 11000
    },
    {
      "epoch": 0.7229067136032185,
      "grad_norm": 12.117438316345215,
      "learning_rate": 3.795155477327969e-05,
      "loss": 2.003,
      "step": 11500
    },
    {
      "epoch": 0.7543374402816193,
      "grad_norm": 11.80452823638916,
      "learning_rate": 3.742770932863968e-05,
      "loss": 1.9471,
      "step": 12000
    },
    {
      "epoch": 0.7857681669600202,
      "grad_norm": 9.42381477355957,
      "learning_rate": 3.690386388399967e-05,
      "loss": 1.92,
      "step": 12500
    },
    {
      "epoch": 0.8171988936384209,
      "grad_norm": 14.402335166931152,
      "learning_rate": 3.638001843935965e-05,
      "loss": 1.939,
      "step": 13000
    },
    {
      "epoch": 0.8486296203168218,
      "grad_norm": 7.873473644256592,
      "learning_rate": 3.585617299471964e-05,
      "loss": 1.9271,
      "step": 13500
    },
    {
      "epoch": 0.8800603469952225,
      "grad_norm": 8.49427604675293,
      "learning_rate": 3.533232755007962e-05,
      "loss": 1.962,
      "step": 14000
    },
    {
      "epoch": 0.9114910736736234,
      "grad_norm": 9.598845481872559,
      "learning_rate": 3.480848210543961e-05,
      "loss": 1.8921,
      "step": 14500
    },
    {
      "epoch": 0.9429218003520241,
      "grad_norm": 10.971700668334961,
      "learning_rate": 3.42846366607996e-05,
      "loss": 1.8815,
      "step": 15000
    },
    {
      "epoch": 0.974352527030425,
      "grad_norm": 8.383946418762207,
      "learning_rate": 3.376079121615959e-05,
      "loss": 1.8714,
      "step": 15500
    },
    {
      "epoch": 1.0057832537088258,
      "grad_norm": 12.830138206481934,
      "learning_rate": 3.323694577151957e-05,
      "loss": 1.8797,
      "step": 16000
    },
    {
      "epoch": 1.0372139803872265,
      "grad_norm": 11.94987964630127,
      "learning_rate": 3.271310032687956e-05,
      "loss": 1.8339,
      "step": 16500
    },
    {
      "epoch": 1.0686447070656273,
      "grad_norm": 9.588113784790039,
      "learning_rate": 3.218925488223955e-05,
      "loss": 1.7749,
      "step": 17000
    },
    {
      "epoch": 1.1000754337440282,
      "grad_norm": 10.824664115905762,
      "learning_rate": 3.166540943759953e-05,
      "loss": 1.8145,
      "step": 17500
    },
    {
      "epoch": 1.131506160422429,
      "grad_norm": 11.03293228149414,
      "learning_rate": 3.114156399295952e-05,
      "loss": 1.7979,
      "step": 18000
    },
    {
      "epoch": 1.1629368871008299,
      "grad_norm": 8.919577598571777,
      "learning_rate": 3.061771854831951e-05,
      "loss": 1.8023,
      "step": 18500
    },
    {
      "epoch": 1.1943676137792305,
      "grad_norm": 7.814962863922119,
      "learning_rate": 3.0093873103679492e-05,
      "loss": 1.7587,
      "step": 19000
    },
    {
      "epoch": 1.2257983404576314,
      "grad_norm": 7.930530071258545,
      "learning_rate": 2.9570027659039477e-05,
      "loss": 1.7956,
      "step": 19500
    },
    {
      "epoch": 1.2572290671360322,
      "grad_norm": 12.548938751220703,
      "learning_rate": 2.9046182214399466e-05,
      "loss": 1.7925,
      "step": 20000
    },
    {
      "epoch": 1.2886597938144329,
      "grad_norm": 9.993666648864746,
      "learning_rate": 2.852233676975945e-05,
      "loss": 1.7609,
      "step": 20500
    },
    {
      "epoch": 1.3200905204928337,
      "grad_norm": 15.507658958435059,
      "learning_rate": 2.7998491325119436e-05,
      "loss": 1.7692,
      "step": 21000
    },
    {
      "epoch": 1.3515212471712346,
      "grad_norm": 11.69993782043457,
      "learning_rate": 2.7474645880479428e-05,
      "loss": 1.7253,
      "step": 21500
    },
    {
      "epoch": 1.3829519738496354,
      "grad_norm": 10.937728881835938,
      "learning_rate": 2.6950800435839413e-05,
      "loss": 1.732,
      "step": 22000
    },
    {
      "epoch": 1.4143827005280363,
      "grad_norm": 8.787737846374512,
      "learning_rate": 2.64269549911994e-05,
      "loss": 1.7335,
      "step": 22500
    },
    {
      "epoch": 1.4458134272064371,
      "grad_norm": 6.934820652008057,
      "learning_rate": 2.5903109546559384e-05,
      "loss": 1.6903,
      "step": 23000
    },
    {
      "epoch": 1.4772441538848378,
      "grad_norm": 9.452235221862793,
      "learning_rate": 2.537926410191937e-05,
      "loss": 1.7124,
      "step": 23500
    },
    {
      "epoch": 1.5086748805632386,
      "grad_norm": 7.406110763549805,
      "learning_rate": 2.4855418657279358e-05,
      "loss": 1.6978,
      "step": 24000
    },
    {
      "epoch": 1.5401056072416395,
      "grad_norm": 8.471650123596191,
      "learning_rate": 2.4331573212639343e-05,
      "loss": 1.6761,
      "step": 24500
    },
    {
      "epoch": 1.57153633392004,
      "grad_norm": 8.462206840515137,
      "learning_rate": 2.380772776799933e-05,
      "loss": 1.723,
      "step": 25000
    },
    {
      "epoch": 1.602967060598441,
      "grad_norm": 10.058784484863281,
      "learning_rate": 2.3283882323359317e-05,
      "loss": 1.687,
      "step": 25500
    },
    {
      "epoch": 1.6343977872768418,
      "grad_norm": 8.863463401794434,
      "learning_rate": 2.2760036878719305e-05,
      "loss": 1.6844,
      "step": 26000
    },
    {
      "epoch": 1.6658285139552427,
      "grad_norm": 10.471988677978516,
      "learning_rate": 2.223619143407929e-05,
      "loss": 1.7151,
      "step": 26500
    },
    {
      "epoch": 1.6972592406336435,
      "grad_norm": 9.061573028564453,
      "learning_rate": 2.1712345989439276e-05,
      "loss": 1.5916,
      "step": 27000
    },
    {
      "epoch": 1.7286899673120444,
      "grad_norm": 8.41381549835205,
      "learning_rate": 2.1188500544799264e-05,
      "loss": 1.6388,
      "step": 27500
    },
    {
      "epoch": 1.760120693990445,
      "grad_norm": 9.17684268951416,
      "learning_rate": 2.066465510015925e-05,
      "loss": 1.7045,
      "step": 28000
    },
    {
      "epoch": 1.7915514206688459,
      "grad_norm": 10.983583450317383,
      "learning_rate": 2.0140809655519235e-05,
      "loss": 1.6749,
      "step": 28500
    },
    {
      "epoch": 1.8229821473472465,
      "grad_norm": 8.048336029052734,
      "learning_rate": 1.9616964210879224e-05,
      "loss": 1.6347,
      "step": 29000
    },
    {
      "epoch": 1.8544128740256474,
      "grad_norm": 9.640363693237305,
      "learning_rate": 1.909311876623921e-05,
      "loss": 1.6424,
      "step": 29500
    },
    {
      "epoch": 1.8858436007040482,
      "grad_norm": 10.264333724975586,
      "learning_rate": 1.8569273321599194e-05,
      "loss": 1.5919,
      "step": 30000
    },
    {
      "epoch": 1.917274327382449,
      "grad_norm": 10.100754737854004,
      "learning_rate": 1.8045427876959183e-05,
      "loss": 1.6477,
      "step": 30500
    },
    {
      "epoch": 1.94870505406085,
      "grad_norm": 7.901167392730713,
      "learning_rate": 1.752158243231917e-05,
      "loss": 1.636,
      "step": 31000
    },
    {
      "epoch": 1.9801357807392508,
      "grad_norm": 10.34730339050293,
      "learning_rate": 1.6997736987679156e-05,
      "loss": 1.639,
      "step": 31500
    },
    {
      "epoch": 2.0115665074176516,
      "grad_norm": 9.00157642364502,
      "learning_rate": 1.6473891543039145e-05,
      "loss": 1.6027,
      "step": 32000
    },
    {
      "epoch": 2.0429972340960525,
      "grad_norm": 8.343816757202148,
      "learning_rate": 1.595004609839913e-05,
      "loss": 1.5728,
      "step": 32500
    },
    {
      "epoch": 2.074427960774453,
      "grad_norm": 9.560211181640625,
      "learning_rate": 1.5426200653759116e-05,
      "loss": 1.5924,
      "step": 33000
    },
    {
      "epoch": 2.1058586874528538,
      "grad_norm": 9.660225868225098,
      "learning_rate": 1.49023552091191e-05,
      "loss": 1.5557,
      "step": 33500
    },
    {
      "epoch": 2.1372894141312546,
      "grad_norm": 10.864439964294434,
      "learning_rate": 1.437850976447909e-05,
      "loss": 1.5636,
      "step": 34000
    },
    {
      "epoch": 2.1687201408096555,
      "grad_norm": 9.609240531921387,
      "learning_rate": 1.3854664319839075e-05,
      "loss": 1.5643,
      "step": 34500
    },
    {
      "epoch": 2.2001508674880563,
      "grad_norm": 9.839557647705078,
      "learning_rate": 1.3330818875199062e-05,
      "loss": 1.5742,
      "step": 35000
    },
    {
      "epoch": 2.231581594166457,
      "grad_norm": 9.829071998596191,
      "learning_rate": 1.280697343055905e-05,
      "loss": 1.551,
      "step": 35500
    },
    {
      "epoch": 2.263012320844858,
      "grad_norm": 8.181549072265625,
      "learning_rate": 1.2283127985919035e-05,
      "loss": 1.5712,
      "step": 36000
    },
    {
      "epoch": 2.294443047523259,
      "grad_norm": 7.759120464324951,
      "learning_rate": 1.1759282541279022e-05,
      "loss": 1.5383,
      "step": 36500
    },
    {
      "epoch": 2.3258737742016597,
      "grad_norm": 8.53459644317627,
      "learning_rate": 1.1235437096639008e-05,
      "loss": 1.5472,
      "step": 37000
    },
    {
      "epoch": 2.35730450088006,
      "grad_norm": 9.845288276672363,
      "learning_rate": 1.0711591651998994e-05,
      "loss": 1.5299,
      "step": 37500
    },
    {
      "epoch": 2.388735227558461,
      "grad_norm": 9.097940444946289,
      "learning_rate": 1.0187746207358981e-05,
      "loss": 1.5513,
      "step": 38000
    },
    {
      "epoch": 2.420165954236862,
      "grad_norm": 10.140700340270996,
      "learning_rate": 9.663900762718968e-06,
      "loss": 1.5314,
      "step": 38500
    },
    {
      "epoch": 2.4515966809152627,
      "grad_norm": 8.499448776245117,
      "learning_rate": 9.140055318078955e-06,
      "loss": 1.5381,
      "step": 39000
    },
    {
      "epoch": 2.4830274075936636,
      "grad_norm": 10.239831924438477,
      "learning_rate": 8.61620987343894e-06,
      "loss": 1.5338,
      "step": 39500
    },
    {
      "epoch": 2.5144581342720644,
      "grad_norm": 12.466906547546387,
      "learning_rate": 8.092364428798927e-06,
      "loss": 1.5361,
      "step": 40000
    },
    {
      "epoch": 2.5458888609504653,
      "grad_norm": 5.410431385040283,
      "learning_rate": 7.568518984158913e-06,
      "loss": 1.5506,
      "step": 40500
    },
    {
      "epoch": 2.5773195876288657,
      "grad_norm": 10.963549613952637,
      "learning_rate": 7.0446735395189e-06,
      "loss": 1.5383,
      "step": 41000
    },
    {
      "epoch": 2.608750314307267,
      "grad_norm": 9.227997779846191,
      "learning_rate": 6.520828094878888e-06,
      "loss": 1.5007,
      "step": 41500
    },
    {
      "epoch": 2.6401810409856674,
      "grad_norm": 9.551142692565918,
      "learning_rate": 5.996982650238874e-06,
      "loss": 1.5002,
      "step": 42000
    },
    {
      "epoch": 2.6716117676640683,
      "grad_norm": 9.357813835144043,
      "learning_rate": 5.47313720559886e-06,
      "loss": 1.521,
      "step": 42500
    },
    {
      "epoch": 2.703042494342469,
      "grad_norm": 9.11950969696045,
      "learning_rate": 4.949291760958847e-06,
      "loss": 1.5389,
      "step": 43000
    },
    {
      "epoch": 2.73447322102087,
      "grad_norm": 10.955974578857422,
      "learning_rate": 4.425446316318833e-06,
      "loss": 1.5481,
      "step": 43500
    },
    {
      "epoch": 2.765903947699271,
      "grad_norm": 9.225030899047852,
      "learning_rate": 3.90160087167882e-06,
      "loss": 1.5181,
      "step": 44000
    },
    {
      "epoch": 2.7973346743776717,
      "grad_norm": 10.103915214538574,
      "learning_rate": 3.377755427038807e-06,
      "loss": 1.5013,
      "step": 44500
    },
    {
      "epoch": 2.8287654010560725,
      "grad_norm": 8.871588706970215,
      "learning_rate": 2.853909982398793e-06,
      "loss": 1.5107,
      "step": 45000
    },
    {
      "epoch": 2.860196127734473,
      "grad_norm": 8.05070686340332,
      "learning_rate": 2.3300645377587797e-06,
      "loss": 1.4924,
      "step": 45500
    },
    {
      "epoch": 2.8916268544128743,
      "grad_norm": 10.415308952331543,
      "learning_rate": 1.8062190931187664e-06,
      "loss": 1.4606,
      "step": 46000
    },
    {
      "epoch": 2.9230575810912747,
      "grad_norm": 8.8274507522583,
      "learning_rate": 1.2823736484787529e-06,
      "loss": 1.4867,
      "step": 46500
    },
    {
      "epoch": 2.9544883077696755,
      "grad_norm": 7.306589126586914,
      "learning_rate": 7.585282038387395e-07,
      "loss": 1.4848,
      "step": 47000
    },
    {
      "epoch": 2.9859190344480764,
      "grad_norm": 9.284761428833008,
      "learning_rate": 2.34682759198726e-07,
      "loss": 1.4519,
      "step": 47500
    },
    {
      "epoch": 3.0,
      "step": 47724,
      "total_flos": 2.514865055648973e+16,
      "train_loss": 1.8108088482396418,
      "train_runtime": 10130.0955,
      "train_samples_per_second": 9.422,
      "train_steps_per_second": 4.711
    }
  ],
  "logging_steps": 500,
  "max_steps": 47724,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.514865055648973e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}