{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.3930348258706466,
  "eval_steps": 35,
  "global_step": 245,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.028429282160625444,
      "grad_norm": 2.917722702026367,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 1.3615,
      "step": 5
    },
    {
      "epoch": 0.05685856432125089,
      "grad_norm": 2.65405011177063,
      "learning_rate": 2.857142857142857e-05,
      "loss": 1.2182,
      "step": 10
    },
    {
      "epoch": 0.08528784648187633,
      "grad_norm": 4.0127272605896,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.8944,
      "step": 15
    },
    {
      "epoch": 0.11371712864250177,
      "grad_norm": 0.8177616000175476,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.512,
      "step": 20
    },
    {
      "epoch": 0.14214641080312723,
      "grad_norm": 0.3800067901611328,
      "learning_rate": 7.142857142857143e-05,
      "loss": 0.3435,
      "step": 25
    },
    {
      "epoch": 0.17057569296375266,
      "grad_norm": 0.3455258309841156,
      "learning_rate": 8.571428571428571e-05,
      "loss": 0.3027,
      "step": 30
    },
    {
      "epoch": 0.19900497512437812,
      "grad_norm": 0.3401183485984802,
      "learning_rate": 0.0001,
      "loss": 0.3045,
      "step": 35
    },
    {
      "epoch": 0.19900497512437812,
      "eval_loss": 0.2556740641593933,
      "eval_runtime": 1374.6693,
      "eval_samples_per_second": 1.819,
      "eval_steps_per_second": 1.819,
      "step": 35
    },
    {
      "epoch": 0.22743425728500355,
      "grad_norm": 0.3088541328907013,
      "learning_rate": 9.993784606094612e-05,
      "loss": 0.2799,
      "step": 40
    },
    {
      "epoch": 0.255863539445629,
      "grad_norm": 0.2526684105396271,
      "learning_rate": 9.975153876827008e-05,
      "loss": 0.2779,
      "step": 45
    },
    {
      "epoch": 0.28429282160625446,
      "grad_norm": 0.3618779182434082,
      "learning_rate": 9.944154131125642e-05,
      "loss": 0.2781,
      "step": 50
    },
    {
      "epoch": 0.31272210376687987,
      "grad_norm": 0.32333430647850037,
      "learning_rate": 9.900862439242719e-05,
      "loss": 0.2595,
      "step": 55
    },
    {
      "epoch": 0.3411513859275053,
      "grad_norm": 0.328197717666626,
      "learning_rate": 9.84538643114539e-05,
      "loss": 0.275,
      "step": 60
    },
    {
      "epoch": 0.3695806680881308,
      "grad_norm": 0.28067663311958313,
      "learning_rate": 9.777864028930705e-05,
      "loss": 0.2333,
      "step": 65
    },
    {
      "epoch": 0.39800995024875624,
      "grad_norm": 0.3941590487957001,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.2524,
      "step": 70
    },
    {
      "epoch": 0.39800995024875624,
      "eval_loss": 0.23971079289913177,
      "eval_runtime": 1374.4109,
      "eval_samples_per_second": 1.819,
      "eval_steps_per_second": 1.819,
      "step": 70
    },
    {
      "epoch": 0.42643923240938164,
      "grad_norm": 0.2214515656232834,
      "learning_rate": 9.607381059352038e-05,
      "loss": 0.2585,
      "step": 75
    },
    {
      "epoch": 0.4548685145700071,
      "grad_norm": 0.27422434091567993,
      "learning_rate": 9.504844339512095e-05,
      "loss": 0.2122,
      "step": 80
    },
    {
      "epoch": 0.48329779673063256,
      "grad_norm": 0.28153732419013977,
      "learning_rate": 9.391107866851143e-05,
      "loss": 0.2397,
      "step": 85
    },
    {
      "epoch": 0.511727078891258,
      "grad_norm": 0.2967371344566345,
      "learning_rate": 9.266454408160779e-05,
      "loss": 0.2192,
      "step": 90
    },
    {
      "epoch": 0.5401563610518835,
      "grad_norm": 0.29277151823043823,
      "learning_rate": 9.131193871579975e-05,
      "loss": 0.234,
      "step": 95
    },
    {
      "epoch": 0.5685856432125089,
      "grad_norm": 0.30613964796066284,
      "learning_rate": 8.985662536114613e-05,
      "loss": 0.2376,
      "step": 100
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.27135342359542847,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.245,
      "step": 105
    },
    {
      "epoch": 0.5970149253731343,
      "eval_loss": 0.23002392053604126,
      "eval_runtime": 1375.9551,
      "eval_samples_per_second": 1.817,
      "eval_steps_per_second": 1.817,
      "step": 105
    },
    {
      "epoch": 0.6254442075337597,
      "grad_norm": 0.23706920444965363,
      "learning_rate": 8.665259359149132e-05,
      "loss": 0.2193,
      "step": 110
    },
    {
      "epoch": 0.6538734896943852,
      "grad_norm": 0.3904951214790344,
      "learning_rate": 8.491184090430364e-05,
      "loss": 0.231,
      "step": 115
    },
    {
      "epoch": 0.6823027718550106,
      "grad_norm": 0.23460106551647186,
      "learning_rate": 8.308429187984297e-05,
      "loss": 0.229,
      "step": 120
    },
    {
      "epoch": 0.7107320540156361,
      "grad_norm": 0.29704052209854126,
      "learning_rate": 8.117449009293668e-05,
      "loss": 0.2277,
      "step": 125
    },
    {
      "epoch": 0.7391613361762616,
      "grad_norm": 0.3585428297519684,
      "learning_rate": 7.91871836117395e-05,
      "loss": 0.2356,
      "step": 130
    },
    {
      "epoch": 0.767590618336887,
      "grad_norm": 0.3541390895843506,
      "learning_rate": 7.712731319328798e-05,
      "loss": 0.2521,
      "step": 135
    },
    {
      "epoch": 0.7960199004975125,
      "grad_norm": 0.3026750087738037,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.2363,
      "step": 140
    },
    {
      "epoch": 0.7960199004975125,
      "eval_loss": 0.2129843682050705,
      "eval_runtime": 1375.37,
      "eval_samples_per_second": 1.818,
      "eval_steps_per_second": 1.818,
      "step": 140
    },
    {
      "epoch": 0.8244491826581379,
      "grad_norm": 0.3274117410182953,
      "learning_rate": 7.281053286765815e-05,
      "loss": 0.2308,
      "step": 145
    },
    {
      "epoch": 0.8528784648187633,
      "grad_norm": 0.2670952379703522,
      "learning_rate": 7.056435515653059e-05,
      "loss": 0.2456,
      "step": 150
    },
    {
      "epoch": 0.8813077469793887,
      "grad_norm": 0.36056894063949585,
      "learning_rate": 6.826705121831976e-05,
      "loss": 0.249,
      "step": 155
    },
    {
      "epoch": 0.9097370291400142,
      "grad_norm": 0.3259892761707306,
      "learning_rate": 6.592433251258423e-05,
      "loss": 0.2312,
      "step": 160
    },
    {
      "epoch": 0.9381663113006397,
      "grad_norm": 0.39926743507385254,
      "learning_rate": 6.354202340715026e-05,
      "loss": 0.2157,
      "step": 165
    },
    {
      "epoch": 0.9665955934612651,
      "grad_norm": 0.23705609142780304,
      "learning_rate": 6.112604669781572e-05,
      "loss": 0.2128,
      "step": 170
    },
    {
      "epoch": 0.9950248756218906,
      "grad_norm": 0.27746665477752686,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.2383,
      "step": 175
    },
    {
      "epoch": 0.9950248756218906,
      "eval_loss": 0.2247208058834076,
      "eval_runtime": 1375.4951,
      "eval_samples_per_second": 1.818,
      "eval_steps_per_second": 1.818,
      "step": 175
    },
    {
      "epoch": 1.023454157782516,
      "grad_norm": 0.2372225672006607,
      "learning_rate": 5.621718523237427e-05,
      "loss": 0.2187,
      "step": 180
    },
    {
      "epoch": 1.0518834399431414,
      "grad_norm": 0.2701272964477539,
      "learning_rate": 5.373650467932122e-05,
      "loss": 0.2024,
      "step": 185
    },
    {
      "epoch": 1.080312722103767,
      "grad_norm": 0.32787126302719116,
      "learning_rate": 5.124653458690365e-05,
      "loss": 0.221,
      "step": 190
    },
    {
      "epoch": 1.1087420042643923,
      "grad_norm": 0.2735784351825714,
      "learning_rate": 4.875346541309637e-05,
      "loss": 0.199,
      "step": 195
    },
    {
      "epoch": 1.1371712864250179,
      "grad_norm": 0.35804641246795654,
      "learning_rate": 4.626349532067879e-05,
      "loss": 0.2299,
      "step": 200
    },
    {
      "epoch": 1.1656005685856432,
      "grad_norm": 0.24261139333248138,
      "learning_rate": 4.378281476762576e-05,
      "loss": 0.2131,
      "step": 205
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 0.23894722759723663,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.2067,
      "step": 210
    },
    {
      "epoch": 1.1940298507462686,
      "eval_loss": 0.20743593573570251,
      "eval_runtime": 1374.8457,
      "eval_samples_per_second": 1.818,
      "eval_steps_per_second": 1.818,
      "step": 210
    },
    {
      "epoch": 1.2224591329068941,
      "grad_norm": 0.26743921637535095,
      "learning_rate": 3.887395330218429e-05,
      "loss": 0.2012,
      "step": 215
    },
    {
      "epoch": 1.2508884150675195,
      "grad_norm": 0.26504266262054443,
      "learning_rate": 3.6457976592849754e-05,
      "loss": 0.1927,
      "step": 220
    },
    {
      "epoch": 1.279317697228145,
      "grad_norm": 0.45054301619529724,
      "learning_rate": 3.4075667487415785e-05,
      "loss": 0.2153,
      "step": 225
    },
    {
      "epoch": 1.3077469793887704,
      "grad_norm": 0.2784373462200165,
      "learning_rate": 3.173294878168025e-05,
      "loss": 0.211,
      "step": 230
    },
    {
      "epoch": 1.336176261549396,
      "grad_norm": 0.2335204929113388,
      "learning_rate": 2.9435644843469436e-05,
      "loss": 0.2156,
      "step": 235
    },
    {
      "epoch": 1.3646055437100213,
      "grad_norm": 0.25453269481658936,
      "learning_rate": 2.718946713234185e-05,
      "loss": 0.2048,
      "step": 240
    },
    {
      "epoch": 1.3930348258706466,
      "grad_norm": 0.2633484899997711,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.2277,
      "step": 245
    },
    {
      "epoch": 1.3930348258706466,
      "eval_loss": 0.20937685668468475,
      "eval_runtime": 1374.1515,
      "eval_samples_per_second": 1.819,
      "eval_steps_per_second": 1.819,
      "step": 245
    }
  ],
  "logging_steps": 5,
  "max_steps": 350,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 35,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.563732349266166e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}