{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 81250,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 5.000000000000001e-07, "loss": 1.6109, "step": 500 },
    { "epoch": 0.02, "learning_rate": 1.0000000000000002e-06, "loss": 1.5501, "step": 1000 },
    { "epoch": 0.04, "learning_rate": 1.5e-06, "loss": 1.1228, "step": 1500 },
    { "epoch": 0.05, "learning_rate": 2.0000000000000003e-06, "loss": 0.8543, "step": 2000 },
    { "epoch": 0.06, "learning_rate": 2.5e-06, "loss": 0.8183, "step": 2500 },
    { "epoch": 0.07, "learning_rate": 3e-06, "loss": 0.7931, "step": 3000 },
    { "epoch": 0.09, "learning_rate": 3.5e-06, "loss": 0.7792, "step": 3500 },
    { "epoch": 0.1, "learning_rate": 4.000000000000001e-06, "loss": 0.7713, "step": 4000 },
    { "epoch": 0.11, "learning_rate": 4.5e-06, "loss": 0.7678, "step": 4500 },
    { "epoch": 0.12, "learning_rate": 5e-06, "loss": 0.7736, "step": 5000 },
    { "epoch": 0.14, "learning_rate": 5.500000000000001e-06, "loss": 0.7527, "step": 5500 },
    { "epoch": 0.15, "learning_rate": 6e-06, "loss": 0.7499, "step": 6000 },
    { "epoch": 0.16, "learning_rate": 6.5000000000000004e-06, "loss": 0.7438, "step": 6500 },
    { "epoch": 0.17, "learning_rate": 7e-06, "loss": 0.7462, "step": 7000 },
    { "epoch": 0.18, "learning_rate": 7.500000000000001e-06, "loss": 0.7524, "step": 7500 },
    { "epoch": 0.2, "learning_rate": 8.000000000000001e-06, "loss": 0.7427, "step": 8000 },
    { "epoch": 0.21, "learning_rate": 8.5e-06, "loss": 0.7342, "step": 8500 },
    { "epoch": 0.22, "learning_rate": 9e-06, "loss": 0.7347, "step": 9000 },
    { "epoch": 0.23, "learning_rate": 9.5e-06, "loss": 0.749, "step": 9500 },
    { "epoch": 0.25, "learning_rate": 1e-05, "loss": 0.7288, "step": 10000 },
    { "epoch": 0.26, "learning_rate": 9.993769470404985e-06, "loss": 0.7326, "step": 10500 },
    { "epoch": 0.27, "learning_rate": 9.98753894080997e-06, "loss": 0.7256, "step": 11000 },
    { "epoch": 0.28, "learning_rate": 9.981308411214954e-06, "loss": 0.7386, "step": 11500 },
    { "epoch": 0.3, "learning_rate": 9.97507788161994e-06, "loss": 0.7285, "step": 12000 },
    { "epoch": 0.31, "learning_rate": 9.968847352024923e-06, "loss": 0.7223, "step": 12500 },
    { "epoch": 0.32, "learning_rate": 9.962616822429907e-06, "loss": 0.7327, "step": 13000 },
    { "epoch": 0.33, "learning_rate": 9.956386292834892e-06, "loss": 0.7189, "step": 13500 },
    { "epoch": 0.34, "learning_rate": 9.950155763239876e-06, "loss": 0.7316, "step": 14000 },
    { "epoch": 0.36, "learning_rate": 9.943925233644862e-06, "loss": 0.7129, "step": 14500 },
    { "epoch": 0.37, "learning_rate": 9.937694704049845e-06, "loss": 0.7096, "step": 15000 },
    { "epoch": 0.38, "learning_rate": 9.931464174454829e-06, "loss": 0.7134, "step": 15500 },
    { "epoch": 0.39, "learning_rate": 9.925233644859815e-06, "loss": 0.7201, "step": 16000 },
    { "epoch": 0.41, "learning_rate": 9.919003115264798e-06, "loss": 0.7185, "step": 16500 },
    { "epoch": 0.42, "learning_rate": 9.912772585669784e-06, "loss": 0.7215, "step": 17000 },
    { "epoch": 0.43, "learning_rate": 9.906542056074768e-06, "loss": 0.7142, "step": 17500 },
    { "epoch": 0.44, "learning_rate": 9.900311526479751e-06, "loss": 0.7261, "step": 18000 },
    { "epoch": 0.46, "learning_rate": 9.894080996884737e-06, "loss": 0.7091, "step": 18500 },
    { "epoch": 0.47, "learning_rate": 9.88785046728972e-06, "loss": 0.7138, "step": 19000 },
    { "epoch": 0.48, "learning_rate": 9.881619937694706e-06, "loss": 0.7134, "step": 19500 },
    { "epoch": 0.49, "learning_rate": 9.87538940809969e-06, "loss": 0.7157, "step": 20000 },
    { "epoch": 0.5, "learning_rate": 9.869158878504674e-06, "loss": 0.7136, "step": 20500 },
    { "epoch": 0.52, "learning_rate": 9.862928348909659e-06, "loss": 0.7133, "step": 21000 },
    { "epoch": 0.53, "learning_rate": 9.856697819314643e-06, "loss": 0.7048, "step": 21500 },
    { "epoch": 0.54, "learning_rate": 9.850467289719627e-06, "loss": 0.7004, "step": 22000 },
    { "epoch": 0.55, "learning_rate": 9.844236760124612e-06, "loss": 0.69, "step": 22500 },
    { "epoch": 0.57, "learning_rate": 9.838006230529596e-06, "loss": 0.7158, "step": 23000 },
    { "epoch": 0.58, "learning_rate": 9.83177570093458e-06, "loss": 0.6995, "step": 23500 },
    { "epoch": 0.59, "learning_rate": 9.825545171339565e-06, "loss": 0.6893, "step": 24000 },
    { "epoch": 0.6, "learning_rate": 9.819314641744549e-06, "loss": 0.6978, "step": 24500 },
    { "epoch": 0.62, "learning_rate": 9.813084112149533e-06, "loss": 0.7025, "step": 25000 },
    { "epoch": 0.63, "learning_rate": 9.806853582554518e-06, "loss": 0.6952, "step": 25500 },
    { "epoch": 0.64, "learning_rate": 9.800623052959502e-06, "loss": 0.7102, "step": 26000 },
    { "epoch": 0.65, "learning_rate": 9.794392523364486e-06, "loss": 0.7042, "step": 26500 },
    { "epoch": 0.66, "learning_rate": 9.788161993769471e-06, "loss": 0.6953, "step": 27000 },
    { "epoch": 0.68, "learning_rate": 9.781931464174455e-06, "loss": 0.7247, "step": 27500 },
    { "epoch": 0.69, "learning_rate": 9.775700934579439e-06, "loss": 0.7081, "step": 28000 },
    { "epoch": 0.7, "learning_rate": 9.769470404984424e-06, "loss": 0.711, "step": 28500 },
    { "epoch": 0.71, "learning_rate": 9.763239875389408e-06, "loss": 0.6943, "step": 29000 },
    { "epoch": 0.73, "learning_rate": 9.757009345794393e-06, "loss": 0.6845, "step": 29500 },
    { "epoch": 0.74, "learning_rate": 9.750778816199377e-06, "loss": 0.7012, "step": 30000 },
    { "epoch": 0.75, "learning_rate": 9.744548286604361e-06, "loss": 0.7031, "step": 30500 },
    { "epoch": 0.76, "learning_rate": 9.738317757009347e-06, "loss": 0.6994, "step": 31000 },
    { "epoch": 0.78, "learning_rate": 9.73208722741433e-06, "loss": 0.7058, "step": 31500 },
    { "epoch": 0.79, "learning_rate": 9.725856697819316e-06, "loss": 0.69, "step": 32000 },
    { "epoch": 0.8, "learning_rate": 9.7196261682243e-06, "loss": 0.6888, "step": 32500 },
    { "epoch": 0.81, "learning_rate": 9.713395638629283e-06, "loss": 0.7044, "step": 33000 },
    { "epoch": 0.82, "learning_rate": 9.707165109034269e-06, "loss": 0.6841, "step": 33500 },
    { "epoch": 0.84, "learning_rate": 9.700934579439253e-06, "loss": 0.6886, "step": 34000 },
    { "epoch": 0.85, "learning_rate": 9.694704049844238e-06, "loss": 0.6884, "step": 34500 },
    { "epoch": 0.86, "learning_rate": 9.688473520249222e-06, "loss": 0.699, "step": 35000 },
    { "epoch": 0.87, "learning_rate": 9.682242990654206e-06, "loss": 0.7042, "step": 35500 },
    { "epoch": 0.89, "learning_rate": 9.676012461059191e-06, "loss": 0.6993, "step": 36000 },
    { "epoch": 0.9, "learning_rate": 9.669781931464175e-06, "loss": 0.6983, "step": 36500 },
    { "epoch": 0.91, "learning_rate": 9.66355140186916e-06, "loss": 0.697, "step": 37000 },
    { "epoch": 0.92, "learning_rate": 9.657320872274144e-06, "loss": 0.704, "step": 37500 },
    { "epoch": 0.94, "learning_rate": 9.651090342679128e-06, "loss": 0.6917, "step": 38000 },
    { "epoch": 0.95, "learning_rate": 9.644859813084113e-06, "loss": 0.6904, "step": 38500 },
    { "epoch": 0.96, "learning_rate": 9.638629283489097e-06, "loss": 0.6946, "step": 39000 },
    { "epoch": 0.97, "learning_rate": 9.632398753894083e-06, "loss": 0.6905, "step": 39500 },
    { "epoch": 0.98, "learning_rate": 9.626168224299066e-06, "loss": 0.7, "step": 40000 },
    { "epoch": 1.0, "learning_rate": 9.61993769470405e-06, "loss": 0.7068, "step": 40500 },
    { "epoch": 1.0, "eval_accuracy": 0.69898, "eval_loss": 0.6849758625030518, "eval_runtime": 138.1081, "eval_samples_per_second": 362.035, "eval_steps_per_second": 22.627, "step": 40625 },
    { "epoch": 1.01, "learning_rate": 9.613707165109036e-06, "loss": 0.6541, "step": 41000 },
    { "epoch": 1.02, "learning_rate": 9.60747663551402e-06, "loss": 0.6604, "step": 41500 },
    { "epoch": 1.03, "learning_rate": 9.601246105919005e-06, "loss": 0.6514, "step": 42000 },
    { "epoch": 1.05, "learning_rate": 9.595015576323989e-06, "loss": 0.6295, "step": 42500 },
    { "epoch": 1.06, "learning_rate": 9.588785046728972e-06, "loss": 0.6594, "step": 43000 },
    { "epoch": 1.07, "learning_rate": 9.582554517133958e-06, "loss": 0.6389, "step": 43500 },
    { "epoch": 1.08, "learning_rate": 9.576323987538942e-06, "loss": 0.6329, "step": 44000 },
    { "epoch": 1.1, "learning_rate": 9.570093457943927e-06, "loss": 0.6528, "step": 44500 },
    { "epoch": 1.11, "learning_rate": 9.563862928348911e-06, "loss": 0.6429, "step": 45000 },
    { "epoch": 1.12, "learning_rate": 9.557632398753895e-06, "loss": 0.6358, "step": 45500 },
    { "epoch": 1.13, "learning_rate": 9.55140186915888e-06, "loss": 0.6328, "step": 46000 },
    { "epoch": 1.14, "learning_rate": 9.545171339563864e-06, "loss": 0.6362, "step": 46500 },
    { "epoch": 1.16, "learning_rate": 9.538940809968848e-06, "loss": 0.6299, "step": 47000 },
    { "epoch": 1.17, "learning_rate": 9.532710280373833e-06, "loss": 0.6458, "step": 47500 },
    { "epoch": 1.18, "learning_rate": 9.526479750778817e-06, "loss": 0.6404, "step": 48000 },
    { "epoch": 1.19, "learning_rate": 9.520249221183802e-06, "loss": 0.6451, "step": 48500 },
    { "epoch": 1.21, "learning_rate": 9.514018691588786e-06, "loss": 0.6299, "step": 49000 },
    { "epoch": 1.22, "learning_rate": 9.50778816199377e-06, "loss": 0.6411, "step": 49500 },
    { "epoch": 1.23, "learning_rate": 9.501557632398755e-06, "loss": 0.6398, "step": 50000 },
    { "epoch": 1.24, "learning_rate": 9.49532710280374e-06, "loss": 0.6471, "step": 50500 },
    { "epoch": 1.26, "learning_rate": 9.489096573208723e-06, "loss": 0.6479, "step": 51000 },
    { "epoch": 1.27, "learning_rate": 9.482866043613708e-06, "loss": 0.6239, "step": 51500 },
    { "epoch": 1.28, "learning_rate": 9.476635514018692e-06, "loss": 0.6405, "step": 52000 },
    { "epoch": 1.29, "learning_rate": 9.470404984423676e-06, "loss": 0.6458, "step": 52500 },
    { "epoch": 1.3, "learning_rate": 9.464174454828661e-06, "loss": 0.6269, "step": 53000 },
    { "epoch": 1.32, "learning_rate": 9.457943925233645e-06, "loss": 0.6438, "step": 53500 },
    { "epoch": 1.33, "learning_rate": 9.451713395638629e-06, "loss": 0.65, "step": 54000 },
    { "epoch": 1.34, "learning_rate": 9.445482866043614e-06, "loss": 0.6495, "step": 54500 },
    { "epoch": 1.35, "learning_rate": 9.439252336448598e-06, "loss": 0.6355, "step": 55000 },
    { "epoch": 1.37, "learning_rate": 9.433021806853582e-06, "loss": 0.6452, "step": 55500 },
    { "epoch": 1.38, "learning_rate": 9.426791277258568e-06, "loss": 0.6419, "step": 56000 },
    { "epoch": 1.39, "learning_rate": 9.420560747663551e-06, "loss": 0.6589, "step": 56500 },
    { "epoch": 1.4, "learning_rate": 9.414330218068537e-06, "loss": 0.6426, "step": 57000 },
    { "epoch": 1.42, "learning_rate": 9.40809968847352e-06, "loss": 0.6367, "step": 57500 },
    { "epoch": 1.43, "learning_rate": 9.401869158878504e-06, "loss": 0.6388, "step": 58000 },
    { "epoch": 1.44, "learning_rate": 9.39563862928349e-06, "loss": 0.6507, "step": 58500 },
    { "epoch": 1.45, "learning_rate": 9.389408099688474e-06, "loss": 0.6377, "step": 59000 },
    { "epoch": 1.46, "learning_rate": 9.383177570093459e-06, "loss": 0.6612, "step": 59500 },
    { "epoch": 1.48, "learning_rate": 9.376947040498443e-06, "loss": 0.659, "step": 60000 },
    { "epoch": 1.49, "learning_rate": 9.370716510903427e-06, "loss": 0.6396, "step": 60500 },
    { "epoch": 1.5, "learning_rate": 9.364485981308412e-06, "loss": 0.6541, "step": 61000 },
    { "epoch": 1.51, "learning_rate": 9.358255451713396e-06, "loss": 0.6483, "step": 61500 },
    { "epoch": 1.53, "learning_rate": 9.352024922118381e-06, "loss": 0.6366, "step": 62000 },
    { "epoch": 1.54, "learning_rate": 9.345794392523365e-06, "loss": 0.6476, "step": 62500 },
    { "epoch": 1.55, "learning_rate": 9.339563862928349e-06, "loss": 0.6553, "step": 63000 },
    { "epoch": 1.56, "learning_rate": 9.333333333333334e-06, "loss": 0.6362, "step": 63500 },
    { "epoch": 1.58, "learning_rate": 9.327102803738318e-06, "loss": 0.6442, "step": 64000 },
    { "epoch": 1.59, "learning_rate": 9.320872274143304e-06, "loss": 0.6566, "step": 64500 },
    { "epoch": 1.6, "learning_rate": 9.314641744548287e-06, "loss": 0.6497, "step": 65000 },
    { "epoch": 1.61, "learning_rate": 9.308411214953271e-06, "loss": 0.6461, "step": 65500 },
    { "epoch": 1.62, "learning_rate": 9.302180685358257e-06, "loss": 0.6609, "step": 66000 },
    { "epoch": 1.64, "learning_rate": 9.29595015576324e-06, "loss": 0.6502, "step": 66500 },
    { "epoch": 1.65, "learning_rate": 9.289719626168226e-06, "loss": 0.6531, "step": 67000 },
    { "epoch": 1.66, "learning_rate": 9.28348909657321e-06, "loss": 0.6441, "step": 67500 },
    { "epoch": 1.67, "learning_rate": 9.277258566978193e-06, "loss": 0.6487, "step": 68000 },
    { "epoch": 1.69, "learning_rate": 9.271028037383179e-06, "loss": 0.6436, "step": 68500 },
    { "epoch": 1.7, "learning_rate": 9.264797507788163e-06, "loss": 0.6575, "step": 69000 },
    { "epoch": 1.71, "learning_rate": 9.258566978193148e-06, "loss": 0.6514, "step": 69500 },
    { "epoch": 1.72, "learning_rate": 9.252336448598132e-06, "loss": 0.6424, "step": 70000 },
    { "epoch": 1.74, "learning_rate": 9.246105919003116e-06, "loss": 0.646, "step": 70500 },
    { "epoch": 1.75, "learning_rate": 9.239875389408101e-06, "loss": 0.6445, "step": 71000 },
    { "epoch": 1.76, "learning_rate": 9.233644859813085e-06, "loss": 0.6507, "step": 71500 },
    { "epoch": 1.77, "learning_rate": 9.22741433021807e-06, "loss": 0.6564, "step": 72000 },
    { "epoch": 1.78, "learning_rate": 9.221183800623054e-06, "loss": 0.6422, "step": 72500 },
    { "epoch": 1.8, "learning_rate": 9.214953271028038e-06, "loss": 0.6408, "step": 73000 },
    { "epoch": 1.81, "learning_rate": 9.208722741433023e-06, "loss": 0.6394, "step": 73500 },
    { "epoch": 1.82, "learning_rate": 9.202492211838007e-06, "loss": 0.6421, "step": 74000 },
    { "epoch": 1.83, "learning_rate": 9.196261682242991e-06, "loss": 0.6379, "step": 74500 },
    { "epoch": 1.85, "learning_rate": 9.190031152647976e-06, "loss": 0.6421, "step": 75000 },
    { "epoch": 1.86, "learning_rate": 9.18380062305296e-06, "loss": 0.6277, "step": 75500 },
    { "epoch": 1.87, "learning_rate": 9.177570093457944e-06, "loss": 0.6475, "step": 76000 },
    { "epoch": 1.88, "learning_rate": 9.17133956386293e-06, "loss": 0.65, "step": 76500 },
    { "epoch": 1.9, "learning_rate": 9.165109034267913e-06, "loss": 0.6546, "step": 77000 },
    { "epoch": 1.91, "learning_rate": 9.158878504672899e-06, "loss": 0.6476, "step": 77500 },
    { "epoch": 1.92, "learning_rate": 9.152647975077882e-06, "loss": 0.6362, "step": 78000 },
    { "epoch": 1.93, "learning_rate": 9.146417445482866e-06, "loss": 0.6368, "step": 78500 },
    { "epoch": 1.94, "learning_rate": 9.140186915887852e-06, "loss": 0.6338, "step": 79000 },
    { "epoch": 1.96, "learning_rate": 9.133956386292835e-06, "loss": 0.6456, "step": 79500 },
    { "epoch": 1.97, "learning_rate": 9.12772585669782e-06, "loss": 0.6563, "step": 80000 },
    { "epoch": 1.98, "learning_rate": 9.121495327102805e-06, "loss": 0.6456, "step": 80500 },
    { "epoch": 1.99, "learning_rate": 9.115264797507789e-06, "loss": 0.6518, "step": 81000 },
    { "epoch": 2.0, "eval_accuracy": 0.70374, "eval_loss": 0.679389238357544, "eval_runtime": 138.0175, "eval_samples_per_second": 362.273, "eval_steps_per_second": 22.642, "step": 81250 }
  ],
  "max_steps": 812500,
  "num_train_epochs": 20,
  "total_flos": 1.710267926016e+17,
  "trial_name": null,
  "trial_params": null
}