{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.13027618551328818,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013027618551328818,
      "grad_norm": 14.949752807617188,
      "learning_rate": 4.0625e-06,
      "loss": 1.3439,
      "step": 1
    },
    {
      "epoch": 0.0026055237102657635,
      "grad_norm": 15.725403785705566,
      "learning_rate": 8.125e-06,
      "loss": 1.3354,
      "step": 2
    },
    {
      "epoch": 0.0039082855653986455,
      "grad_norm": 12.41278076171875,
      "learning_rate": 1.2187499999999998e-05,
      "loss": 1.3017,
      "step": 3
    },
    {
      "epoch": 0.005211047420531527,
      "grad_norm": 6.782334327697754,
      "learning_rate": 1.625e-05,
      "loss": 1.2634,
      "step": 4
    },
    {
      "epoch": 0.006513809275664409,
      "grad_norm": 3.730219841003418,
      "learning_rate": 2.03125e-05,
      "loss": 1.1704,
      "step": 5
    },
    {
      "epoch": 0.007816571130797291,
      "grad_norm": 2.547757863998413,
      "learning_rate": 2.4374999999999996e-05,
      "loss": 1.165,
      "step": 6
    },
    {
      "epoch": 0.009119332985930172,
      "grad_norm": 2.4353976249694824,
      "learning_rate": 2.8437499999999997e-05,
      "loss": 1.1567,
      "step": 7
    },
    {
      "epoch": 0.010422094841063054,
      "grad_norm": 2.218895673751831,
      "learning_rate": 3.25e-05,
      "loss": 1.0846,
      "step": 8
    },
    {
      "epoch": 0.011724856696195935,
      "grad_norm": 1.7433233261108398,
      "learning_rate": 3.6562499999999994e-05,
      "loss": 0.9816,
      "step": 9
    },
    {
      "epoch": 0.013027618551328817,
      "grad_norm": 1.6148579120635986,
      "learning_rate": 4.0625e-05,
      "loss": 1.0869,
      "step": 10
    },
    {
      "epoch": 0.0143303804064617,
      "grad_norm": 1.5065230131149292,
      "learning_rate": 4.4687499999999995e-05,
      "loss": 0.9625,
      "step": 11
    },
    {
      "epoch": 0.015633142261594582,
      "grad_norm": 1.4786823987960815,
      "learning_rate": 4.874999999999999e-05,
      "loss": 1.0489,
      "step": 12
    },
    {
      "epoch": 0.016935904116727463,
      "grad_norm": 1.2991230487823486,
      "learning_rate": 5.2812499999999996e-05,
      "loss": 0.9935,
      "step": 13
    },
    {
      "epoch": 0.018238665971860343,
      "grad_norm": 1.3199464082717896,
      "learning_rate": 5.687499999999999e-05,
      "loss": 1.0188,
      "step": 14
    },
    {
      "epoch": 0.019541427826993224,
      "grad_norm": 1.2793257236480713,
      "learning_rate": 6.09375e-05,
      "loss": 1.0265,
      "step": 15
    },
    {
      "epoch": 0.020844189682126108,
      "grad_norm": 1.2210655212402344,
      "learning_rate": 6.5e-05,
      "loss": 0.9019,
      "step": 16
    },
    {
      "epoch": 0.02214695153725899,
      "grad_norm": 1.3837445974349976,
      "learning_rate": 6.499971563731688e-05,
      "loss": 0.9487,
      "step": 17
    },
    {
      "epoch": 0.02344971339239187,
      "grad_norm": 1.2946529388427734,
      "learning_rate": 6.499886255424368e-05,
      "loss": 1.0031,
      "step": 18
    },
    {
      "epoch": 0.024752475247524754,
      "grad_norm": 1.3142110109329224,
      "learning_rate": 6.499744076570868e-05,
      "loss": 0.9941,
      "step": 19
    },
    {
      "epoch": 0.026055237102657634,
      "grad_norm": 1.8206599950790405,
      "learning_rate": 6.499545029659211e-05,
      "loss": 0.9767,
      "step": 20
    },
    {
      "epoch": 0.027357998957790515,
      "grad_norm": 1.2736281156539917,
      "learning_rate": 6.499289118172568e-05,
      "loss": 0.9194,
      "step": 21
    },
    {
      "epoch": 0.0286607608129234,
      "grad_norm": 1.8105640411376953,
      "learning_rate": 6.498976346589196e-05,
      "loss": 0.9853,
      "step": 22
    },
    {
      "epoch": 0.02996352266805628,
      "grad_norm": 1.4621827602386475,
      "learning_rate": 6.49860672038236e-05,
      "loss": 0.9896,
      "step": 23
    },
    {
      "epoch": 0.031266284523189164,
      "grad_norm": 1.212299108505249,
      "learning_rate": 6.498180246020238e-05,
      "loss": 0.9684,
      "step": 24
    },
    {
      "epoch": 0.032569046378322045,
      "grad_norm": 1.2274290323257446,
      "learning_rate": 6.49769693096581e-05,
      "loss": 0.9831,
      "step": 25
    },
    {
      "epoch": 0.033871808233454925,
      "grad_norm": 1.2909352779388428,
      "learning_rate": 6.49715678367672e-05,
      "loss": 0.9971,
      "step": 26
    },
    {
      "epoch": 0.035174570088587806,
      "grad_norm": 1.2330715656280518,
      "learning_rate": 6.49655981360514e-05,
      "loss": 0.9923,
      "step": 27
    },
    {
      "epoch": 0.03647733194372069,
      "grad_norm": 1.2433793544769287,
      "learning_rate": 6.495906031197591e-05,
      "loss": 0.9121,
      "step": 28
    },
    {
      "epoch": 0.03778009379885357,
      "grad_norm": 1.2374027967453003,
      "learning_rate": 6.49519544789477e-05,
      "loss": 0.9224,
      "step": 29
    },
    {
      "epoch": 0.03908285565398645,
      "grad_norm": 1.2204357385635376,
      "learning_rate": 6.494428076131346e-05,
      "loss": 0.919,
      "step": 30
    },
    {
      "epoch": 0.040385617509119336,
      "grad_norm": 1.1927292346954346,
      "learning_rate": 6.493603929335744e-05,
      "loss": 0.9589,
      "step": 31
    },
    {
      "epoch": 0.041688379364252216,
      "grad_norm": 1.1353641748428345,
      "learning_rate": 6.492723021929908e-05,
      "loss": 0.8869,
      "step": 32
    },
    {
      "epoch": 0.0429911412193851,
      "grad_norm": 1.212263822555542,
      "learning_rate": 6.49178536932905e-05,
      "loss": 0.8934,
      "step": 33
    },
    {
      "epoch": 0.04429390307451798,
      "grad_norm": 1.0893241167068481,
      "learning_rate": 6.49079098794138e-05,
      "loss": 0.8718,
      "step": 34
    },
    {
      "epoch": 0.04559666492965086,
      "grad_norm": 1.1801607608795166,
      "learning_rate": 6.489739895167818e-05,
      "loss": 0.9774,
      "step": 35
    },
    {
      "epoch": 0.04689942678478374,
      "grad_norm": 1.1302330493927002,
      "learning_rate": 6.488632109401691e-05,
      "loss": 0.9584,
      "step": 36
    },
    {
      "epoch": 0.04820218863991663,
      "grad_norm": 1.1432682275772095,
      "learning_rate": 6.487467650028411e-05,
      "loss": 0.9452,
      "step": 37
    },
    {
      "epoch": 0.04950495049504951,
      "grad_norm": 1.1399481296539307,
      "learning_rate": 6.486246537425133e-05,
      "loss": 0.9017,
      "step": 38
    },
    {
      "epoch": 0.05080771235018239,
      "grad_norm": 1.0911235809326172,
      "learning_rate": 6.484968792960403e-05,
      "loss": 0.9036,
      "step": 39
    },
    {
      "epoch": 0.05211047420531527,
      "grad_norm": 1.071078896522522,
      "learning_rate": 6.48363443899378e-05,
      "loss": 0.9273,
      "step": 40
    },
    {
      "epoch": 0.05341323606044815,
      "grad_norm": 1.053589105606079,
      "learning_rate": 6.482243498875449e-05,
      "loss": 0.8847,
      "step": 41
    },
    {
      "epoch": 0.05471599791558103,
      "grad_norm": 1.109771490097046,
      "learning_rate": 6.480795996945806e-05,
      "loss": 0.9364,
      "step": 42
    },
    {
      "epoch": 0.05601875977071391,
      "grad_norm": 1.1093286275863647,
      "learning_rate": 6.479291958535037e-05,
      "loss": 0.9088,
      "step": 43
    },
    {
      "epoch": 0.0573215216258468,
      "grad_norm": 1.0716255903244019,
      "learning_rate": 6.477731409962675e-05,
      "loss": 0.8885,
      "step": 44
    },
    {
      "epoch": 0.05862428348097968,
      "grad_norm": 1.1552095413208008,
      "learning_rate": 6.476114378537138e-05,
      "loss": 0.9736,
      "step": 45
    },
    {
      "epoch": 0.05992704533611256,
      "grad_norm": 1.1647394895553589,
      "learning_rate": 6.474440892555249e-05,
      "loss": 0.9138,
      "step": 46
    },
    {
      "epoch": 0.06122980719124544,
      "grad_norm": 1.1463412046432495,
      "learning_rate": 6.472710981301745e-05,
      "loss": 0.9204,
      "step": 47
    },
    {
      "epoch": 0.06253256904637833,
      "grad_norm": 1.175421118736267,
      "learning_rate": 6.470924675048762e-05,
      "loss": 0.9119,
      "step": 48
    },
    {
      "epoch": 0.06383533090151121,
      "grad_norm": 1.9249529838562012,
      "learning_rate": 6.469082005055304e-05,
      "loss": 0.8639,
      "step": 49
    },
    {
      "epoch": 0.06513809275664409,
      "grad_norm": 1.1802655458450317,
      "learning_rate": 6.467183003566701e-05,
      "loss": 0.9352,
      "step": 50
    },
    {
      "epoch": 0.06644085461177697,
      "grad_norm": 1.1035569906234741,
      "learning_rate": 6.465227703814038e-05,
      "loss": 0.9342,
      "step": 51
    },
    {
      "epoch": 0.06774361646690985,
      "grad_norm": 1.1226942539215088,
      "learning_rate": 6.463216140013581e-05,
      "loss": 0.8948,
      "step": 52
    },
    {
      "epoch": 0.06904637832204273,
      "grad_norm": 1.1143085956573486,
      "learning_rate": 6.46114834736617e-05,
      "loss": 0.9577,
      "step": 53
    },
    {
      "epoch": 0.07034914017717561,
      "grad_norm": 1.1214665174484253,
      "learning_rate": 6.459024362056608e-05,
      "loss": 0.9106,
      "step": 54
    },
    {
      "epoch": 0.07165190203230849,
      "grad_norm": 1.1105942726135254,
      "learning_rate": 6.45684422125303e-05,
      "loss": 0.8706,
      "step": 55
    },
    {
      "epoch": 0.07295466388744137,
      "grad_norm": 1.0562191009521484,
      "learning_rate": 6.454607963106247e-05,
      "loss": 0.8278,
      "step": 56
    },
    {
      "epoch": 0.07425742574257425,
      "grad_norm": 1.1298907995224,
      "learning_rate": 6.452315626749081e-05,
      "loss": 0.8883,
      "step": 57
    },
    {
      "epoch": 0.07556018759770713,
      "grad_norm": 1.133426547050476,
      "learning_rate": 6.449967252295682e-05,
      "loss": 0.8868,
      "step": 58
    },
    {
      "epoch": 0.07686294945284002,
      "grad_norm": 1.09175443649292,
      "learning_rate": 6.447562880840822e-05,
      "loss": 0.8486,
      "step": 59
    },
    {
      "epoch": 0.0781657113079729,
      "grad_norm": 1.1153368949890137,
      "learning_rate": 6.44510255445918e-05,
      "loss": 0.9278,
      "step": 60
    },
    {
      "epoch": 0.07946847316310579,
      "grad_norm": 1.102013349533081,
      "learning_rate": 6.442586316204601e-05,
      "loss": 0.8362,
      "step": 61
    },
    {
      "epoch": 0.08077123501823867,
      "grad_norm": 1.1041877269744873,
      "learning_rate": 6.44001421010935e-05,
      "loss": 0.8746,
      "step": 62
    },
    {
      "epoch": 0.08207399687337155,
      "grad_norm": 1.075576901435852,
      "learning_rate": 6.437386281183331e-05,
      "loss": 0.8567,
      "step": 63
    },
    {
      "epoch": 0.08337675872850443,
      "grad_norm": 1.1472443342208862,
      "learning_rate": 6.43470257541331e-05,
      "loss": 0.9023,
      "step": 64
    },
    {
      "epoch": 0.08467952058363731,
      "grad_norm": 1.1024972200393677,
      "learning_rate": 6.431963139762104e-05,
      "loss": 0.8982,
      "step": 65
    },
    {
      "epoch": 0.0859822824387702,
      "grad_norm": 1.1128768920898438,
      "learning_rate": 6.42916802216776e-05,
      "loss": 0.883,
      "step": 66
    },
    {
      "epoch": 0.08728504429390307,
      "grad_norm": 1.058384895324707,
      "learning_rate": 6.426317271542717e-05,
      "loss": 0.8428,
      "step": 67
    },
    {
      "epoch": 0.08858780614903596,
      "grad_norm": 1.0728991031646729,
      "learning_rate": 6.423410937772951e-05,
      "loss": 0.8553,
      "step": 68
    },
    {
      "epoch": 0.08989056800416884,
      "grad_norm": 1.0956963300704956,
      "learning_rate": 6.4204490717171e-05,
      "loss": 0.8894,
      "step": 69
    },
    {
      "epoch": 0.09119332985930172,
      "grad_norm": 1.0787287950515747,
      "learning_rate": 6.417431725205575e-05,
      "loss": 0.9086,
      "step": 70
    },
    {
      "epoch": 0.0924960917144346,
      "grad_norm": 1.1435580253601074,
      "learning_rate": 6.414358951039651e-05,
      "loss": 0.9074,
      "step": 71
    },
    {
      "epoch": 0.09379885356956748,
      "grad_norm": 1.0396053791046143,
      "learning_rate": 6.41123080299055e-05,
      "loss": 0.8553,
      "step": 72
    },
    {
      "epoch": 0.09510161542470036,
      "grad_norm": 1.0929105281829834,
      "learning_rate": 6.40804733579849e-05,
      "loss": 0.8504,
      "step": 73
    },
    {
      "epoch": 0.09640437727983325,
      "grad_norm": 1.1395248174667358,
      "learning_rate": 6.404808605171734e-05,
      "loss": 0.9222,
      "step": 74
    },
    {
      "epoch": 0.09770713913496613,
      "grad_norm": 1.0281647443771362,
      "learning_rate": 6.401514667785613e-05,
      "loss": 0.868,
      "step": 75
    },
    {
      "epoch": 0.09900990099009901,
      "grad_norm": 1.0565054416656494,
      "learning_rate": 6.398165581281536e-05,
      "loss": 0.8302,
      "step": 76
    },
    {
      "epoch": 0.1003126628452319,
      "grad_norm": 1.0529290437698364,
      "learning_rate": 6.394761404265977e-05,
      "loss": 0.8669,
      "step": 77
    },
    {
      "epoch": 0.10161542470036478,
      "grad_norm": 1.0579882860183716,
      "learning_rate": 6.391302196309452e-05,
      "loss": 0.9204,
      "step": 78
    },
    {
      "epoch": 0.10291818655549766,
      "grad_norm": 1.0531302690505981,
      "learning_rate": 6.387788017945482e-05,
      "loss": 0.9202,
      "step": 79
    },
    {
      "epoch": 0.10422094841063054,
      "grad_norm": 1.0358260869979858,
      "learning_rate": 6.384218930669521e-05,
      "loss": 0.8647,
      "step": 80
    },
    {
      "epoch": 0.10552371026576342,
      "grad_norm": 1.0599216222763062,
      "learning_rate": 6.380594996937895e-05,
      "loss": 0.9093,
      "step": 81
    },
    {
      "epoch": 0.1068264721208963,
      "grad_norm": 1.2026344537734985,
      "learning_rate": 6.376916280166696e-05,
      "loss": 0.8731,
      "step": 82
    },
    {
      "epoch": 0.10812923397602918,
      "grad_norm": 1.1054397821426392,
      "learning_rate": 6.373182844730677e-05,
      "loss": 0.8835,
      "step": 83
    },
    {
      "epoch": 0.10943199583116206,
      "grad_norm": 1.030562162399292,
      "learning_rate": 6.369394755962129e-05,
      "loss": 0.8432,
      "step": 84
    },
    {
      "epoch": 0.11073475768629494,
      "grad_norm": 1.0522128343582153,
      "learning_rate": 6.365552080149737e-05,
      "loss": 0.8598,
      "step": 85
    },
    {
      "epoch": 0.11203751954142782,
      "grad_norm": 1.0692416429519653,
      "learning_rate": 6.361654884537413e-05,
      "loss": 0.8812,
      "step": 86
    },
    {
      "epoch": 0.1133402813965607,
      "grad_norm": 1.1009373664855957,
      "learning_rate": 6.357703237323127e-05,
      "loss": 0.8635,
      "step": 87
    },
    {
      "epoch": 0.1146430432516936,
      "grad_norm": 1.1628373861312866,
      "learning_rate": 6.353697207657708e-05,
      "loss": 0.8907,
      "step": 88
    },
    {
      "epoch": 0.11594580510682648,
      "grad_norm": 1.1273458003997803,
      "learning_rate": 6.349636865643642e-05,
      "loss": 0.8445,
      "step": 89
    },
    {
      "epoch": 0.11724856696195936,
      "grad_norm": 1.0971312522888184,
      "learning_rate": 6.345522282333835e-05,
      "loss": 0.8543,
      "step": 90
    },
    {
      "epoch": 0.11855132881709224,
      "grad_norm": 1.0826351642608643,
      "learning_rate": 6.341353529730374e-05,
      "loss": 0.8605,
      "step": 91
    },
    {
      "epoch": 0.11985409067222512,
      "grad_norm": 1.0686413049697876,
      "learning_rate": 6.337130680783273e-05,
      "loss": 0.8814,
      "step": 92
    },
    {
      "epoch": 0.121156852527358,
      "grad_norm": 1.21683931350708,
      "learning_rate": 6.332853809389189e-05,
      "loss": 0.8979,
      "step": 93
    },
    {
      "epoch": 0.12245961438249088,
      "grad_norm": 1.0884461402893066,
      "learning_rate": 6.328522990390126e-05,
      "loss": 0.8577,
      "step": 94
    },
    {
      "epoch": 0.12376237623762376,
      "grad_norm": 1.047310709953308,
      "learning_rate": 6.324138299572135e-05,
      "loss": 0.8174,
      "step": 95
    },
    {
      "epoch": 0.12506513809275666,
      "grad_norm": 1.087296485900879,
      "learning_rate": 6.319699813663982e-05,
      "loss": 0.8566,
      "step": 96
    },
    {
      "epoch": 0.12636789994788952,
      "grad_norm": 1.070327639579773,
      "learning_rate": 6.315207610335808e-05,
      "loss": 0.8615,
      "step": 97
    },
    {
      "epoch": 0.12767066180302242,
      "grad_norm": 1.0299792289733887,
      "learning_rate": 6.310661768197764e-05,
      "loss": 0.866,
      "step": 98
    },
    {
      "epoch": 0.12897342365815528,
      "grad_norm": 1.1011384725570679,
      "learning_rate": 6.306062366798642e-05,
      "loss": 0.8775,
      "step": 99
    },
    {
      "epoch": 0.13027618551328818,
      "grad_norm": 1.0193467140197754,
      "learning_rate": 6.301409486624481e-05,
      "loss": 0.8243,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 767,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5374903328099533e+17,
  "train_batch_size": 6,
  "trial_name": null,
  "trial_params": null
}