{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 804,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018656716417910446,
      "grad_norm": 3.6549656925784713,
      "learning_rate": 6.0975609756097564e-06,
      "loss": 0.8899,
      "step": 5
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 5.025090574937554,
      "learning_rate": 1.2195121951219513e-05,
      "loss": 0.8747,
      "step": 10
    },
    {
      "epoch": 0.055970149253731345,
      "grad_norm": 2.342131675091634,
      "learning_rate": 1.8292682926829268e-05,
      "loss": 0.8338,
      "step": 15
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 1.1494234292820684,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.7846,
      "step": 20
    },
    {
      "epoch": 0.09328358208955224,
      "grad_norm": 0.8632261864875254,
      "learning_rate": 3.048780487804878e-05,
      "loss": 0.7428,
      "step": 25
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 0.657426681372709,
      "learning_rate": 3.6585365853658535e-05,
      "loss": 0.7252,
      "step": 30
    },
    {
      "epoch": 0.13059701492537312,
      "grad_norm": 0.5368416614080481,
      "learning_rate": 4.26829268292683e-05,
      "loss": 0.7007,
      "step": 35
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 0.46353463964140496,
      "learning_rate": 4.878048780487805e-05,
      "loss": 0.6861,
      "step": 40
    },
    {
      "epoch": 0.16791044776119404,
      "grad_norm": 0.44636564035173865,
      "learning_rate": 4.999694850011677e-05,
      "loss": 0.6822,
      "step": 45
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 0.4138434792991525,
      "learning_rate": 4.998455320039942e-05,
      "loss": 0.6609,
      "step": 50
    },
    {
      "epoch": 0.20522388059701493,
      "grad_norm": 0.422728596552499,
      "learning_rate": 4.9962628631365625e-05,
      "loss": 0.6614,
      "step": 55
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 0.38581935884056645,
      "learning_rate": 4.9931184084955565e-05,
      "loss": 0.6551,
      "step": 60
    },
    {
      "epoch": 0.24253731343283583,
      "grad_norm": 0.38792106424667805,
      "learning_rate": 4.989023288780946e-05,
      "loss": 0.644,
      "step": 65
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 0.35124973951258276,
      "learning_rate": 4.9839792395619594e-05,
      "loss": 0.6397,
      "step": 70
    },
    {
      "epoch": 0.2798507462686567,
      "grad_norm": 0.39928220528883474,
      "learning_rate": 4.977988398577472e-05,
      "loss": 0.6374,
      "step": 75
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.390200695997598,
      "learning_rate": 4.971053304830001e-05,
      "loss": 0.6361,
      "step": 80
    },
    {
      "epoch": 0.31716417910447764,
      "grad_norm": 0.38998393581603685,
      "learning_rate": 4.96317689750964e-05,
      "loss": 0.6315,
      "step": 85
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 0.3977052204753468,
      "learning_rate": 4.954362514748392e-05,
      "loss": 0.6343,
      "step": 90
    },
    {
      "epoch": 0.35447761194029853,
      "grad_norm": 0.3799021008048012,
      "learning_rate": 4.9446138922054206e-05,
      "loss": 0.6326,
      "step": 95
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.42417763729903135,
      "learning_rate": 4.933935161483824e-05,
      "loss": 0.628,
      "step": 100
    },
    {
      "epoch": 0.3917910447761194,
      "grad_norm": 0.4175425415930285,
      "learning_rate": 4.922330848379606e-05,
      "loss": 0.6244,
      "step": 105
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 0.3780496454852693,
      "learning_rate": 4.909805870963577e-05,
      "loss": 0.6316,
      "step": 110
    },
    {
      "epoch": 0.4291044776119403,
      "grad_norm": 0.43250766991726497,
      "learning_rate": 4.89636553749701e-05,
      "loss": 0.6219,
      "step": 115
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.45900715839677686,
      "learning_rate": 4.882015544181922e-05,
      "loss": 0.6234,
      "step": 120
    },
    {
      "epoch": 0.4664179104477612,
      "grad_norm": 0.3801562720208039,
      "learning_rate": 4.866761972746946e-05,
      "loss": 0.6203,
      "step": 125
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.40763368662979566,
      "learning_rate": 4.850611287869809e-05,
      "loss": 0.6192,
      "step": 130
    },
    {
      "epoch": 0.503731343283582,
      "grad_norm": 0.387695878234886,
      "learning_rate": 4.833570334437505e-05,
      "loss": 0.6158,
      "step": 135
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.3594282184260463,
      "learning_rate": 4.8156463346453454e-05,
      "loss": 0.6218,
      "step": 140
    },
    {
      "epoch": 0.5410447761194029,
      "grad_norm": 0.38751010489421944,
      "learning_rate": 4.7968468849360844e-05,
      "loss": 0.6137,
      "step": 145
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.44064154866322297,
      "learning_rate": 4.777179952780443e-05,
      "loss": 0.6124,
      "step": 150
    },
    {
      "epoch": 0.5783582089552238,
      "grad_norm": 0.3950056095751924,
      "learning_rate": 4.756653873300381e-05,
      "loss": 0.6123,
      "step": 155
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.3802169516389078,
      "learning_rate": 4.735277345736555e-05,
      "loss": 0.6101,
      "step": 160
    },
    {
      "epoch": 0.6156716417910447,
      "grad_norm": 0.5190273440198148,
      "learning_rate": 4.713059429761462e-05,
      "loss": 0.6092,
      "step": 165
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 0.47079908923901587,
      "learning_rate": 4.690009541639818e-05,
      "loss": 0.6094,
      "step": 170
    },
    {
      "epoch": 0.6529850746268657,
      "grad_norm": 0.4315284363771648,
      "learning_rate": 4.666137450237816e-05,
      "loss": 0.6095,
      "step": 175
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.40236134418201497,
      "learning_rate": 4.641453272882943e-05,
      "loss": 0.6009,
      "step": 180
    },
    {
      "epoch": 0.6902985074626866,
      "grad_norm": 0.43576469418064606,
      "learning_rate": 4.615967471076114e-05,
      "loss": 0.6043,
      "step": 185
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.3924375734617119,
      "learning_rate": 4.5896908460579396e-05,
      "loss": 0.6082,
      "step": 190
    },
    {
      "epoch": 0.7276119402985075,
      "grad_norm": 0.37813678352592345,
      "learning_rate": 4.562634534231012e-05,
      "loss": 0.6111,
      "step": 195
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 0.4255214995928645,
      "learning_rate": 4.5348100024401387e-05,
      "loss": 0.6053,
      "step": 200
    },
    {
      "epoch": 0.7649253731343284,
      "grad_norm": 0.4230323004235886,
      "learning_rate": 4.5062290431125306e-05,
      "loss": 0.6021,
      "step": 205
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.3891283415062517,
      "learning_rate": 4.476903769260014e-05,
      "loss": 0.6074,
      "step": 210
    },
    {
      "epoch": 0.8022388059701493,
      "grad_norm": 0.359821193476202,
      "learning_rate": 4.4468466093453555e-05,
      "loss": 0.6011,
      "step": 215
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.3922321010105192,
      "learning_rate": 4.416070302014912e-05,
      "loss": 0.6012,
      "step": 220
    },
    {
      "epoch": 0.8395522388059702,
      "grad_norm": 0.4054255182956602,
      "learning_rate": 4.384587890699813e-05,
      "loss": 0.6078,
      "step": 225
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.46435941050583623,
      "learning_rate": 4.352412718087967e-05,
      "loss": 0.6036,
      "step": 230
    },
    {
      "epoch": 0.8768656716417911,
      "grad_norm": 0.4515063244889334,
      "learning_rate": 4.31955842046925e-05,
      "loss": 0.6037,
      "step": 235
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.3782429863515032,
      "learning_rate": 4.2860389219562457e-05,
      "loss": 0.5992,
      "step": 240
    },
    {
      "epoch": 0.914179104477612,
      "grad_norm": 0.3861236280491249,
      "learning_rate": 4.25186842858302e-05,
      "loss": 0.603,
      "step": 245
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.36429692137779823,
      "learning_rate": 4.217061422284397e-05,
      "loss": 0.5994,
      "step": 250
    },
    {
      "epoch": 0.9514925373134329,
      "grad_norm": 0.4118616829934472,
      "learning_rate": 4.181632654758317e-05,
      "loss": 0.5949,
      "step": 255
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.41043315907009315,
      "learning_rate": 4.145597141213857e-05,
      "loss": 0.5986,
      "step": 260
    },
    {
      "epoch": 0.9888059701492538,
      "grad_norm": 0.39055993718072285,
      "learning_rate": 4.1089701540075746e-05,
      "loss": 0.6022,
      "step": 265
    },
    {
      "epoch": 1.007462686567164,
      "grad_norm": 0.3847644920200864,
      "learning_rate": 4.07176721617087e-05,
      "loss": 0.5929,
      "step": 270
    },
    {
      "epoch": 1.0261194029850746,
      "grad_norm": 0.41157541834436956,
      "learning_rate": 4.034004094831106e-05,
      "loss": 0.595,
      "step": 275
    },
    {
      "epoch": 1.044776119402985,
      "grad_norm": 0.4187910574304168,
      "learning_rate": 3.995696794529279e-05,
      "loss": 0.5958,
      "step": 280
    },
    {
      "epoch": 1.0634328358208955,
      "grad_norm": 0.3752055081771545,
      "learning_rate": 3.9568615504370675e-05,
      "loss": 0.5961,
      "step": 285
    },
    {
      "epoch": 1.0820895522388059,
      "grad_norm": 0.42027998796322924,
      "learning_rate": 3.9175148214761445e-05,
      "loss": 0.5855,
      "step": 290
    },
    {
      "epoch": 1.1007462686567164,
      "grad_norm": 0.43658130712850085,
      "learning_rate": 3.877673283342647e-05,
      "loss": 0.5938,
      "step": 295
    },
    {
      "epoch": 1.1194029850746268,
      "grad_norm": 0.430457565646321,
      "learning_rate": 3.8373538214397895e-05,
      "loss": 0.5889,
      "step": 300
    },
    {
      "epoch": 1.1380597014925373,
      "grad_norm": 0.369429450742487,
      "learning_rate": 3.796573523721588e-05,
      "loss": 0.5917,
      "step": 305
    },
    {
      "epoch": 1.1567164179104479,
      "grad_norm": 0.35118841190764827,
      "learning_rate": 3.755349673450747e-05,
      "loss": 0.5897,
      "step": 310
    },
    {
      "epoch": 1.1753731343283582,
      "grad_norm": 0.43036161901845593,
      "learning_rate": 3.713699741873769e-05,
      "loss": 0.5899,
      "step": 315
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 0.39048195458912954,
      "learning_rate": 3.6716413808163996e-05,
      "loss": 0.5919,
      "step": 320
    },
    {
      "epoch": 1.212686567164179,
      "grad_norm": 0.38357903651062303,
      "learning_rate": 3.6291924152025287e-05,
      "loss": 0.59,
      "step": 325
    },
    {
      "epoch": 1.2313432835820897,
      "grad_norm": 0.4362556920006053,
      "learning_rate": 3.5863708354997426e-05,
      "loss": 0.5872,
      "step": 330
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.38432808337966534,
      "learning_rate": 3.5431947900947086e-05,
      "loss": 0.5923,
      "step": 335
    },
    {
      "epoch": 1.2686567164179103,
      "grad_norm": 0.3682629714132096,
      "learning_rate": 3.499682577601638e-05,
      "loss": 0.5834,
      "step": 340
    },
    {
      "epoch": 1.287313432835821,
      "grad_norm": 0.4315330247371912,
      "learning_rate": 3.455852639107071e-05,
      "loss": 0.5896,
      "step": 345
    },
    {
      "epoch": 1.3059701492537314,
      "grad_norm": 0.3629622380785368,
      "learning_rate": 3.4117235503542874e-05,
      "loss": 0.5871,
      "step": 350
    },
    {
      "epoch": 1.3246268656716418,
      "grad_norm": 0.3922640004199405,
      "learning_rate": 3.3673140138706474e-05,
      "loss": 0.5914,
      "step": 355
    },
    {
      "epoch": 1.3432835820895521,
      "grad_norm": 0.35817834691642214,
      "learning_rate": 3.322642851041199e-05,
      "loss": 0.5853,
      "step": 360
    },
    {
      "epoch": 1.3619402985074627,
      "grad_norm": 0.3816232424070772,
      "learning_rate": 3.277728994131904e-05,
      "loss": 0.5824,
      "step": 365
    },
    {
      "epoch": 1.3805970149253732,
      "grad_norm": 0.4132410785960827,
      "learning_rate": 3.232591478265887e-05,
      "loss": 0.5934,
      "step": 370
    },
    {
      "epoch": 1.3992537313432836,
      "grad_norm": 0.36404595249873795,
      "learning_rate": 3.187249433356076e-05,
      "loss": 0.5874,
      "step": 375
    },
    {
      "epoch": 1.417910447761194,
      "grad_norm": 0.38024271420241285,
      "learning_rate": 3.141722075997681e-05,
      "loss": 0.5867,
      "step": 380
    },
    {
      "epoch": 1.4365671641791045,
      "grad_norm": 0.3334177836252411,
      "learning_rate": 3.096028701323926e-05,
      "loss": 0.5808,
      "step": 385
    },
    {
      "epoch": 1.455223880597015,
      "grad_norm": 0.35555456743280156,
      "learning_rate": 3.050188674828507e-05,
      "loss": 0.5833,
      "step": 390
    },
    {
      "epoch": 1.4738805970149254,
      "grad_norm": 0.3563293906188617,
      "learning_rate": 3.00422142415822e-05,
      "loss": 0.5794,
      "step": 395
    },
    {
      "epoch": 1.4925373134328357,
      "grad_norm": 0.37347508205822805,
      "learning_rate": 2.958146430879254e-05,
      "loss": 0.5838,
      "step": 400
    },
    {
      "epoch": 1.5111940298507462,
      "grad_norm": 0.3636667109098333,
      "learning_rate": 2.9119832222206262e-05,
      "loss": 0.578,
      "step": 405
    },
    {
      "epoch": 1.5298507462686568,
      "grad_norm": 0.41991140965552654,
      "learning_rate": 2.8657513627982702e-05,
      "loss": 0.5824,
      "step": 410
    },
    {
      "epoch": 1.5485074626865671,
      "grad_norm": 0.3847464343971541,
      "learning_rate": 2.8194704463232792e-05,
      "loss": 0.588,
      "step": 415
    },
    {
      "epoch": 1.5671641791044775,
      "grad_norm": 0.35032797152530204,
      "learning_rate": 2.7731600872978102e-05,
      "loss": 0.5842,
      "step": 420
    },
    {
      "epoch": 1.585820895522388,
      "grad_norm": 0.3656500421389817,
      "learning_rate": 2.726839912702191e-05,
      "loss": 0.5867,
      "step": 425
    },
    {
      "epoch": 1.6044776119402986,
      "grad_norm": 0.40068784377780137,
      "learning_rate": 2.6805295536767224e-05,
      "loss": 0.5851,
      "step": 430
    },
    {
      "epoch": 1.623134328358209,
      "grad_norm": 0.321173003139559,
      "learning_rate": 2.6342486372017306e-05,
      "loss": 0.5843,
      "step": 435
    },
    {
      "epoch": 1.6417910447761193,
      "grad_norm": 0.32237157232459224,
      "learning_rate": 2.5880167777793746e-05,
      "loss": 0.5846,
      "step": 440
    },
    {
      "epoch": 1.6604477611940298,
      "grad_norm": 0.3520631416157162,
      "learning_rate": 2.5418535691207464e-05,
      "loss": 0.5869,
      "step": 445
    },
    {
      "epoch": 1.6791044776119404,
      "grad_norm": 0.35075232086508196,
      "learning_rate": 2.49577857584178e-05,
      "loss": 0.5783,
      "step": 450
    },
    {
      "epoch": 1.6977611940298507,
      "grad_norm": 0.3292385498979277,
      "learning_rate": 2.4498113251714936e-05,
      "loss": 0.5875,
      "step": 455
    },
    {
      "epoch": 1.716417910447761,
      "grad_norm": 0.3130865482957303,
      "learning_rate": 2.4039712986760755e-05,
      "loss": 0.5878,
      "step": 460
    },
    {
      "epoch": 1.7350746268656716,
      "grad_norm": 0.3478733318118067,
      "learning_rate": 2.35827792400232e-05,
      "loss": 0.5874,
      "step": 465
    },
    {
      "epoch": 1.7537313432835822,
      "grad_norm": 0.3544639262381643,
      "learning_rate": 2.3127505666439243e-05,
      "loss": 0.5854,
      "step": 470
    },
    {
      "epoch": 1.7723880597014925,
      "grad_norm": 0.3587084792300111,
      "learning_rate": 2.267408521734113e-05,
      "loss": 0.5826,
      "step": 475
    },
    {
      "epoch": 1.7910447761194028,
      "grad_norm": 0.3367799313961551,
      "learning_rate": 2.2222710058680963e-05,
      "loss": 0.5796,
      "step": 480
    },
    {
      "epoch": 1.8097014925373134,
      "grad_norm": 0.31508822995257363,
      "learning_rate": 2.1773571489588017e-05,
      "loss": 0.5814,
      "step": 485
    },
    {
      "epoch": 1.828358208955224,
      "grad_norm": 0.31530891070364053,
      "learning_rate": 2.132685986129353e-05,
      "loss": 0.5822,
      "step": 490
    },
    {
      "epoch": 1.8470149253731343,
      "grad_norm": 0.3219435163937165,
      "learning_rate": 2.088276449645714e-05,
      "loss": 0.5886,
      "step": 495
    },
    {
      "epoch": 1.8656716417910446,
      "grad_norm": 0.33551685144976545,
      "learning_rate": 2.0441473608929303e-05,
      "loss": 0.5899,
      "step": 500
    },
    {
      "epoch": 1.8843283582089554,
      "grad_norm": 0.33919972994543235,
      "learning_rate": 2.0003174223983623e-05,
      "loss": 0.5844,
      "step": 505
    },
    {
      "epoch": 1.9029850746268657,
      "grad_norm": 0.334804929939135,
      "learning_rate": 1.9568052099052912e-05,
      "loss": 0.5793,
      "step": 510
    },
    {
      "epoch": 1.921641791044776,
      "grad_norm": 0.3678088109878758,
      "learning_rate": 1.913629164500258e-05,
      "loss": 0.5876,
      "step": 515
    },
    {
      "epoch": 1.9402985074626866,
      "grad_norm": 0.3556925565126293,
      "learning_rate": 1.8708075847974722e-05,
      "loss": 0.5873,
      "step": 520
    },
    {
      "epoch": 1.9589552238805972,
      "grad_norm": 0.32600987065289666,
      "learning_rate": 1.8283586191836006e-05,
      "loss": 0.5817,
      "step": 525
    },
    {
      "epoch": 1.9776119402985075,
      "grad_norm": 0.3449350706208757,
      "learning_rate": 1.786300258126231e-05,
      "loss": 0.5875,
      "step": 530
    },
    {
      "epoch": 1.9962686567164178,
      "grad_norm": 0.31606697268698664,
      "learning_rate": 1.744650326549254e-05,
      "loss": 0.5771,
      "step": 535
    },
    {
      "epoch": 2.014925373134328,
      "grad_norm": 0.3226406048094545,
      "learning_rate": 1.703426476278413e-05,
      "loss": 0.5804,
      "step": 540
    },
    {
      "epoch": 2.033582089552239,
      "grad_norm": 0.3192747041987855,
      "learning_rate": 1.6626461785602114e-05,
      "loss": 0.5779,
      "step": 545
    },
    {
      "epoch": 2.0522388059701493,
      "grad_norm": 0.32942244104358903,
      "learning_rate": 1.622326716657353e-05,
      "loss": 0.5784,
      "step": 550
    },
    {
      "epoch": 2.0708955223880596,
      "grad_norm": 0.3208641410944032,
      "learning_rate": 1.582485178523856e-05,
      "loss": 0.5752,
      "step": 555
    },
    {
      "epoch": 2.08955223880597,
      "grad_norm": 0.32990050316542857,
      "learning_rate": 1.5431384495629337e-05,
      "loss": 0.5776,
      "step": 560
    },
    {
      "epoch": 2.1082089552238807,
      "grad_norm": 0.32151659170031266,
      "learning_rate": 1.504303205470723e-05,
      "loss": 0.5791,
      "step": 565
    },
    {
      "epoch": 2.126865671641791,
      "grad_norm": 0.3190957196858403,
      "learning_rate": 1.4659959051688944e-05,
      "loss": 0.5823,
      "step": 570
    },
    {
      "epoch": 2.1455223880597014,
      "grad_norm": 0.46789518766409627,
      "learning_rate": 1.4282327838291304e-05,
      "loss": 0.5738,
      "step": 575
    },
    {
      "epoch": 2.1641791044776117,
      "grad_norm": 0.3289296714854712,
      "learning_rate": 1.391029845992426e-05,
      "loss": 0.5784,
      "step": 580
    },
    {
      "epoch": 2.1828358208955225,
      "grad_norm": 0.33039007431665324,
      "learning_rate": 1.3544028587861441e-05,
      "loss": 0.5783,
      "step": 585
    },
    {
      "epoch": 2.201492537313433,
      "grad_norm": 0.3211020903500206,
      "learning_rate": 1.3183673452416833e-05,
      "loss": 0.5741,
      "step": 590
    },
    {
      "epoch": 2.220149253731343,
      "grad_norm": 0.33763161409176584,
      "learning_rate": 1.2829385777156036e-05,
      "loss": 0.5729,
      "step": 595
    },
    {
      "epoch": 2.2388059701492535,
      "grad_norm": 0.33060117221059376,
      "learning_rate": 1.2481315714169812e-05,
      "loss": 0.5781,
      "step": 600
    },
    {
      "epoch": 2.2574626865671643,
      "grad_norm": 0.33725376949413927,
      "learning_rate": 1.2139610780437552e-05,
      "loss": 0.582,
      "step": 605
    },
    {
      "epoch": 2.2761194029850746,
      "grad_norm": 0.2995179260780104,
      "learning_rate": 1.1804415795307511e-05,
      "loss": 0.5769,
      "step": 610
    },
    {
      "epoch": 2.294776119402985,
      "grad_norm": 0.31943181761280764,
      "learning_rate": 1.1475872819120328e-05,
      "loss": 0.5777,
      "step": 615
    },
    {
      "epoch": 2.3134328358208958,
      "grad_norm": 0.29904241071720106,
      "learning_rate": 1.1154121093001874e-05,
      "loss": 0.5751,
      "step": 620
    },
    {
      "epoch": 2.332089552238806,
      "grad_norm": 0.29530206657628216,
      "learning_rate": 1.083929697985089e-05,
      "loss": 0.5641,
      "step": 625
    },
    {
      "epoch": 2.3507462686567164,
      "grad_norm": 0.32449591504588926,
      "learning_rate": 1.0531533906546454e-05,
      "loss": 0.5783,
      "step": 630
    },
    {
      "epoch": 2.3694029850746268,
      "grad_norm": 0.335784339109831,
      "learning_rate": 1.023096230739987e-05,
      "loss": 0.5723,
      "step": 635
    },
    {
      "epoch": 2.388059701492537,
      "grad_norm": 0.31020627295629266,
      "learning_rate": 9.937709568874698e-06,
      "loss": 0.581,
      "step": 640
    },
    {
      "epoch": 2.406716417910448,
      "grad_norm": 0.3012216543355107,
      "learning_rate": 9.651899975598627e-06,
      "loss": 0.5769,
      "step": 645
    },
    {
      "epoch": 2.425373134328358,
      "grad_norm": 0.32450430571066397,
      "learning_rate": 9.373654657689884e-06,
      "loss": 0.5762,
      "step": 650
    },
    {
      "epoch": 2.4440298507462686,
      "grad_norm": 0.3012930563631571,
      "learning_rate": 9.103091539420603e-06,
      "loss": 0.5747,
      "step": 655
    },
    {
      "epoch": 2.4626865671641793,
      "grad_norm": 0.3100034944540072,
      "learning_rate": 8.840325289238862e-06,
      "loss": 0.5751,
      "step": 660
    },
    {
      "epoch": 2.4813432835820897,
      "grad_norm": 0.29337231270061,
      "learning_rate": 8.585467271170572e-06,
      "loss": 0.5778,
      "step": 665
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.29327922763808,
      "learning_rate": 8.338625497621846e-06,
      "loss": 0.5751,
      "step": 670
    },
    {
      "epoch": 2.5186567164179103,
      "grad_norm": 0.3044981666505116,
      "learning_rate": 8.099904583601826e-06,
      "loss": 0.5777,
      "step": 675
    },
    {
      "epoch": 2.5373134328358207,
      "grad_norm": 0.30077068543810176,
      "learning_rate": 7.869405702385388e-06,
      "loss": 0.5733,
      "step": 680
    },
    {
      "epoch": 2.5559701492537314,
      "grad_norm": 0.31187553828615966,
      "learning_rate": 7.647226542634454e-06,
      "loss": 0.5786,
      "step": 685
    },
    {
      "epoch": 2.574626865671642,
      "grad_norm": 0.2928003883848623,
      "learning_rate": 7.433461266996197e-06,
      "loss": 0.5746,
      "step": 690
    },
    {
      "epoch": 2.593283582089552,
      "grad_norm": 0.289384782079962,
      "learning_rate": 7.228200472195573e-06,
      "loss": 0.5749,
      "step": 695
    },
    {
      "epoch": 2.611940298507463,
      "grad_norm": 0.2966613143366587,
      "learning_rate": 7.031531150639156e-06,
      "loss": 0.5769,
      "step": 700
    },
    {
      "epoch": 2.6305970149253732,
      "grad_norm": 0.2955583658511631,
      "learning_rate": 6.843536653546554e-06,
      "loss": 0.5724,
      "step": 705
    },
    {
      "epoch": 2.6492537313432836,
      "grad_norm": 0.2839683212080737,
      "learning_rate": 6.664296655624957e-06,
      "loss": 0.5767,
      "step": 710
    },
    {
      "epoch": 2.667910447761194,
      "grad_norm": 0.3111148068064659,
      "learning_rate": 6.49388712130192e-06,
      "loss": 0.5812,
      "step": 715
    },
    {
      "epoch": 2.6865671641791042,
      "grad_norm": 0.29595259288458947,
      "learning_rate": 6.332380272530536e-06,
      "loss": 0.5787,
      "step": 720
    },
    {
      "epoch": 2.705223880597015,
      "grad_norm": 0.30049652644929575,
      "learning_rate": 6.17984455818078e-06,
      "loss": 0.569,
      "step": 725
    },
    {
      "epoch": 2.7238805970149254,
      "grad_norm": 0.28596569042503084,
      "learning_rate": 6.036344625029903e-06,
      "loss": 0.573,
      "step": 730
    },
    {
      "epoch": 2.7425373134328357,
      "grad_norm": 0.3137116309508735,
      "learning_rate": 5.901941290364234e-06,
      "loss": 0.5736,
      "step": 735
    },
    {
      "epoch": 2.7611940298507465,
      "grad_norm": 0.3009948081748677,
      "learning_rate": 5.776691516203942e-06,
      "loss": 0.5787,
      "step": 740
    },
    {
      "epoch": 2.779850746268657,
      "grad_norm": 0.3040010450151502,
      "learning_rate": 5.660648385161759e-06,
      "loss": 0.5767,
      "step": 745
    },
    {
      "epoch": 2.798507462686567,
      "grad_norm": 0.29325707371427784,
      "learning_rate": 5.5538610779457975e-06,
      "loss": 0.5754,
      "step": 750
    },
    {
      "epoch": 2.8171641791044775,
      "grad_norm": 0.2828494761102343,
      "learning_rate": 5.456374852516083e-06,
      "loss": 0.5767,
      "step": 755
    },
    {
      "epoch": 2.835820895522388,
      "grad_norm": 0.2919715511195579,
      "learning_rate": 5.368231024903606e-06,
      "loss": 0.5731,
      "step": 760
    },
    {
      "epoch": 2.8544776119402986,
      "grad_norm": 0.2984091496111364,
      "learning_rate": 5.289466951699997e-06,
      "loss": 0.5766,
      "step": 765
    },
    {
      "epoch": 2.873134328358209,
      "grad_norm": 0.28179592978223916,
      "learning_rate": 5.2201160142252795e-06,
      "loss": 0.5777,
      "step": 770
    },
    {
      "epoch": 2.8917910447761193,
      "grad_norm": 0.29010245215139613,
      "learning_rate": 5.1602076043804036e-06,
      "loss": 0.5717,
      "step": 775
    },
    {
      "epoch": 2.91044776119403,
      "grad_norm": 0.28595377774074365,
      "learning_rate": 5.1097671121905425e-06,
      "loss": 0.5778,
      "step": 780
    },
    {
      "epoch": 2.9291044776119404,
      "grad_norm": 0.29043591574669186,
      "learning_rate": 5.0688159150444395e-06,
      "loss": 0.5703,
      "step": 785
    },
    {
      "epoch": 2.9477611940298507,
      "grad_norm": 0.28909438769255513,
      "learning_rate": 5.0373713686343774e-06,
      "loss": 0.5809,
      "step": 790
    },
    {
      "epoch": 2.966417910447761,
      "grad_norm": 0.300234068745217,
      "learning_rate": 5.015446799600588e-06,
      "loss": 0.5728,
      "step": 795
    },
    {
      "epoch": 2.9850746268656714,
      "grad_norm": 0.2900267072345629,
      "learning_rate": 5.003051499883236e-06,
      "loss": 0.5827,
      "step": 800
    },
    {
      "epoch": 3.0,
      "step": 804,
      "total_flos": 1.3049081114918912e+16,
      "train_loss": 0.6017516222759266,
      "train_runtime": 25832.0855,
      "train_samples_per_second": 3.984,
      "train_steps_per_second": 0.031
    }
  ],
  "logging_steps": 5,
  "max_steps": 804,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3049081114918912e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}