{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9980411361410382,
  "eval_steps": 500,
  "global_step": 1530,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001305909239307868,
      "grad_norm": 1.5176929237900458,
      "learning_rate": 6.535947712418301e-06,
      "loss": 1.3807,
      "step": 1
    },
    {
      "epoch": 0.00652954619653934,
      "grad_norm": 1.4666860534955573,
      "learning_rate": 3.2679738562091506e-05,
      "loss": 1.3938,
      "step": 5
    },
    {
      "epoch": 0.01305909239307868,
      "grad_norm": 0.5363688932594572,
      "learning_rate": 6.535947712418301e-05,
      "loss": 1.356,
      "step": 10
    },
    {
      "epoch": 0.019588638589618023,
      "grad_norm": 0.8089688556533041,
      "learning_rate": 9.80392156862745e-05,
      "loss": 1.2953,
      "step": 15
    },
    {
      "epoch": 0.02611818478615736,
      "grad_norm": 0.39846421466147275,
      "learning_rate": 0.00013071895424836603,
      "loss": 1.2151,
      "step": 20
    },
    {
      "epoch": 0.0326477309826967,
      "grad_norm": 0.3409043898948281,
      "learning_rate": 0.00016339869281045753,
      "loss": 1.1643,
      "step": 25
    },
    {
      "epoch": 0.039177277179236046,
      "grad_norm": 0.31538679140145476,
      "learning_rate": 0.000196078431372549,
      "loss": 1.1351,
      "step": 30
    },
    {
      "epoch": 0.045706823375775384,
      "grad_norm": 0.18553699683397415,
      "learning_rate": 0.00022875816993464052,
      "loss": 1.1183,
      "step": 35
    },
    {
      "epoch": 0.05223636957231472,
      "grad_norm": 0.17325786777759875,
      "learning_rate": 0.00026143790849673205,
      "loss": 1.1064,
      "step": 40
    },
    {
      "epoch": 0.058765915768854066,
      "grad_norm": 0.13507429198216303,
      "learning_rate": 0.00029411764705882356,
      "loss": 1.0824,
      "step": 45
    },
    {
      "epoch": 0.0652954619653934,
      "grad_norm": 0.12274633467480245,
      "learning_rate": 0.00032679738562091506,
      "loss": 1.0838,
      "step": 50
    },
    {
      "epoch": 0.07182500816193274,
      "grad_norm": 0.11866432066945673,
      "learning_rate": 0.0003594771241830065,
      "loss": 1.062,
      "step": 55
    },
    {
      "epoch": 0.07835455435847209,
      "grad_norm": 0.11978234936206603,
      "learning_rate": 0.000392156862745098,
      "loss": 1.0613,
      "step": 60
    },
    {
      "epoch": 0.08488410055501143,
      "grad_norm": 0.12218251397033981,
      "learning_rate": 0.00042483660130718953,
      "loss": 1.0599,
      "step": 65
    },
    {
      "epoch": 0.09141364675155077,
      "grad_norm": 0.1296292349325357,
      "learning_rate": 0.00045751633986928104,
      "loss": 1.0498,
      "step": 70
    },
    {
      "epoch": 0.0979431929480901,
      "grad_norm": 0.1631295253125358,
      "learning_rate": 0.0004901960784313725,
      "loss": 1.0346,
      "step": 75
    },
    {
      "epoch": 0.10447273914462944,
      "grad_norm": 0.16333656557624185,
      "learning_rate": 0.0005228758169934641,
      "loss": 1.0397,
      "step": 80
    },
    {
      "epoch": 0.1110022853411688,
      "grad_norm": 0.1219198345032216,
      "learning_rate": 0.0005555555555555556,
      "loss": 1.0239,
      "step": 85
    },
    {
      "epoch": 0.11753183153770813,
      "grad_norm": 0.11908753148894326,
      "learning_rate": 0.0005882352941176471,
      "loss": 1.0417,
      "step": 90
    },
    {
      "epoch": 0.12406137773424747,
      "grad_norm": 0.12952769208513357,
      "learning_rate": 0.0006209150326797386,
      "loss": 1.0232,
      "step": 95
    },
    {
      "epoch": 0.1305909239307868,
      "grad_norm": 0.14465416033908837,
      "learning_rate": 0.0006535947712418301,
      "loss": 1.0224,
      "step": 100
    },
    {
      "epoch": 0.13712047012732614,
      "grad_norm": 0.14159329198635048,
      "learning_rate": 0.0006862745098039216,
      "loss": 1.0284,
      "step": 105
    },
    {
      "epoch": 0.14365001632386548,
      "grad_norm": 0.14423304787158964,
      "learning_rate": 0.000718954248366013,
      "loss": 1.0181,
      "step": 110
    },
    {
      "epoch": 0.15017956252040482,
      "grad_norm": 0.13162876341322993,
      "learning_rate": 0.0007516339869281046,
      "loss": 1.0196,
      "step": 115
    },
    {
      "epoch": 0.15670910871694418,
      "grad_norm": 0.16821786515368486,
      "learning_rate": 0.000784313725490196,
      "loss": 1.0192,
      "step": 120
    },
    {
      "epoch": 0.16323865491348352,
      "grad_norm": 0.132554115819836,
      "learning_rate": 0.0008169934640522876,
      "loss": 1.015,
      "step": 125
    },
    {
      "epoch": 0.16976820111002286,
      "grad_norm": 0.13510439743095423,
      "learning_rate": 0.0008496732026143791,
      "loss": 1.006,
      "step": 130
    },
    {
      "epoch": 0.1762977473065622,
      "grad_norm": 0.14032061281816732,
      "learning_rate": 0.0008823529411764706,
      "loss": 1.0133,
      "step": 135
    },
    {
      "epoch": 0.18282729350310153,
      "grad_norm": 0.14509984551437993,
      "learning_rate": 0.0009150326797385621,
      "loss": 0.9948,
      "step": 140
    },
    {
      "epoch": 0.18935683969964087,
      "grad_norm": 0.1218052170973073,
      "learning_rate": 0.0009477124183006536,
      "loss": 1.0105,
      "step": 145
    },
    {
      "epoch": 0.1958863858961802,
      "grad_norm": 0.1198234166583821,
      "learning_rate": 0.000980392156862745,
      "loss": 1.0015,
      "step": 150
    },
    {
      "epoch": 0.20241593209271955,
      "grad_norm": 0.13090633943525062,
      "learning_rate": 0.0009999947948756244,
      "loss": 1.0003,
      "step": 155
    },
    {
      "epoch": 0.20894547828925888,
      "grad_norm": 0.1701673187395784,
      "learning_rate": 0.000999936238470993,
      "loss": 0.9998,
      "step": 160
    },
    {
      "epoch": 0.21547502448579825,
      "grad_norm": 0.1353983791211149,
      "learning_rate": 0.0009998126269014254,
      "loss": 0.9919,
      "step": 165
    },
    {
      "epoch": 0.2220045706823376,
      "grad_norm": 0.17639183203845668,
      "learning_rate": 0.000999623976252115,
      "loss": 0.9973,
      "step": 170
    },
    {
      "epoch": 0.22853411687887693,
      "grad_norm": 0.22317731245818054,
      "learning_rate": 0.0009993703110715907,
      "loss": 0.9837,
      "step": 175
    },
    {
      "epoch": 0.23506366307541626,
      "grad_norm": 0.17944080802238344,
      "learning_rate": 0.0009990516643685222,
      "loss": 0.9888,
      "step": 180
    },
    {
      "epoch": 0.2415932092719556,
      "grad_norm": 0.14179006631877772,
      "learning_rate": 0.0009986680776074245,
      "loss": 0.9952,
      "step": 185
    },
    {
      "epoch": 0.24812275546849494,
      "grad_norm": 0.12892751691678134,
      "learning_rate": 0.000998219600703263,
      "loss": 0.9865,
      "step": 190
    },
    {
      "epoch": 0.2546523016650343,
      "grad_norm": 0.14114362619876794,
      "learning_rate": 0.0009977062920149581,
      "loss": 0.9887,
      "step": 195
    },
    {
      "epoch": 0.2611818478615736,
      "grad_norm": 0.11951785802987536,
      "learning_rate": 0.0009971282183377902,
      "loss": 0.9869,
      "step": 200
    },
    {
      "epoch": 0.26771139405811295,
      "grad_norm": 0.13109210166576132,
      "learning_rate": 0.000996485454894709,
      "loss": 0.9784,
      "step": 205
    },
    {
      "epoch": 0.2742409402546523,
      "grad_norm": 0.1213332482560537,
      "learning_rate": 0.0009957780853265441,
      "loss": 0.9721,
      "step": 210
    },
    {
      "epoch": 0.2807704864511916,
      "grad_norm": 0.1320737011423269,
      "learning_rate": 0.0009950062016811216,
      "loss": 0.9732,
      "step": 215
    },
    {
      "epoch": 0.28730003264773096,
      "grad_norm": 0.20533331889811923,
      "learning_rate": 0.000994169904401286,
      "loss": 0.9903,
      "step": 220
    },
    {
      "epoch": 0.2938295788442703,
      "grad_norm": 0.14222408456303698,
      "learning_rate": 0.0009932693023118298,
      "loss": 0.9677,
      "step": 225
    },
    {
      "epoch": 0.30035912504080964,
      "grad_norm": 0.14741767640934417,
      "learning_rate": 0.000992304512605333,
      "loss": 0.9693,
      "step": 230
    },
    {
      "epoch": 0.30688867123734903,
      "grad_norm": 0.15919612029269203,
      "learning_rate": 0.000991275660826912,
      "loss": 0.9729,
      "step": 235
    },
    {
      "epoch": 0.31341821743388837,
      "grad_norm": 0.16758071722876805,
      "learning_rate": 0.0009901828808578845,
      "loss": 0.9744,
      "step": 240
    },
    {
      "epoch": 0.3199477636304277,
      "grad_norm": 0.13991415144222957,
      "learning_rate": 0.0009890263148983464,
      "loss": 0.9792,
      "step": 245
    },
    {
      "epoch": 0.32647730982696704,
      "grad_norm": 0.13812929689120448,
      "learning_rate": 0.0009878061134486684,
      "loss": 0.9641,
      "step": 250
    },
    {
      "epoch": 0.3330068560235064,
      "grad_norm": 0.12864407673138062,
      "learning_rate": 0.0009865224352899118,
      "loss": 0.9654,
      "step": 255
    },
    {
      "epoch": 0.3395364022200457,
      "grad_norm": 0.1376072090028069,
      "learning_rate": 0.0009851754474631672,
      "loss": 0.9632,
      "step": 260
    },
    {
      "epoch": 0.34606594841658506,
      "grad_norm": 0.10481451680364469,
      "learning_rate": 0.000983765325247817,
      "loss": 0.9483,
      "step": 265
    },
    {
      "epoch": 0.3525954946131244,
      "grad_norm": 0.16165468108484743,
      "learning_rate": 0.0009822922521387277,
      "loss": 0.9468,
      "step": 270
    },
    {
      "epoch": 0.35912504080966373,
      "grad_norm": 0.1522191745081507,
      "learning_rate": 0.0009807564198223717,
      "loss": 0.9496,
      "step": 275
    },
    {
      "epoch": 0.36565458700620307,
      "grad_norm": 0.23965626958055397,
      "learning_rate": 0.0009791580281518844,
      "loss": 0.9567,
      "step": 280
    },
    {
      "epoch": 0.3721841332027424,
      "grad_norm": 0.12506763543712593,
      "learning_rate": 0.000977497285121057,
      "loss": 0.9643,
      "step": 285
    },
    {
      "epoch": 0.37871367939928174,
      "grad_norm": 0.195131565436314,
      "learning_rate": 0.0009757744068372723,
      "loss": 0.9614,
      "step": 290
    },
    {
      "epoch": 0.3852432255958211,
      "grad_norm": 0.17281337612882364,
      "learning_rate": 0.0009739896174933816,
      "loss": 0.966,
      "step": 295
    },
    {
      "epoch": 0.3917727717923604,
      "grad_norm": 0.1812097536777823,
      "learning_rate": 0.0009721431493385322,
      "loss": 0.9512,
      "step": 300
    },
    {
      "epoch": 0.39830231798889976,
      "grad_norm": 0.1233723550793044,
      "learning_rate": 0.0009702352426479457,
      "loss": 0.945,
      "step": 305
    },
    {
      "epoch": 0.4048318641854391,
      "grad_norm": 0.15877455699375392,
      "learning_rate": 0.0009682661456916509,
      "loss": 0.9502,
      "step": 310
    },
    {
      "epoch": 0.41136141038197843,
      "grad_norm": 0.10673039266646496,
      "learning_rate": 0.0009662361147021779,
      "loss": 0.9433,
      "step": 315
    },
    {
      "epoch": 0.41789095657851777,
      "grad_norm": 0.09755366289613175,
      "learning_rate": 0.0009641454138412152,
      "loss": 0.9468,
      "step": 320
    },
    {
      "epoch": 0.4244205027750571,
      "grad_norm": 0.13447768608632527,
      "learning_rate": 0.000961994315165235,
      "loss": 0.9561,
      "step": 325
    },
    {
      "epoch": 0.4309500489715965,
      "grad_norm": 0.13249488099024306,
      "learning_rate": 0.0009597830985900912,
      "loss": 0.9379,
      "step": 330
    },
    {
      "epoch": 0.43747959516813584,
      "grad_norm": 1.0347154204650033,
      "learning_rate": 0.0009575120518545955,
      "loss": 0.9538,
      "step": 335
    },
    {
      "epoch": 0.4440091413646752,
      "grad_norm": 0.18955840040907523,
      "learning_rate": 0.0009551814704830733,
      "loss": 0.9475,
      "step": 340
    },
    {
      "epoch": 0.4505386875612145,
      "grad_norm": 0.23929920817479278,
      "learning_rate": 0.0009527916577469104,
      "loss": 0.9423,
      "step": 345
    },
    {
      "epoch": 0.45706823375775385,
      "grad_norm": 0.15365690168496443,
      "learning_rate": 0.0009503429246250867,
      "loss": 0.9517,
      "step": 350
    },
    {
      "epoch": 0.4635977799542932,
      "grad_norm": 0.1909418462656572,
      "learning_rate": 0.0009478355897637118,
      "loss": 0.9473,
      "step": 355
    },
    {
      "epoch": 0.4701273261508325,
      "grad_norm": 0.15566275046966022,
      "learning_rate": 0.0009452699794345582,
      "loss": 0.953,
      "step": 360
    },
    {
      "epoch": 0.47665687234737186,
      "grad_norm": 0.2815360039314576,
      "learning_rate": 0.0009426464274926065,
      "loss": 0.9678,
      "step": 365
    },
    {
      "epoch": 0.4831864185439112,
      "grad_norm": 0.1712468077018436,
      "learning_rate": 0.0009399652753326013,
      "loss": 0.9472,
      "step": 370
    },
    {
      "epoch": 0.48971596474045054,
      "grad_norm": 0.13256792301475717,
      "learning_rate": 0.0009372268718446258,
      "loss": 0.9397,
      "step": 375
    },
    {
      "epoch": 0.4962455109369899,
      "grad_norm": 0.15499245755177785,
      "learning_rate": 0.0009344315733687028,
      "loss": 0.9417,
      "step": 380
    },
    {
      "epoch": 0.5027750571335292,
      "grad_norm": 0.12724271510138044,
      "learning_rate": 0.0009315797436484248,
      "loss": 0.9406,
      "step": 385
    },
    {
      "epoch": 0.5093046033300686,
      "grad_norm": 0.11306248333266236,
      "learning_rate": 0.0009286717537836211,
      "loss": 0.9423,
      "step": 390
    },
    {
      "epoch": 0.5158341495266079,
      "grad_norm": 0.11779489818116985,
      "learning_rate": 0.0009257079821820683,
      "loss": 0.927,
      "step": 395
    },
    {
      "epoch": 0.5223636957231472,
      "grad_norm": 0.1912577157688328,
      "learning_rate": 0.0009226888145102483,
      "loss": 0.9238,
      "step": 400
    },
    {
      "epoch": 0.5288932419196866,
      "grad_norm": 0.12394516787312287,
      "learning_rate": 0.0009196146436431634,
      "loss": 0.9404,
      "step": 405
    },
    {
      "epoch": 0.5354227881162259,
      "grad_norm": 0.10373101206766815,
      "learning_rate": 0.0009164858696132126,
      "loss": 0.9374,
      "step": 410
    },
    {
      "epoch": 0.5419523343127652,
      "grad_norm": 0.11567785512547579,
      "learning_rate": 0.0009133028995581365,
      "loss": 0.9142,
      "step": 415
    },
    {
      "epoch": 0.5484818805093046,
      "grad_norm": 0.10630665932821644,
      "learning_rate": 0.0009100661476680379,
      "loss": 0.9234,
      "step": 420
    },
    {
      "epoch": 0.5550114267058439,
      "grad_norm": 0.11625895390628392,
      "learning_rate": 0.0009067760351314837,
      "loss": 0.9216,
      "step": 425
    },
    {
      "epoch": 0.5615409729023833,
      "grad_norm": 0.1369283022817628,
      "learning_rate": 0.0009034329900806987,
      "loss": 0.9203,
      "step": 430
    },
    {
      "epoch": 0.5680705190989226,
      "grad_norm": 0.12895491050680988,
      "learning_rate": 0.0009000374475358519,
      "loss": 0.9321,
      "step": 435
    },
    {
      "epoch": 0.5746000652954619,
      "grad_norm": 0.10252489725913455,
      "learning_rate": 0.0008965898493484503,
      "loss": 0.9249,
      "step": 440
    },
    {
      "epoch": 0.5811296114920013,
      "grad_norm": 0.11555535133540926,
      "learning_rate": 0.0008930906441438416,
      "loss": 0.9176,
      "step": 445
    },
    {
      "epoch": 0.5876591576885406,
      "grad_norm": 0.13142293730234053,
      "learning_rate": 0.0008895402872628352,
      "loss": 0.9331,
      "step": 450
    },
    {
      "epoch": 0.5941887038850799,
      "grad_norm": 0.13261255329154362,
      "learning_rate": 0.0008859392407024518,
      "loss": 0.9269,
      "step": 455
    },
    {
      "epoch": 0.6007182500816193,
      "grad_norm": 0.15460194439303204,
      "learning_rate": 0.0008822879730558035,
      "loss": 0.9231,
      "step": 460
    },
    {
      "epoch": 0.6072477962781586,
      "grad_norm": 0.119762673875016,
      "learning_rate": 0.0008785869594511183,
      "loss": 0.9214,
      "step": 465
    },
    {
      "epoch": 0.6137773424746981,
      "grad_norm": 0.12397488268122098,
      "learning_rate": 0.0008748366814899118,
      "loss": 0.9163,
      "step": 470
    },
    {
      "epoch": 0.6203068886712374,
      "grad_norm": 0.1414441528692813,
      "learning_rate": 0.0008710376271843202,
      "loss": 0.9093,
      "step": 475
    },
    {
      "epoch": 0.6268364348677767,
      "grad_norm": 0.10339148156020175,
      "learning_rate": 0.0008671902908935943,
      "loss": 0.9188,
      "step": 480
    },
    {
      "epoch": 0.6333659810643161,
      "grad_norm": 0.10562490305225787,
      "learning_rate": 0.0008632951732597714,
      "loss": 0.9215,
      "step": 485
    },
    {
      "epoch": 0.6398955272608554,
      "grad_norm": 0.13035219132958403,
      "learning_rate": 0.0008593527811425279,
      "loss": 0.9011,
      "step": 490
    },
    {
      "epoch": 0.6464250734573947,
      "grad_norm": 0.11087666996405383,
      "learning_rate": 0.0008553636275532236,
      "loss": 0.9048,
      "step": 495
    },
    {
      "epoch": 0.6529546196539341,
      "grad_norm": 0.13208859767925016,
      "learning_rate": 0.0008513282315881448,
      "loss": 0.921,
      "step": 500
    },
    {
      "epoch": 0.6594841658504734,
      "grad_norm": 0.2523227581391801,
      "learning_rate": 0.0008472471183609556,
      "loss": 0.9127,
      "step": 505
    },
    {
      "epoch": 0.6660137120470128,
      "grad_norm": 0.11202017013731237,
      "learning_rate": 0.0008431208189343669,
      "loss": 0.921,
      "step": 510
    },
    {
      "epoch": 0.6725432582435521,
      "grad_norm": 0.10368829401033983,
      "learning_rate": 0.0008389498702510308,
      "loss": 0.909,
      "step": 515
    },
    {
      "epoch": 0.6790728044400914,
      "grad_norm": 0.15566526829088048,
      "learning_rate": 0.0008347348150636703,
      "loss": 0.9203,
      "step": 520
    },
    {
      "epoch": 0.6856023506366308,
      "grad_norm": 0.13196534235222668,
      "learning_rate": 0.000830476201864451,
      "loss": 0.9088,
      "step": 525
    },
    {
      "epoch": 0.6921318968331701,
      "grad_norm": 0.14325442245754208,
      "learning_rate": 0.0008261745848136096,
      "loss": 0.8967,
      "step": 530
    },
    {
      "epoch": 0.6986614430297095,
      "grad_norm": 0.12043450869773256,
      "learning_rate": 0.0008218305236673415,
      "loss": 0.9171,
      "step": 535
    },
    {
      "epoch": 0.7051909892262488,
      "grad_norm": 0.3483895582258434,
      "learning_rate": 0.0008174445837049614,
      "loss": 0.8949,
      "step": 540
    },
    {
      "epoch": 0.7117205354227881,
      "grad_norm": 0.11520419753374456,
      "learning_rate": 0.0008130173356553458,
      "loss": 0.9107,
      "step": 545
    },
    {
      "epoch": 0.7182500816193275,
      "grad_norm": 0.12444009573962261,
      "learning_rate": 0.0008085493556226653,
      "loss": 0.908,
      "step": 550
    },
    {
      "epoch": 0.7247796278158668,
      "grad_norm": 0.11696160855530892,
      "learning_rate": 0.0008040412250114183,
      "loss": 0.8962,
      "step": 555
    },
    {
      "epoch": 0.7313091740124061,
      "grad_norm": 0.10560477394497295,
      "learning_rate": 0.0007994935304507746,
      "loss": 0.8962,
      "step": 560
    },
    {
      "epoch": 0.7378387202089455,
      "grad_norm": 0.14779806033186757,
      "learning_rate": 0.0007949068637182388,
      "loss": 0.9256,
      "step": 565
    },
    {
      "epoch": 0.7443682664054848,
      "grad_norm": 0.11623883306153455,
      "learning_rate": 0.0007902818216626446,
      "loss": 0.8933,
      "step": 570
    },
    {
      "epoch": 0.7508978126020242,
      "grad_norm": 0.09847908909955982,
      "learning_rate": 0.0007856190061264882,
      "loss": 0.8965,
      "step": 575
    },
    {
      "epoch": 0.7574273587985635,
      "grad_norm": 0.11360405922709282,
      "learning_rate": 0.0007809190238676129,
      "loss": 0.9147,
      "step": 580
    },
    {
      "epoch": 0.7639569049951028,
      "grad_norm": 0.11549202352730681,
      "learning_rate": 0.0007761824864802529,
      "loss": 0.9016,
      "step": 585
    },
    {
      "epoch": 0.7704864511916422,
      "grad_norm": 0.11599584602056406,
      "learning_rate": 0.0007714100103154493,
      "loss": 0.9099,
      "step": 590
    },
    {
      "epoch": 0.7770159973881815,
      "grad_norm": 0.0956828361706411,
      "learning_rate": 0.0007666022164008457,
      "loss": 0.8967,
      "step": 595
    },
    {
      "epoch": 0.7835455435847208,
      "grad_norm": 0.1385079291346411,
      "learning_rate": 0.0007617597303598753,
      "loss": 0.8926,
      "step": 600
    },
    {
      "epoch": 0.7900750897812602,
      "grad_norm": 0.10113638563525007,
      "learning_rate": 0.0007568831823303517,
      "loss": 0.8899,
      "step": 605
    },
    {
      "epoch": 0.7966046359777995,
      "grad_norm": 0.0986027480355384,
      "learning_rate": 0.0007519732068824699,
      "loss": 0.8882,
      "step": 610
    },
    {
      "epoch": 0.8031341821743389,
      "grad_norm": 0.12200642828210775,
      "learning_rate": 0.000747030442936232,
      "loss": 0.8963,
      "step": 615
    },
    {
      "epoch": 0.8096637283708782,
      "grad_norm": 0.12216867708075968,
      "learning_rate": 0.000742055533678307,
      "loss": 0.9091,
      "step": 620
    },
    {
      "epoch": 0.8161932745674175,
      "grad_norm": 0.1100041898148475,
      "learning_rate": 0.0007370491264783337,
      "loss": 0.9009,
      "step": 625
    },
    {
      "epoch": 0.8227228207639569,
      "grad_norm": 0.09232471853806018,
      "learning_rate": 0.0007320118728046817,
      "loss": 0.8936,
      "step": 630
    },
    {
      "epoch": 0.8292523669604962,
      "grad_norm": 0.1138639652329467,
      "learning_rate": 0.0007269444281396776,
      "loss": 0.895,
      "step": 635
    },
    {
      "epoch": 0.8357819131570355,
      "grad_norm": 0.09640845595996536,
      "learning_rate": 0.0007218474518943077,
      "loss": 0.8866,
      "step": 640
    },
    {
      "epoch": 0.8423114593535749,
      "grad_norm": 0.09519966958437291,
      "learning_rate": 0.0007167216073224135,
      "loss": 0.8829,
      "step": 645
    },
    {
      "epoch": 0.8488410055501142,
      "grad_norm": 0.15743707352711644,
      "learning_rate": 0.0007115675614343822,
      "loss": 0.8987,
      "step": 650
    },
    {
      "epoch": 0.8553705517466537,
      "grad_norm": 0.11563517600593853,
      "learning_rate": 0.0007063859849103515,
      "loss": 0.8938,
      "step": 655
    },
    {
      "epoch": 0.861900097943193,
      "grad_norm": 0.10859011970541634,
      "learning_rate": 0.0007011775520129362,
      "loss": 0.8786,
      "step": 660
    },
    {
      "epoch": 0.8684296441397323,
      "grad_norm": 0.09890703875919929,
      "learning_rate": 0.0006959429404994886,
      "loss": 0.8926,
      "step": 665
    },
    {
      "epoch": 0.8749591903362717,
      "grad_norm": 0.10367006544288325,
      "learning_rate": 0.0006906828315339034,
      "loss": 0.8844,
      "step": 670
    },
    {
      "epoch": 0.881488736532811,
      "grad_norm": 0.11190920037210812,
      "learning_rate": 0.0006853979095979804,
      "loss": 0.8974,
      "step": 675
    },
    {
      "epoch": 0.8880182827293504,
      "grad_norm": 0.08978120705436873,
      "learning_rate": 0.0006800888624023553,
      "loss": 0.8782,
      "step": 680
    },
    {
      "epoch": 0.8945478289258897,
      "grad_norm": 0.08687418281599266,
      "learning_rate": 0.0006747563807970089,
      "loss": 0.889,
      "step": 685
    },
    {
      "epoch": 0.901077375122429,
      "grad_norm": 0.12896197858976532,
      "learning_rate": 0.0006694011586813706,
      "loss": 0.8986,
      "step": 690
    },
    {
      "epoch": 0.9076069213189684,
      "grad_norm": 0.14368822339304488,
      "learning_rate": 0.0006640238929140214,
      "loss": 0.8779,
      "step": 695
    },
    {
      "epoch": 0.9141364675155077,
      "grad_norm": 0.13198532471035,
      "learning_rate": 0.000658625283222016,
      "loss": 0.8909,
      "step": 700
    },
    {
      "epoch": 0.920666013712047,
      "grad_norm": 0.09405996339290146,
      "learning_rate": 0.0006532060321098274,
      "loss": 0.875,
      "step": 705
    },
    {
      "epoch": 0.9271955599085864,
      "grad_norm": 0.1289370575480504,
      "learning_rate": 0.0006477668447679336,
      "loss": 0.8897,
      "step": 710
    },
    {
      "epoch": 0.9337251061051257,
      "grad_norm": 0.11691715339087744,
      "learning_rate": 0.0006423084289810528,
      "loss": 0.868,
      "step": 715
    },
    {
      "epoch": 0.940254652301665,
      "grad_norm": 0.09817807710054544,
      "learning_rate": 0.0006368314950360416,
      "loss": 0.8744,
      "step": 720
    },
    {
      "epoch": 0.9467841984982044,
      "grad_norm": 0.0951960370980318,
      "learning_rate": 0.0006313367556294672,
      "loss": 0.879,
      "step": 725
    },
    {
      "epoch": 0.9533137446947437,
      "grad_norm": 0.13293486592231005,
      "learning_rate": 0.0006258249257748674,
      "loss": 0.886,
      "step": 730
    },
    {
      "epoch": 0.9598432908912831,
      "grad_norm": 0.1372169544162143,
      "learning_rate": 0.0006202967227097073,
      "loss": 0.8735,
      "step": 735
    },
    {
      "epoch": 0.9663728370878224,
      "grad_norm": 0.11548048964132691,
      "learning_rate": 0.0006147528658020468,
      "loss": 0.8869,
      "step": 740
    },
    {
      "epoch": 0.9729023832843617,
      "grad_norm": 0.10409868760634149,
      "learning_rate": 0.000609194076456933,
      "loss": 0.8826,
      "step": 745
    },
    {
      "epoch": 0.9794319294809011,
      "grad_norm": 0.09056329542575928,
      "learning_rate": 0.0006036210780225249,
      "loss": 0.8661,
      "step": 750
    },
    {
      "epoch": 0.9859614756774404,
      "grad_norm": 0.08918196113614406,
      "learning_rate": 0.0005980345956959663,
      "loss": 0.8759,
      "step": 755
    },
    {
      "epoch": 0.9924910218739798,
      "grad_norm": 0.16146512592490705,
      "learning_rate": 0.0005924353564290188,
      "loss": 0.86,
      "step": 760
    },
    {
      "epoch": 0.9990205680705191,
      "grad_norm": 0.0957197145046814,
      "learning_rate": 0.0005868240888334653,
      "loss": 0.8837,
      "step": 765
    },
    {
      "epoch": 0.9990205680705191,
      "eval_loss": 1.170108675956726,
      "eval_runtime": 113.2508,
      "eval_samples_per_second": 181.129,
      "eval_steps_per_second": 5.669,
      "step": 765
    },
    {
      "epoch": 1.0055501142670584,
      "grad_norm": 0.16155269430539837,
      "learning_rate": 0.0005812015230862978,
      "loss": 0.7813,
      "step": 770
    },
    {
      "epoch": 1.0120796604635978,
      "grad_norm": 0.13207273059211178,
      "learning_rate": 0.0005755683908347025,
      "loss": 0.7657,
      "step": 775
    },
    {
      "epoch": 1.018609206660137,
      "grad_norm": 0.1169652226379318,
      "learning_rate": 0.0005699254251008523,
      "loss": 0.7718,
      "step": 780
    },
    {
      "epoch": 1.0251387528566764,
      "grad_norm": 0.10689388389180696,
      "learning_rate": 0.0005642733601865202,
      "loss": 0.7676,
      "step": 785
    },
    {
      "epoch": 1.0316682990532158,
      "grad_norm": 0.2491866679539747,
      "learning_rate": 0.0005586129315775285,
      "loss": 0.7737,
      "step": 790
    },
    {
      "epoch": 1.0381978452497551,
      "grad_norm": 0.10641353761323494,
      "learning_rate": 0.0005529448758480408,
      "loss": 0.7689,
      "step": 795
    },
    {
      "epoch": 1.0447273914462945,
      "grad_norm": 0.1249488657268593,
      "learning_rate": 0.0005472699305647143,
      "loss": 0.775,
      "step": 800
    },
    {
      "epoch": 1.0512569376428338,
      "grad_norm": 0.0992659526063643,
      "learning_rate": 0.0005415888341907233,
      "loss": 0.7655,
      "step": 805
    },
    {
      "epoch": 1.0577864838393731,
      "grad_norm": 0.09218770748495513,
      "learning_rate": 0.0005359023259896639,
      "loss": 0.7673,
      "step": 810
    },
    {
      "epoch": 1.0643160300359125,
      "grad_norm": 0.10632383047811839,
      "learning_rate": 0.000530211145929357,
      "loss": 0.765,
      "step": 815
    },
    {
      "epoch": 1.0708455762324518,
      "grad_norm": 0.10048028916097863,
      "learning_rate": 0.0005245160345855581,
      "loss": 0.7685,
      "step": 820
    },
    {
      "epoch": 1.0773751224289911,
      "grad_norm": 0.10494848554578165,
      "learning_rate": 0.0005188177330455885,
      "loss": 0.7618,
      "step": 825
    },
    {
      "epoch": 1.0839046686255305,
      "grad_norm": 0.10144185540045861,
      "learning_rate": 0.0005131169828119002,
      "loss": 0.7776,
      "step": 830
    },
    {
      "epoch": 1.0904342148220698,
      "grad_norm": 0.33506826977406706,
      "learning_rate": 0.0005074145257055856,
      "loss": 0.765,
      "step": 835
    },
    {
      "epoch": 1.0969637610186092,
      "grad_norm": 0.10107983422633167,
      "learning_rate": 0.0005017111037698476,
      "loss": 0.7682,
      "step": 840
    },
    {
      "epoch": 1.1034933072151485,
      "grad_norm": 0.165492200011015,
      "learning_rate": 0.0004960074591734393,
      "loss": 0.7589,
      "step": 845
    },
    {
      "epoch": 1.1100228534116878,
      "grad_norm": 0.10859254613037782,
      "learning_rate": 0.0004903043341140879,
      "loss": 0.7553,
      "step": 850
    },
    {
      "epoch": 1.1165523996082272,
      "grad_norm": 0.10976530669338017,
      "learning_rate": 0.00048460247072191494,
      "loss": 0.7667,
      "step": 855
    },
    {
      "epoch": 1.1230819458047665,
      "grad_norm": 0.10240931856309561,
      "learning_rate": 0.00047890261096286484,
      "loss": 0.7691,
      "step": 860
    },
    {
      "epoch": 1.1296114920013058,
      "grad_norm": 0.11407710614848746,
      "learning_rate": 0.00047320549654215595,
      "loss": 0.7805,
      "step": 865
    },
    {
      "epoch": 1.1361410381978452,
      "grad_norm": 0.12823395390366013,
      "learning_rate": 0.0004675118688077634,
      "loss": 0.7757,
      "step": 870
    },
    {
      "epoch": 1.1426705843943845,
      "grad_norm": 0.1133411025356735,
      "learning_rate": 0.00046182246865395134,
      "loss": 0.7729,
      "step": 875
    },
    {
      "epoch": 1.1492001305909239,
      "grad_norm": 0.10970977982858451,
      "learning_rate": 0.00045613803642486153,
      "loss": 0.7717,
      "step": 880
    },
    {
      "epoch": 1.1557296767874632,
      "grad_norm": 0.12598805256728796,
      "learning_rate": 0.00045045931181817453,
      "loss": 0.7831,
      "step": 885
    },
    {
      "epoch": 1.1622592229840025,
      "grad_norm": 0.10894474888273668,
      "learning_rate": 0.0004447870337888563,
      "loss": 0.7794,
      "step": 890
    },
    {
      "epoch": 1.1687887691805419,
      "grad_norm": 0.17409947006735688,
      "learning_rate": 0.00043912194045299906,
      "loss": 0.775,
      "step": 895
    },
    {
      "epoch": 1.1753183153770812,
      "grad_norm": 0.11069033013658751,
      "learning_rate": 0.00043346476899177336,
      "loss": 0.7746,
      "step": 900
    },
    {
      "epoch": 1.1818478615736205,
      "grad_norm": 0.10942914356960881,
      "learning_rate": 0.0004278162555554997,
      "loss": 0.7745,
      "step": 905
    },
    {
      "epoch": 1.1883774077701599,
      "grad_norm": 0.10149695219018244,
      "learning_rate": 0.00042217713516785773,
      "loss": 0.7693,
      "step": 910
    },
    {
      "epoch": 1.1949069539666994,
      "grad_norm": 0.10011067707357035,
      "learning_rate": 0.00041654814163023736,
      "loss": 0.7788,
      "step": 915
    },
    {
      "epoch": 1.2014365001632386,
      "grad_norm": 0.12458926027929569,
      "learning_rate": 0.0004109300074262533,
      "loss": 0.7615,
      "step": 920
    },
    {
      "epoch": 1.2079660463597781,
      "grad_norm": 0.11149583679617998,
      "learning_rate": 0.0004053234636264286,
      "loss": 0.7686,
      "step": 925
    },
    {
      "epoch": 1.2144955925563172,
      "grad_norm": 0.11582829686992874,
      "learning_rate": 0.0003997292397930624,
      "loss": 0.7838,
      "step": 930
    },
    {
      "epoch": 1.2210251387528568,
      "grad_norm": 0.09717998925098204,
      "learning_rate": 0.00039414806388529477,
      "loss": 0.7624,
      "step": 935
    },
    {
      "epoch": 1.227554684949396,
      "grad_norm": 0.09960479953694133,
      "learning_rate": 0.0003885806621643787,
      "loss": 0.7613,
      "step": 940
    },
    {
      "epoch": 1.2340842311459355,
      "grad_norm": 0.10269926205036363,
      "learning_rate": 0.00038302775909917586,
      "loss": 0.7737,
      "step": 945
    },
    {
      "epoch": 1.2406137773424746,
      "grad_norm": 0.09556773278626478,
      "learning_rate": 0.0003774900772718811,
      "loss": 0.7706,
      "step": 950
    },
    {
      "epoch": 1.2471433235390141,
      "grad_norm": 0.09832265894426077,
      "learning_rate": 0.00037196833728399715,
      "loss": 0.7576,
      "step": 955
    },
    {
      "epoch": 1.2536728697355533,
      "grad_norm": 0.09634029860771899,
      "learning_rate": 0.0003664632576625642,
      "loss": 0.763,
      "step": 960
    },
    {
      "epoch": 1.2602024159320928,
      "grad_norm": 0.09904533493015076,
      "learning_rate": 0.00036097555476666,
      "loss": 0.7614,
      "step": 965
    },
    {
      "epoch": 1.266731962128632,
      "grad_norm": 0.09578657292752235,
      "learning_rate": 0.0003555059426941827,
      "loss": 0.7754,
      "step": 970
    },
    {
      "epoch": 1.2732615083251715,
      "grad_norm": 0.10417538507420639,
      "learning_rate": 0.0003500551331889266,
      "loss": 0.7642,
      "step": 975
    },
    {
      "epoch": 1.2797910545217106,
      "grad_norm": 0.09005748935225892,
      "learning_rate": 0.0003446238355479667,
      "loss": 0.7674,
      "step": 980
    },
    {
      "epoch": 1.2863206007182502,
      "grad_norm": 0.09488920789152981,
      "learning_rate": 0.00033921275652935834,
      "loss": 0.7709,
      "step": 985
    },
    {
      "epoch": 1.2928501469147895,
      "grad_norm": 0.10089668367140452,
      "learning_rate": 0.00033382260026017024,
      "loss": 0.7574,
      "step": 990
    },
    {
      "epoch": 1.2993796931113288,
      "grad_norm": 0.13661540313034193,
      "learning_rate": 0.00032845406814485775,
      "loss": 0.7629,
      "step": 995
    },
    {
      "epoch": 1.3059092393078682,
      "grad_norm": 0.09335511370859859,
      "learning_rate": 0.00032310785877399175,
      "loss": 0.7646,
      "step": 1000
    },
    {
      "epoch": 1.3124387855044075,
      "grad_norm": 0.09564382591253417,
      "learning_rate": 0.0003177846678333532,
      "loss": 0.7508,
      "step": 1005
    },
    {
      "epoch": 1.3189683317009468,
      "grad_norm": 0.0848065967594756,
      "learning_rate": 0.0003124851880134051,
      "loss": 0.7659,
      "step": 1010
    },
    {
      "epoch": 1.3254978778974862,
      "grad_norm": 0.10150191813304216,
      "learning_rate": 0.0003072101089191558,
      "loss": 0.7619,
      "step": 1015
    },
    {
      "epoch": 1.3320274240940255,
      "grad_norm": 0.10649402238734328,
      "learning_rate": 0.0003019601169804216,
      "loss": 0.7558,
      "step": 1020
    },
    {
      "epoch": 1.3385569702905649,
      "grad_norm": 0.14361029414288792,
      "learning_rate": 0.0002967358953625052,
      "loss": 0.773,
      "step": 1025
    },
    {
      "epoch": 1.3450865164871042,
      "grad_norm": 0.09500348974946589,
      "learning_rate": 0.000291538123877296,
      "loss": 0.7542,
      "step": 1030
    },
    {
      "epoch": 1.3516160626836435,
      "grad_norm": 0.08520102759972477,
      "learning_rate": 0.0002863674788948097,
      "loss": 0.7597,
      "step": 1035
    },
    {
      "epoch": 1.3581456088801829,
      "grad_norm": 0.09217549242113522,
      "learning_rate": 0.0002812246332551728,
      "loss": 0.7485,
      "step": 1040
    },
    {
      "epoch": 1.3646751550767222,
      "grad_norm": 0.09907589892443452,
      "learning_rate": 0.00027611025618106945,
      "loss": 0.7543,
      "step": 1045
    },
    {
      "epoch": 1.3712047012732616,
      "grad_norm": 0.10292973524129939,
      "learning_rate": 0.00027102501319065705,
      "loss": 0.7595,
      "step": 1050
    },
    {
      "epoch": 1.3777342474698009,
      "grad_norm": 0.09834896864003749,
      "learning_rate": 0.00026596956601096354,
      "loss": 0.7491,
      "step": 1055
    },
    {
      "epoch": 1.3842637936663402,
      "grad_norm": 0.10834122959348784,
      "learning_rate": 0.00026094457249178037,
      "loss": 0.7511,
      "step": 1060
    },
    {
      "epoch": 1.3907933398628796,
      "grad_norm": 0.097823045679363,
      "learning_rate": 0.0002559506865200576,
      "loss": 0.766,
      "step": 1065
    },
    {
      "epoch": 1.397322886059419,
      "grad_norm": 0.08613113708448686,
      "learning_rate": 0.000250988557934816,
      "loss": 0.7538,
      "step": 1070
    },
    {
      "epoch": 1.4038524322559582,
      "grad_norm": 0.08570664150077177,
      "learning_rate": 0.00024605883244258476,
      "loss": 0.7585,
      "step": 1075
    },
    {
      "epoch": 1.4103819784524976,
      "grad_norm": 0.09343658073359985,
      "learning_rate": 0.0002411621515333788,
      "loss": 0.7589,
      "step": 1080
    },
    {
      "epoch": 1.416911524649037,
      "grad_norm": 0.09359751502821594,
      "learning_rate": 0.00023629915239722305,
      "loss": 0.7622,
      "step": 1085
    },
    {
      "epoch": 1.4234410708455763,
      "grad_norm": 0.09470117008172094,
      "learning_rate": 0.00023147046784123649,
      "loss": 0.7463,
      "step": 1090
    },
    {
      "epoch": 1.4299706170421156,
      "grad_norm": 0.086999603846484,
      "learning_rate": 0.0002266767262072878,
      "loss": 0.7427,
      "step": 1095
    },
    {
      "epoch": 1.436500163238655,
      "grad_norm": 0.09801106253386438,
      "learning_rate": 0.00022191855129023047,
      "loss": 0.7514,
      "step": 1100
    },
    {
      "epoch": 1.4430297094351943,
      "grad_norm": 0.10345836150800067,
      "learning_rate": 0.0002171965622567308,
      "loss": 0.7592,
      "step": 1105
    },
    {
      "epoch": 1.4495592556317336,
      "grad_norm": 0.08566757149955255,
      "learning_rate": 0.00021251137356469675,
      "loss": 0.759,
      "step": 1110
    },
    {
      "epoch": 1.456088801828273,
      "grad_norm": 0.08810972760972847,
      "learning_rate": 0.00020786359488332185,
      "loss": 0.7519,
      "step": 1115
    },
    {
      "epoch": 1.4626183480248123,
      "grad_norm": 0.09049690528433806,
      "learning_rate": 0.00020325383101374916,
      "loss": 0.756,
      "step": 1120
    },
    {
      "epoch": 1.4691478942213516,
      "grad_norm": 0.09979520102371152,
      "learning_rate": 0.00019868268181037185,
      "loss": 0.7599,
      "step": 1125
    },
    {
      "epoch": 1.475677440417891,
      "grad_norm": 0.09497392755152712,
      "learning_rate": 0.00019415074210277523,
      "loss": 0.7565,
      "step": 1130
    },
    {
      "epoch": 1.4822069866144303,
      "grad_norm": 0.08791804220197187,
      "learning_rate": 0.00018965860161833338,
      "loss": 0.7525,
      "step": 1135
    },
    {
      "epoch": 1.4887365328109696,
      "grad_norm": 0.08923200561041796,
      "learning_rate": 0.00018520684490547012,
      "loss": 0.7519,
      "step": 1140
    },
    {
      "epoch": 1.495266079007509,
      "grad_norm": 0.09839768246232901,
      "learning_rate": 0.00018079605125759286,
      "loss": 0.7658,
      "step": 1145
    },
    {
      "epoch": 1.5017956252040483,
      "grad_norm": 0.09924777189237874,
      "learning_rate": 0.00017642679463771195,
      "loss": 0.7557,
      "step": 1150
    },
    {
      "epoch": 1.5083251714005876,
      "grad_norm": 0.09245211497787646,
      "learning_rate": 0.00017209964360375134,
      "loss": 0.74,
      "step": 1155
    },
    {
      "epoch": 1.514854717597127,
      "grad_norm": 0.08256694904428881,
      "learning_rate": 0.0001678151612345653,
      "loss": 0.746,
      "step": 1160
    },
    {
      "epoch": 1.5213842637936663,
      "grad_norm": 0.0844842127140525,
      "learning_rate": 0.00016357390505666563,
      "loss": 0.7485,
      "step": 1165
    },
    {
      "epoch": 1.5279138099902057,
      "grad_norm": 0.08960950031974638,
      "learning_rate": 0.00015937642697167286,
      "loss": 0.7645,
      "step": 1170
    },
    {
      "epoch": 1.534443356186745,
      "grad_norm": 0.10144239915540011,
      "learning_rate": 0.00015522327318449926,
      "loss": 0.7503,
      "step": 1175
    },
    {
      "epoch": 1.5409729023832843,
      "grad_norm": 0.09487394341504404,
      "learning_rate": 0.0001511149841322718,
      "loss": 0.7527,
      "step": 1180
    },
    {
      "epoch": 1.5475024485798237,
      "grad_norm": 0.0878841248776266,
      "learning_rate": 0.0001470520944140084,
      "loss": 0.7511,
      "step": 1185
    },
    {
      "epoch": 1.554031994776363,
      "grad_norm": 0.08596123619643084,
      "learning_rate": 0.00014303513272105056,
      "loss": 0.7579,
      "step": 1190
    },
    {
      "epoch": 1.5605615409729023,
      "grad_norm": 0.0982230534586831,
      "learning_rate": 0.00013906462176826756,
      "loss": 0.755,
      "step": 1195
    },
    {
      "epoch": 1.5670910871694417,
      "grad_norm": 0.0885052070828312,
      "learning_rate": 0.0001351410782260366,
      "loss": 0.7456,
      "step": 1200
    },
    {
      "epoch": 1.573620633365981,
      "grad_norm": 0.10599778584205663,
      "learning_rate": 0.00013126501265301022,
      "loss": 0.7458,
      "step": 1205
    },
    {
      "epoch": 1.5801501795625204,
      "grad_norm": 0.08337466036805537,
      "learning_rate": 0.0001274369294296791,
      "loss": 0.7416,
      "step": 1210
    },
    {
      "epoch": 1.5866797257590597,
      "grad_norm": 0.10132703504537871,
      "learning_rate": 0.00012365732669273778,
      "loss": 0.7522,
      "step": 1215
    },
    {
      "epoch": 1.5932092719555992,
      "grad_norm": 0.1576080899554536,
      "learning_rate": 0.00011992669627026481,
      "loss": 0.75,
      "step": 1220
    },
    {
      "epoch": 1.5997388181521384,
      "grad_norm": 0.09197103603287382,
      "learning_rate": 0.00011624552361772167,
      "loss": 0.7475,
      "step": 1225
    },
    {
      "epoch": 1.606268364348678,
      "grad_norm": 0.09561767212758146,
      "learning_rate": 0.00011261428775478261,
      "loss": 0.7434,
      "step": 1230
    },
    {
      "epoch": 1.612797910545217,
      "grad_norm": 0.08990886493213987,
      "learning_rate": 0.000109033461203001,
      "loss": 0.7433,
      "step": 1235
    },
    {
      "epoch": 1.6193274567417566,
      "grad_norm": 0.0809001999881728,
      "learning_rate": 0.00010550350992432178,
      "loss": 0.7453,
      "step": 1240
    },
    {
      "epoch": 1.6258570029382957,
      "grad_norm": 0.08595365372801968,
      "learning_rate": 0.00010202489326044662,
      "loss": 0.7329,
      "step": 1245
    },
    {
      "epoch": 1.6323865491348353,
      "grad_norm": 0.09884104400593965,
      "learning_rate": 9.859806387306203e-05,
      "loss": 0.739,
      "step": 1250
    },
    {
      "epoch": 1.6389160953313744,
      "grad_norm": 0.08767110201008006,
      "learning_rate": 9.522346768493572e-05,
      "loss": 0.7585,
      "step": 1255
    },
    {
      "epoch": 1.645445641527914,
      "grad_norm": 0.1086367599838234,
      "learning_rate": 9.19015438218892e-05,
      "loss": 0.7491,
      "step": 1260
    },
    {
      "epoch": 1.651975187724453,
      "grad_norm": 0.09258991632851003,
      "learning_rate": 8.86327245556569e-05,
      "loss": 0.7596,
      "step": 1265
    },
    {
      "epoch": 1.6585047339209926,
      "grad_norm": 0.08028243544603576,
      "learning_rate": 8.541743524763517e-05,
      "loss": 0.7474,
      "step": 1270
    },
    {
      "epoch": 1.6650342801175317,
      "grad_norm": 0.08506201880375794,
      "learning_rate": 8.225609429353187e-05,
      "loss": 0.7492,
      "step": 1275
    },
    {
      "epoch": 1.6715638263140713,
      "grad_norm": 0.08045286371204356,
      "learning_rate": 7.914911306892081e-05,
      "loss": 0.7411,
      "step": 1280
    },
    {
      "epoch": 1.6780933725106104,
      "grad_norm": 0.08397787548068493,
      "learning_rate": 7.609689587571211e-05,
      "loss": 0.7426,
      "step": 1285
    },
    {
      "epoch": 1.68462291870715,
      "grad_norm": 0.09319893318152288,
      "learning_rate": 7.309983988954078e-05,
      "loss": 0.7455,
      "step": 1290
    },
    {
      "epoch": 1.691152464903689,
      "grad_norm": 0.08269483359583629,
      "learning_rate": 7.015833510808322e-05,
      "loss": 0.7454,
      "step": 1295
    },
    {
      "epoch": 1.6976820111002286,
      "grad_norm": 0.08855031100883796,
      "learning_rate": 6.727276430030888e-05,
      "loss": 0.7407,
      "step": 1300
    },
    {
      "epoch": 1.7042115572967678,
      "grad_norm": 0.0903978979394998,
      "learning_rate": 6.444350295667112e-05,
      "loss": 0.7403,
      "step": 1305
    },
    {
      "epoch": 1.7107411034933073,
      "grad_norm": 0.09172599593431895,
      "learning_rate": 6.16709192402461e-05,
      "loss": 0.7477,
      "step": 1310
    },
    {
      "epoch": 1.7172706496898464,
      "grad_norm": 0.0965922394270062,
      "learning_rate": 5.895537393882422e-05,
      "loss": 0.7437,
      "step": 1315
    },
    {
      "epoch": 1.723800195886386,
      "grad_norm": 0.0833688884936686,
      "learning_rate": 5.629722041796293e-05,
      "loss": 0.7435,
      "step": 1320
    },
    {
      "epoch": 1.7303297420829251,
      "grad_norm": 0.09545778086789393,
      "learning_rate": 5.369680457500342e-05,
      "loss": 0.7473,
      "step": 1325
    },
    {
      "epoch": 1.7368592882794647,
      "grad_norm": 0.08508391670612106,
      "learning_rate": 5.1154464794060606e-05,
      "loss": 0.7389,
      "step": 1330
    },
    {
      "epoch": 1.7433888344760038,
      "grad_norm": 0.08878757992222804,
      "learning_rate": 4.867053190199011e-05,
      "loss": 0.7437,
      "step": 1335
    },
    {
      "epoch": 1.7499183806725433,
      "grad_norm": 0.0820568403433627,
      "learning_rate": 4.6245329125338794e-05,
      "loss": 0.7408,
      "step": 1340
    },
    {
      "epoch": 1.7564479268690825,
      "grad_norm": 0.0858635548037367,
      "learning_rate": 4.387917204828429e-05,
      "loss": 0.7429,
      "step": 1345
    },
    {
      "epoch": 1.762977473065622,
      "grad_norm": 0.08641747714707529,
      "learning_rate": 4.15723685715686e-05,
      "loss": 0.7479,
      "step": 1350
    },
    {
      "epoch": 1.7695070192621611,
      "grad_norm": 0.08374191442920427,
      "learning_rate": 3.932521887243273e-05,
      "loss": 0.7385,
      "step": 1355
    },
    {
      "epoch": 1.7760365654587007,
      "grad_norm": 0.08066411477553104,
      "learning_rate": 3.713801536555483e-05,
      "loss": 0.7474,
      "step": 1360
    },
    {
      "epoch": 1.7825661116552398,
      "grad_norm": 0.08927760837098916,
      "learning_rate": 3.501104266499966e-05,
      "loss": 0.7422,
      "step": 1365
    },
    {
      "epoch": 1.7890956578517794,
      "grad_norm": 0.09440557420894752,
      "learning_rate": 3.294457754718211e-05,
      "loss": 0.7392,
      "step": 1370
    },
    {
      "epoch": 1.7956252040483185,
      "grad_norm": 0.08250816342761012,
      "learning_rate": 3.0938888914851694e-05,
      "loss": 0.7504,
      "step": 1375
    },
    {
      "epoch": 1.802154750244858,
      "grad_norm": 0.08438351494518612,
      "learning_rate": 2.8994237762100915e-05,
      "loss": 0.7379,
      "step": 1380
    },
    {
      "epoch": 1.8086842964413972,
      "grad_norm": 0.10056475520684106,
      "learning_rate": 2.711087714040239e-05,
      "loss": 0.7521,
      "step": 1385
    },
    {
      "epoch": 1.8152138426379367,
      "grad_norm": 0.08935947201137795,
      "learning_rate": 2.5289052125680943e-05,
      "loss": 0.7529,
      "step": 1390
    },
    {
      "epoch": 1.8217433888344758,
      "grad_norm": 0.08922224512386819,
      "learning_rate": 2.3528999786421755e-05,
      "loss": 0.7371,
      "step": 1395
    },
    {
      "epoch": 1.8282729350310154,
      "grad_norm": 0.08679697132221147,
      "learning_rate": 2.1830949152821968e-05,
      "loss": 0.7561,
      "step": 1400
    },
    {
      "epoch": 1.8348024812275547,
      "grad_norm": 0.08712187848603142,
      "learning_rate": 2.0195121186987355e-05,
      "loss": 0.7516,
      "step": 1405
    },
    {
      "epoch": 1.841332027424094,
      "grad_norm": 0.08201952130396374,
      "learning_rate": 1.862172875417939e-05,
      "loss": 0.7371,
      "step": 1410
    },
    {
      "epoch": 1.8478615736206334,
      "grad_norm": 0.08809762854634388,
      "learning_rate": 1.7110976595115845e-05,
      "loss": 0.7359,
      "step": 1415
    },
    {
      "epoch": 1.8543911198171728,
      "grad_norm": 0.08016826635038367,
      "learning_rate": 1.5663061299327985e-05,
      "loss": 0.7486,
      "step": 1420
    },
    {
      "epoch": 1.860920666013712,
      "grad_norm": 0.08363137595664318,
      "learning_rate": 1.4278171279579754e-05,
      "loss": 0.731,
      "step": 1425
    },
    {
      "epoch": 1.8674502122102514,
      "grad_norm": 0.07999197884334044,
      "learning_rate": 1.2956486747349394e-05,
      "loss": 0.7419,
      "step": 1430
    },
    {
      "epoch": 1.8739797584067908,
      "grad_norm": 0.07990240303203543,
      "learning_rate": 1.1698179689379752e-05,
      "loss": 0.7364,
      "step": 1435
    },
    {
      "epoch": 1.88050930460333,
      "grad_norm": 0.0863904909776114,
      "learning_rate": 1.0503413845297738e-05,
      "loss": 0.7431,
      "step": 1440
    },
    {
      "epoch": 1.8870388507998694,
      "grad_norm": 0.07905813893661988,
      "learning_rate": 9.372344686307655e-06,
      "loss": 0.7511,
      "step": 1445
    },
    {
      "epoch": 1.8935683969964088,
      "grad_norm": 0.07982748763621027,
      "learning_rate": 8.305119394960037e-06,
      "loss": 0.7394,
      "step": 1450
    },
    {
      "epoch": 1.9000979431929481,
      "grad_norm": 0.09388991256570764,
      "learning_rate": 7.301876845999367e-06,
      "loss": 0.7339,
      "step": 1455
    },
    {
      "epoch": 1.9066274893894875,
      "grad_norm": 0.07888739627906663,
      "learning_rate": 6.362747588292583e-06,
      "loss": 0.7327,
      "step": 1460
    },
    {
      "epoch": 1.9131570355860268,
      "grad_norm": 0.08652243743893587,
      "learning_rate": 5.487853827841227e-06,
      "loss": 0.7469,
      "step": 1465
    },
    {
      "epoch": 1.9196865817825661,
      "grad_norm": 0.07992175809299334,
      "learning_rate": 4.6773094118793265e-06,
      "loss": 0.7479,
      "step": 1470
    },
    {
      "epoch": 1.9262161279791055,
      "grad_norm": 0.0790185170290808,
      "learning_rate": 3.931219814058473e-06,
      "loss": 0.7335,
      "step": 1475
    },
    {
      "epoch": 1.9327456741756448,
      "grad_norm": 0.09824195237509104,
      "learning_rate": 3.2496821207230743e-06,
      "loss": 0.7314,
      "step": 1480
    },
    {
      "epoch": 1.9392752203721841,
      "grad_norm": 0.07967052039404712,
      "learning_rate": 2.6327850182769066e-06,
      "loss": 0.742,
      "step": 1485
    },
    {
      "epoch": 1.9458047665687235,
      "grad_norm": 0.08696261003807573,
      "learning_rate": 2.080608781642235e-06,
      "loss": 0.7398,
      "step": 1490
    },
    {
      "epoch": 1.9523343127652628,
      "grad_norm": 0.07725784126402066,
      "learning_rate": 1.5932252638143907e-06,
      "loss": 0.7381,
      "step": 1495
    },
    {
      "epoch": 1.9588638589618022,
      "grad_norm": 0.07780821927345753,
      "learning_rate": 1.1706978865113072e-06,
      "loss": 0.7403,
      "step": 1500
    },
    {
      "epoch": 1.9653934051583415,
      "grad_norm": 0.08447724747800898,
      "learning_rate": 8.130816319209533e-07,
      "loss": 0.7376,
      "step": 1505
    },
    {
      "epoch": 1.9719229513548808,
      "grad_norm": 0.0837974865903866,
      "learning_rate": 5.204230355465023e-07,
      "loss": 0.7424,
      "step": 1510
    },
    {
      "epoch": 1.9784524975514202,
      "grad_norm": 0.07922315988637033,
      "learning_rate": 2.927601801508972e-07,
      "loss": 0.7435,
      "step": 1515
    },
    {
      "epoch": 1.9849820437479595,
      "grad_norm": 0.07714110097350439,
      "learning_rate": 1.3012269080125984e-07,
      "loss": 0.7437,
      "step": 1520
    },
    {
      "epoch": 1.9915115899444988,
      "grad_norm": 0.07663167816335002,
      "learning_rate": 3.2531731013862865e-08,
      "loss": 0.7359,
      "step": 1525
    },
    {
      "epoch": 1.9980411361410382,
      "grad_norm": 0.08207935314632366,
      "learning_rate": 0.0,
      "loss": 0.7468,
      "step": 1530
    },
    {
      "epoch": 1.9980411361410382,
      "eval_loss": 1.1451562643051147,
      "eval_runtime": 112.2389,
      "eval_samples_per_second": 182.762,
      "eval_steps_per_second": 5.72,
      "step": 1530
    },
    {
      "epoch": 1.9980411361410382,
      "step": 1530,
      "total_flos": 150139935129600.0,
      "train_loss": 0.8556482659445869,
      "train_runtime": 3466.7868,
      "train_samples_per_second": 56.533,
      "train_steps_per_second": 0.441
    }
  ],
  "logging_steps": 5,
  "max_steps": 1530,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 150139935129600.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}