{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.1370319545187217, |
|
"eval_steps": 500, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.008276432857438443, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.0924, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.016552865714876887, |
|
"grad_norm": 17.15061378479004, |
|
"learning_rate": 5e-06, |
|
"loss": 2.0976, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.024829298572315334, |
|
"grad_norm": 5.300135135650635, |
|
"learning_rate": 3e-05, |
|
"loss": 1.4782, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.033105731429753774, |
|
"grad_norm": 2.799682855606079, |
|
"learning_rate": 4.9999994694145566e-05, |
|
"loss": 1.3569, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04138216428719222, |
|
"grad_norm": 2.3753180503845215, |
|
"learning_rate": 4.9999808989476856e-05, |
|
"loss": 1.3787, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04965859714463067, |
|
"grad_norm": 2.270704984664917, |
|
"learning_rate": 4.99993579943386e-05, |
|
"loss": 1.3241, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05793503000206911, |
|
"grad_norm": 3.3539905548095703, |
|
"learning_rate": 4.999864171351664e-05, |
|
"loss": 1.3225, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06621146285950755, |
|
"grad_norm": 3.482184410095215, |
|
"learning_rate": 4.999766015461193e-05, |
|
"loss": 1.3114, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.074487895716946, |
|
"grad_norm": 4.038243770599365, |
|
"learning_rate": 4.999641332804046e-05, |
|
"loss": 1.2537, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08276432857438444, |
|
"grad_norm": 3.1982836723327637, |
|
"learning_rate": 4.999490124703319e-05, |
|
"loss": 1.2443, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.09104076143182288, |
|
"grad_norm": 2.335792064666748, |
|
"learning_rate": 4.9993123927635874e-05, |
|
"loss": 1.2643, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09931719428926133, |
|
"grad_norm": 2.2586803436279297, |
|
"learning_rate": 4.99910813887089e-05, |
|
"loss": 1.2241, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10759362714669977, |
|
"grad_norm": 2.543644428253174, |
|
"learning_rate": 4.9988773651927064e-05, |
|
"loss": 1.2263, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11587006000413821, |
|
"grad_norm": 2.1221492290496826, |
|
"learning_rate": 4.998620074177938e-05, |
|
"loss": 1.2634, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12414649286157665, |
|
"grad_norm": 1.6611372232437134, |
|
"learning_rate": 4.998336268556879e-05, |
|
"loss": 1.2401, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1324229257190151, |
|
"grad_norm": 1.7521588802337646, |
|
"learning_rate": 4.998025951341191e-05, |
|
"loss": 1.242, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.14069935857645355, |
|
"grad_norm": 1.445685625076294, |
|
"learning_rate": 4.997689125823867e-05, |
|
"loss": 1.2534, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.148975791433892, |
|
"grad_norm": 1.8729900121688843, |
|
"learning_rate": 4.997325795579197e-05, |
|
"loss": 1.2583, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.15725222429133043, |
|
"grad_norm": 1.734971284866333, |
|
"learning_rate": 4.9969359644627334e-05, |
|
"loss": 1.2543, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.16552865714876888, |
|
"grad_norm": 1.8854798078536987, |
|
"learning_rate": 4.9965196366112464e-05, |
|
"loss": 1.2386, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17380509000620734, |
|
"grad_norm": 1.6403270959854126, |
|
"learning_rate": 4.996076816442683e-05, |
|
"loss": 1.2014, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.18208152286364576, |
|
"grad_norm": 1.6998963356018066, |
|
"learning_rate": 4.995607508656118e-05, |
|
"loss": 1.2327, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.19035795572108422, |
|
"grad_norm": 2.0543465614318848, |
|
"learning_rate": 4.995111718231704e-05, |
|
"loss": 1.215, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.19863438857852267, |
|
"grad_norm": 2.155479907989502, |
|
"learning_rate": 4.9945894504306214e-05, |
|
"loss": 1.1843, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.2069108214359611, |
|
"grad_norm": 1.2477573156356812, |
|
"learning_rate": 4.9940407107950175e-05, |
|
"loss": 1.171, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.21518725429339955, |
|
"grad_norm": 1.8000760078430176, |
|
"learning_rate": 4.993465505147954e-05, |
|
"loss": 1.2036, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.22346368715083798, |
|
"grad_norm": 1.8096606731414795, |
|
"learning_rate": 4.99286383959334e-05, |
|
"loss": 1.1794, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.23174012000827643, |
|
"grad_norm": 1.412726879119873, |
|
"learning_rate": 4.9922357205158695e-05, |
|
"loss": 1.1787, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.24001655286571488, |
|
"grad_norm": 1.3341314792633057, |
|
"learning_rate": 4.9915811545809557e-05, |
|
"loss": 1.1993, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2482929857231533, |
|
"grad_norm": 2.0190353393554688, |
|
"learning_rate": 4.990900148734653e-05, |
|
"loss": 1.1576, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2565694185805918, |
|
"grad_norm": 1.9342364072799683, |
|
"learning_rate": 4.9901927102035925e-05, |
|
"loss": 1.2081, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2648458514380302, |
|
"grad_norm": 2.2545788288116455, |
|
"learning_rate": 4.989458846494899e-05, |
|
"loss": 1.1935, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.27312228429546864, |
|
"grad_norm": 1.927686095237732, |
|
"learning_rate": 4.988698565396115e-05, |
|
"loss": 1.1508, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.2813987171529071, |
|
"grad_norm": 1.3246594667434692, |
|
"learning_rate": 4.987911874975114e-05, |
|
"loss": 1.138, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.28967515001034555, |
|
"grad_norm": 1.9527952671051025, |
|
"learning_rate": 4.9870987835800194e-05, |
|
"loss": 1.2036, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.297951582867784, |
|
"grad_norm": 1.1734226942062378, |
|
"learning_rate": 4.9862592998391125e-05, |
|
"loss": 1.1692, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3062280157252224, |
|
"grad_norm": 2.067718982696533, |
|
"learning_rate": 4.9853934326607434e-05, |
|
"loss": 1.1401, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.31450444858266086, |
|
"grad_norm": 1.2361276149749756, |
|
"learning_rate": 4.984501191233235e-05, |
|
"loss": 1.1868, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3227808814400993, |
|
"grad_norm": 1.379475712776184, |
|
"learning_rate": 4.983582585024784e-05, |
|
"loss": 1.1458, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.33105731429753776, |
|
"grad_norm": 1.45447838306427, |
|
"learning_rate": 4.982637623783366e-05, |
|
"loss": 1.1424, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3393337471549762, |
|
"grad_norm": 1.4039075374603271, |
|
"learning_rate": 4.981666317536624e-05, |
|
"loss": 1.1405, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.34761018001241467, |
|
"grad_norm": 1.7842013835906982, |
|
"learning_rate": 4.980668676591769e-05, |
|
"loss": 1.1444, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.35588661286985307, |
|
"grad_norm": 1.3413935899734497, |
|
"learning_rate": 4.9796447115354685e-05, |
|
"loss": 1.1256, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3641630457272915, |
|
"grad_norm": 1.3572813272476196, |
|
"learning_rate": 4.97859443323373e-05, |
|
"loss": 1.152, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.37243947858473, |
|
"grad_norm": 1.4994474649429321, |
|
"learning_rate": 4.977517852831794e-05, |
|
"loss": 1.1264, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.38071591144216843, |
|
"grad_norm": 1.2120469808578491, |
|
"learning_rate": 4.9764149817540074e-05, |
|
"loss": 1.1035, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.3889923442996069, |
|
"grad_norm": 1.2979780435562134, |
|
"learning_rate": 4.975285831703707e-05, |
|
"loss": 1.1299, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.39726877715704534, |
|
"grad_norm": 1.1150094270706177, |
|
"learning_rate": 4.974130414663094e-05, |
|
"loss": 1.1232, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.40554521001448374, |
|
"grad_norm": 1.1811398267745972, |
|
"learning_rate": 4.9729487428931084e-05, |
|
"loss": 1.1106, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.4138216428719222, |
|
"grad_norm": 1.285284399986267, |
|
"learning_rate": 4.971740828933295e-05, |
|
"loss": 1.1226, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4138216428719222, |
|
"eval_loss": 1.124096393585205, |
|
"eval_runtime": 494.2301, |
|
"eval_samples_per_second": 39.109, |
|
"eval_steps_per_second": 6.519, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.42209807572936064, |
|
"grad_norm": 1.6036468744277954, |
|
"learning_rate": 4.970631279885502e-05, |
|
"loss": 1.1088, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.4303745085867991, |
|
"grad_norm": 1.0415977239608765, |
|
"learning_rate": 4.969373541308714e-05, |
|
"loss": 1.1154, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.43865094144423755, |
|
"grad_norm": 1.207888126373291, |
|
"learning_rate": 4.9680895984810636e-05, |
|
"loss": 1.1014, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.44692737430167595, |
|
"grad_norm": 1.2686097621917725, |
|
"learning_rate": 4.966779465027368e-05, |
|
"loss": 1.1097, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.4552038071591144, |
|
"grad_norm": 1.795076847076416, |
|
"learning_rate": 4.965443154850371e-05, |
|
"loss": 1.091, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.46348024001655286, |
|
"grad_norm": 1.7811661958694458, |
|
"learning_rate": 4.964080682130594e-05, |
|
"loss": 1.0852, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.4717566728739913, |
|
"grad_norm": 1.7507492303848267, |
|
"learning_rate": 4.962692061326189e-05, |
|
"loss": 1.0961, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.48003310573142977, |
|
"grad_norm": 1.575803279876709, |
|
"learning_rate": 4.961277307172783e-05, |
|
"loss": 1.1022, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.4883095385888682, |
|
"grad_norm": 1.2174866199493408, |
|
"learning_rate": 4.959836434683322e-05, |
|
"loss": 1.0551, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.4965859714463066, |
|
"grad_norm": 1.272923231124878, |
|
"learning_rate": 4.9583694591479144e-05, |
|
"loss": 1.113, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5048624043037451, |
|
"grad_norm": 1.100860357284546, |
|
"learning_rate": 4.9568763961336626e-05, |
|
"loss": 1.0731, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5131388371611836, |
|
"grad_norm": 1.1055638790130615, |
|
"learning_rate": 4.955357261484503e-05, |
|
"loss": 1.0928, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.521415270018622, |
|
"grad_norm": 1.1676515340805054, |
|
"learning_rate": 4.953812071321037e-05, |
|
"loss": 1.1089, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5296917028760604, |
|
"grad_norm": 1.578491449356079, |
|
"learning_rate": 4.952240842040359e-05, |
|
"loss": 1.0972, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5379681357334989, |
|
"grad_norm": 1.2124993801116943, |
|
"learning_rate": 4.950643590315882e-05, |
|
"loss": 1.0518, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5462445685909373, |
|
"grad_norm": 1.1784847974777222, |
|
"learning_rate": 4.949020333097163e-05, |
|
"loss": 1.0882, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5545210014483758, |
|
"grad_norm": 1.2548474073410034, |
|
"learning_rate": 4.9473710876097197e-05, |
|
"loss": 1.0578, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.5627974343058142, |
|
"grad_norm": 1.05464506149292, |
|
"learning_rate": 4.945695871354849e-05, |
|
"loss": 1.0833, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.5710738671632526, |
|
"grad_norm": 1.4869188070297241, |
|
"learning_rate": 4.943994702109446e-05, |
|
"loss": 1.0485, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.5793503000206911, |
|
"grad_norm": 1.298762559890747, |
|
"learning_rate": 4.942267597925805e-05, |
|
"loss": 1.0704, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.5876267328781295, |
|
"grad_norm": 1.2249423265457153, |
|
"learning_rate": 4.940514577131439e-05, |
|
"loss": 1.0614, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.595903165735568, |
|
"grad_norm": 1.1190897226333618, |
|
"learning_rate": 4.938735658328878e-05, |
|
"loss": 1.0466, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6041795985930064, |
|
"grad_norm": 1.0651410818099976, |
|
"learning_rate": 4.936930860395474e-05, |
|
"loss": 1.0352, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6124560314504448, |
|
"grad_norm": 1.2162879705429077, |
|
"learning_rate": 4.9351002024832026e-05, |
|
"loss": 1.0527, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6207324643078833, |
|
"grad_norm": 1.3244678974151611, |
|
"learning_rate": 4.933243704018454e-05, |
|
"loss": 1.0415, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6290088971653217, |
|
"grad_norm": 1.3377256393432617, |
|
"learning_rate": 4.931361384701835e-05, |
|
"loss": 1.0753, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6372853300227602, |
|
"grad_norm": 1.3565047979354858, |
|
"learning_rate": 4.9294532645079504e-05, |
|
"loss": 1.064, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.6455617628801986, |
|
"grad_norm": 1.2356196641921997, |
|
"learning_rate": 4.9275193636852014e-05, |
|
"loss": 1.0539, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.6538381957376371, |
|
"grad_norm": 1.0150561332702637, |
|
"learning_rate": 4.9255597027555617e-05, |
|
"loss": 1.0389, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.6621146285950755, |
|
"grad_norm": 1.0787653923034668, |
|
"learning_rate": 4.9235743025143656e-05, |
|
"loss": 1.0415, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.6703910614525139, |
|
"grad_norm": 1.1886955499649048, |
|
"learning_rate": 4.921563184030084e-05, |
|
"loss": 1.027, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.6786674943099524, |
|
"grad_norm": 1.1936701536178589, |
|
"learning_rate": 4.919526368644103e-05, |
|
"loss": 1.022, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.6869439271673908, |
|
"grad_norm": 1.2435652017593384, |
|
"learning_rate": 4.917463877970496e-05, |
|
"loss": 1.0269, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.6952203600248293, |
|
"grad_norm": 1.1247020959854126, |
|
"learning_rate": 4.915375733895796e-05, |
|
"loss": 1.0319, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7034967928822677, |
|
"grad_norm": 1.8782734870910645, |
|
"learning_rate": 4.91326195857876e-05, |
|
"loss": 1.0327, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7117732257397061, |
|
"grad_norm": 1.1094200611114502, |
|
"learning_rate": 4.911122574450137e-05, |
|
"loss": 1.0139, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.7200496585971446, |
|
"grad_norm": 1.0171282291412354, |
|
"learning_rate": 4.90895760421243e-05, |
|
"loss": 1.0061, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.728326091454583, |
|
"grad_norm": 1.22124445438385, |
|
"learning_rate": 4.906767070839653e-05, |
|
"loss": 1.0087, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.7366025243120216, |
|
"grad_norm": 1.43118155002594, |
|
"learning_rate": 4.90455099757709e-05, |
|
"loss": 1.0265, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.74487895716946, |
|
"grad_norm": 1.6183593273162842, |
|
"learning_rate": 4.902309407941043e-05, |
|
"loss": 0.998, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.7531553900268984, |
|
"grad_norm": 1.1645337343215942, |
|
"learning_rate": 4.9000423257185904e-05, |
|
"loss": 1.0293, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.7614318228843369, |
|
"grad_norm": 1.1429606676101685, |
|
"learning_rate": 4.897749774967326e-05, |
|
"loss": 1.0308, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.7697082557417753, |
|
"grad_norm": 1.0337284803390503, |
|
"learning_rate": 4.895431780015114e-05, |
|
"loss": 1.0067, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.7779846885992138, |
|
"grad_norm": 1.1541541814804077, |
|
"learning_rate": 4.893088365459817e-05, |
|
"loss": 1.0004, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.7862611214566522, |
|
"grad_norm": 1.2892276048660278, |
|
"learning_rate": 4.8907195561690496e-05, |
|
"loss": 0.9858, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.7945375543140907, |
|
"grad_norm": 1.098703145980835, |
|
"learning_rate": 4.8883253772799046e-05, |
|
"loss": 0.9817, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8028139871715291, |
|
"grad_norm": 1.1893219947814941, |
|
"learning_rate": 4.885905854198688e-05, |
|
"loss": 0.983, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.8110904200289675, |
|
"grad_norm": 1.0653300285339355, |
|
"learning_rate": 4.8834610126006555e-05, |
|
"loss": 1.0175, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.819366852886406, |
|
"grad_norm": 1.2211158275604248, |
|
"learning_rate": 4.88099087842973e-05, |
|
"loss": 0.9644, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.8276432857438444, |
|
"grad_norm": 1.2194496393203735, |
|
"learning_rate": 4.878495477898235e-05, |
|
"loss": 0.9766, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8276432857438444, |
|
"eval_loss": 1.0402740240097046, |
|
"eval_runtime": 580.0141, |
|
"eval_samples_per_second": 33.325, |
|
"eval_steps_per_second": 5.555, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.7920015683199373, |
|
"grad_norm": 1.1055457592010498, |
|
"learning_rate": 4.888596070759588e-05, |
|
"loss": 0.7878, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.7998431680062733, |
|
"grad_norm": 1.0589972734451294, |
|
"learning_rate": 4.8863072734908147e-05, |
|
"loss": 0.774, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.8076847676926093, |
|
"grad_norm": 0.9427498579025269, |
|
"learning_rate": 4.8839957498448887e-05, |
|
"loss": 0.7741, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.8155263673789453, |
|
"grad_norm": 1.094714879989624, |
|
"learning_rate": 4.881661521835972e-05, |
|
"loss": 0.7552, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.8233679670652814, |
|
"grad_norm": 1.029213786125183, |
|
"learning_rate": 4.879304611694457e-05, |
|
"loss": 0.7632, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.8312095667516173, |
|
"grad_norm": 1.1656430959701538, |
|
"learning_rate": 4.876925041866754e-05, |
|
"loss": 0.7533, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.8390511664379533, |
|
"grad_norm": 1.0112115144729614, |
|
"learning_rate": 4.8745228350150734e-05, |
|
"loss": 0.7536, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.8468927661242893, |
|
"grad_norm": 1.1366935968399048, |
|
"learning_rate": 4.8720980140172144e-05, |
|
"loss": 0.7892, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.8547343658106253, |
|
"grad_norm": 1.4567644596099854, |
|
"learning_rate": 4.869650601966348e-05, |
|
"loss": 0.7585, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.8625759654969614, |
|
"grad_norm": 1.1882156133651733, |
|
"learning_rate": 4.8671806221707886e-05, |
|
"loss": 0.7565, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.8704175651832974, |
|
"grad_norm": 1.055330753326416, |
|
"learning_rate": 4.864688098153786e-05, |
|
"loss": 0.775, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.8782591648696334, |
|
"grad_norm": 1.073933720588684, |
|
"learning_rate": 4.862173053653286e-05, |
|
"loss": 0.7524, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.8861007645559694, |
|
"grad_norm": 1.1609364748001099, |
|
"learning_rate": 4.859635512621718e-05, |
|
"loss": 0.7492, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.8939423642423054, |
|
"grad_norm": 0.9947859048843384, |
|
"learning_rate": 4.857075499225756e-05, |
|
"loss": 0.7526, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.9017839639286415, |
|
"grad_norm": 6.8848652839660645, |
|
"learning_rate": 4.8544930378460954e-05, |
|
"loss": 0.7702, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.9096255636149775, |
|
"grad_norm": 1.043473243713379, |
|
"learning_rate": 4.851888153077219e-05, |
|
"loss": 0.7756, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.9174671633013135, |
|
"grad_norm": 1.2918435335159302, |
|
"learning_rate": 4.849260869727161e-05, |
|
"loss": 0.7663, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.9253087629876495, |
|
"grad_norm": 1.2701385021209717, |
|
"learning_rate": 4.846611212817273e-05, |
|
"loss": 0.7656, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.9331503626739855, |
|
"grad_norm": 1.0570497512817383, |
|
"learning_rate": 4.843939207581983e-05, |
|
"loss": 0.757, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.9409919623603215, |
|
"grad_norm": 1.145662784576416, |
|
"learning_rate": 4.841244879468561e-05, |
|
"loss": 0.7464, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.9488335620466575, |
|
"grad_norm": 1.1173900365829468, |
|
"learning_rate": 4.838528254136866e-05, |
|
"loss": 0.7375, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.9566751617329935, |
|
"grad_norm": 1.2657852172851562, |
|
"learning_rate": 4.835789357459114e-05, |
|
"loss": 0.7682, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.9645167614193295, |
|
"grad_norm": 1.141322135925293, |
|
"learning_rate": 4.833028215519623e-05, |
|
"loss": 0.7425, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.9723583611056655, |
|
"grad_norm": 1.0891972780227661, |
|
"learning_rate": 4.830244854614568e-05, |
|
"loss": 0.7735, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.9801999607920016, |
|
"grad_norm": 1.1772578954696655, |
|
"learning_rate": 4.827439301251729e-05, |
|
"loss": 0.7591, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.9880415604783376, |
|
"grad_norm": 1.1350594758987427, |
|
"learning_rate": 4.824611582150241e-05, |
|
"loss": 0.7522, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.9958831601646736, |
|
"grad_norm": 1.03608238697052, |
|
"learning_rate": 4.821761724240336e-05, |
|
"loss": 0.7608, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.0037247598510095, |
|
"grad_norm": 1.1283738613128662, |
|
"learning_rate": 4.81888975466309e-05, |
|
"loss": 0.7299, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.0115663595373456, |
|
"grad_norm": 1.297022819519043, |
|
"learning_rate": 4.815995700770162e-05, |
|
"loss": 0.6897, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.0194079592236815, |
|
"grad_norm": 1.198197603225708, |
|
"learning_rate": 4.813079590123535e-05, |
|
"loss": 0.7174, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.0272495589100177, |
|
"grad_norm": 1.2182512283325195, |
|
"learning_rate": 4.810141450495254e-05, |
|
"loss": 0.6865, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.0350911585963536, |
|
"grad_norm": 1.0651652812957764, |
|
"learning_rate": 4.807181309867157e-05, |
|
"loss": 0.6786, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.0429327582826897, |
|
"grad_norm": 1.1763614416122437, |
|
"learning_rate": 4.8041991964306164e-05, |
|
"loss": 0.6762, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.0507743579690256, |
|
"grad_norm": 1.0765550136566162, |
|
"learning_rate": 4.801195138586263e-05, |
|
"loss": 0.6623, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.0586159576553618, |
|
"grad_norm": 1.19382905960083, |
|
"learning_rate": 4.7981691649437214e-05, |
|
"loss": 0.6796, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.0664575573416977, |
|
"grad_norm": 1.323279619216919, |
|
"learning_rate": 4.795121304321332e-05, |
|
"loss": 0.6904, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.0742991570280338, |
|
"grad_norm": 1.0708096027374268, |
|
"learning_rate": 4.7920515857458816e-05, |
|
"loss": 0.6986, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.0821407567143697, |
|
"grad_norm": 0.9783353209495544, |
|
"learning_rate": 4.788960038452322e-05, |
|
"loss": 0.6743, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.0899823564007058, |
|
"grad_norm": 1.1598165035247803, |
|
"learning_rate": 4.785846691883498e-05, |
|
"loss": 0.6881, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.0978239560870418, |
|
"grad_norm": 0.9942623376846313, |
|
"learning_rate": 4.782711575689859e-05, |
|
"loss": 0.6658, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.1056655557733777, |
|
"grad_norm": 1.0791629552841187, |
|
"learning_rate": 4.7795547197291844e-05, |
|
"loss": 0.6896, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.1135071554597138, |
|
"grad_norm": 1.3438973426818848, |
|
"learning_rate": 4.7763761540662945e-05, |
|
"loss": 0.6592, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.1213487551460497, |
|
"grad_norm": 1.0666946172714233, |
|
"learning_rate": 4.773175908972765e-05, |
|
"loss": 0.6656, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.1291903548323858, |
|
"grad_norm": 0.9979410767555237, |
|
"learning_rate": 4.76995401492664e-05, |
|
"loss": 0.6668, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.1370319545187217, |
|
"grad_norm": 1.0160318613052368, |
|
"learning_rate": 4.766710502612138e-05, |
|
"loss": 0.6719, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.1448735542050579, |
|
"grad_norm": 1.1360750198364258, |
|
"learning_rate": 4.763445402919369e-05, |
|
"loss": 0.6651, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.1527151538913938, |
|
"grad_norm": 1.293106198310852, |
|
"learning_rate": 4.7601587469440255e-05, |
|
"loss": 0.6736, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.16055675357773, |
|
"grad_norm": 1.0604827404022217, |
|
"learning_rate": 4.756850565987101e-05, |
|
"loss": 0.6627, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.1683983532640658, |
|
"grad_norm": 1.276926875114441, |
|
"learning_rate": 4.753520891554584e-05, |
|
"loss": 0.6714, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.176239952950402, |
|
"grad_norm": 0.9686921238899231, |
|
"learning_rate": 4.7501697553571564e-05, |
|
"loss": 0.6781, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.176239952950402, |
|
"eval_loss": 1.0382379293441772, |
|
"eval_runtime": 250.4218, |
|
"eval_samples_per_second": 38.595, |
|
"eval_steps_per_second": 6.433, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.1840815526367379, |
|
"grad_norm": 1.0833640098571777, |
|
"learning_rate": 4.7467971893099e-05, |
|
"loss": 0.6692, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.191923152323074, |
|
"grad_norm": 1.011971116065979, |
|
"learning_rate": 4.743403225531983e-05, |
|
"loss": 0.6576, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.19976475200941, |
|
"grad_norm": 1.0864920616149902, |
|
"learning_rate": 4.740330389782858e-05, |
|
"loss": 0.6742, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.207606351695746, |
|
"grad_norm": 1.0627334117889404, |
|
"learning_rate": 4.736895859534605e-05, |
|
"loss": 0.6878, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.215447951382082, |
|
"grad_norm": 1.0697227716445923, |
|
"learning_rate": 4.7334400258525804e-05, |
|
"loss": 0.6952, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.2232895510684179, |
|
"grad_norm": 1.106345534324646, |
|
"learning_rate": 4.7299629216489685e-05, |
|
"loss": 0.6788, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.231131150754754, |
|
"grad_norm": 1.0687874555587769, |
|
"learning_rate": 4.7264645800385235e-05, |
|
"loss": 0.6828, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.23897275044109, |
|
"grad_norm": 1.0802325010299683, |
|
"learning_rate": 4.7229450343382585e-05, |
|
"loss": 0.6767, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.246814350127426, |
|
"grad_norm": 1.0141079425811768, |
|
"learning_rate": 4.7194043180671274e-05, |
|
"loss": 0.6676, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.254655949813762, |
|
"grad_norm": 1.02689790725708, |
|
"learning_rate": 4.715842464945706e-05, |
|
"loss": 0.6627, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.262497549500098, |
|
"grad_norm": 0.9644928574562073, |
|
"learning_rate": 4.712259508895868e-05, |
|
"loss": 0.6752, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.270339149186434, |
|
"grad_norm": 1.024220585823059, |
|
"learning_rate": 4.7086554840404676e-05, |
|
"loss": 0.6773, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.2781807488727701, |
|
"grad_norm": 1.1297568082809448, |
|
"learning_rate": 4.705030424703006e-05, |
|
"loss": 0.6787, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.286022348559106, |
|
"grad_norm": 1.1851943731307983, |
|
"learning_rate": 4.701384365407316e-05, |
|
"loss": 0.6784, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.2938639482454422, |
|
"grad_norm": 0.9990951418876648, |
|
"learning_rate": 4.6977173408772226e-05, |
|
"loss": 0.6575, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.301705547931778, |
|
"grad_norm": 0.9888284206390381, |
|
"learning_rate": 4.694029386036216e-05, |
|
"loss": 0.6774, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.309547147618114, |
|
"grad_norm": 1.090669870376587, |
|
"learning_rate": 4.6903205360071235e-05, |
|
"loss": 0.6822, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.31738874730445, |
|
"grad_norm": 1.0397796630859375, |
|
"learning_rate": 4.686590826111768e-05, |
|
"loss": 0.6787, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.3252303469907862, |
|
"grad_norm": 1.1437417268753052, |
|
"learning_rate": 4.682840291870635e-05, |
|
"loss": 0.6845, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.3330719466771221, |
|
"grad_norm": 1.1360801458358765, |
|
"learning_rate": 4.679068969002534e-05, |
|
"loss": 0.6715, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.340913546363458, |
|
"grad_norm": 0.9892112612724304, |
|
"learning_rate": 4.675276893424259e-05, |
|
"loss": 0.6758, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.3487551460497942, |
|
"grad_norm": 1.129503607749939, |
|
"learning_rate": 4.671464101250246e-05, |
|
"loss": 0.6822, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.35659674573613, |
|
"grad_norm": 1.2884234189987183, |
|
"learning_rate": 4.667630628792227e-05, |
|
"loss": 0.6657, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.3644383454224662, |
|
"grad_norm": 1.1253407001495361, |
|
"learning_rate": 4.663776512558887e-05, |
|
"loss": 0.6644, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.3722799451088021, |
|
"grad_norm": 1.035405158996582, |
|
"learning_rate": 4.659901789255517e-05, |
|
"loss": 0.692, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.3801215447951383, |
|
"grad_norm": 0.9973240494728088, |
|
"learning_rate": 4.656006495783661e-05, |
|
"loss": 0.6493, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.3879631444814742, |
|
"grad_norm": 1.12758469581604, |
|
"learning_rate": 4.6520906692407645e-05, |
|
"loss": 0.6827, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.3958047441678103, |
|
"grad_norm": 0.9674776196479797, |
|
"learning_rate": 4.6481543469198265e-05, |
|
"loss": 0.6778, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.4036463438541462, |
|
"grad_norm": 0.9787268042564392, |
|
"learning_rate": 4.644197566309039e-05, |
|
"loss": 0.6653, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.4114879435404823, |
|
"grad_norm": 1.0090129375457764, |
|
"learning_rate": 4.640220365091431e-05, |
|
"loss": 0.6685, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.4193295432268183, |
|
"grad_norm": 1.0591384172439575, |
|
"learning_rate": 4.6362227811445124e-05, |
|
"loss": 0.6795, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.4271711429131542, |
|
"grad_norm": 1.0188828706741333, |
|
"learning_rate": 4.63220485253991e-05, |
|
"loss": 0.652, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.4350127425994903, |
|
"grad_norm": 0.908219039440155, |
|
"learning_rate": 4.628166617543005e-05, |
|
"loss": 0.6611, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.4428543422858264, |
|
"grad_norm": 1.6317641735076904, |
|
"learning_rate": 4.6241081146125735e-05, |
|
"loss": 0.6893, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.4506959419721623, |
|
"grad_norm": 1.042119026184082, |
|
"learning_rate": 4.620029382400414e-05, |
|
"loss": 0.6805, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.4585375416584982, |
|
"grad_norm": 1.0308270454406738, |
|
"learning_rate": 4.615930459750979e-05, |
|
"loss": 0.6641, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.4663791413448344, |
|
"grad_norm": 1.0121833086013794, |
|
"learning_rate": 4.611811385701014e-05, |
|
"loss": 0.6506, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.4742207410311703, |
|
"grad_norm": 0.9907941222190857, |
|
"learning_rate": 4.607672199479175e-05, |
|
"loss": 0.6742, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.4820623407175064, |
|
"grad_norm": 1.0068496465682983, |
|
"learning_rate": 4.603512940505661e-05, |
|
"loss": 0.6496, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.4899039404038423, |
|
"grad_norm": 0.9669331908226013, |
|
"learning_rate": 4.599333648391837e-05, |
|
"loss": 0.6683, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.4977455400901785, |
|
"grad_norm": 0.9877241849899292, |
|
"learning_rate": 4.595134362939856e-05, |
|
"loss": 0.6746, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.5055871397765144, |
|
"grad_norm": 0.9874888062477112, |
|
"learning_rate": 4.590915124142282e-05, |
|
"loss": 0.6741, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.5134287394628503, |
|
"grad_norm": 0.9484206438064575, |
|
"learning_rate": 4.586675972181708e-05, |
|
"loss": 0.6678, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.5212703391491864, |
|
"grad_norm": 0.9802061915397644, |
|
"learning_rate": 4.582416947430371e-05, |
|
"loss": 0.6304, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.5291119388355225, |
|
"grad_norm": 0.9652764797210693, |
|
"learning_rate": 4.578138090449772e-05, |
|
"loss": 0.6663, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.5369535385218585, |
|
"grad_norm": 1.1324354410171509, |
|
"learning_rate": 4.573839441990286e-05, |
|
"loss": 0.6613, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.5447951382081944, |
|
"grad_norm": 0.989467978477478, |
|
"learning_rate": 4.5695210429907774e-05, |
|
"loss": 0.6856, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.5526367378945305, |
|
"grad_norm": 0.9154842495918274, |
|
"learning_rate": 4.5651829345782046e-05, |
|
"loss": 0.6706, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.5604783375808666, |
|
"grad_norm": 0.9855336546897888, |
|
"learning_rate": 4.560825158067233e-05, |
|
"loss": 0.6608, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.5683199372672025, |
|
"grad_norm": 0.9735700488090515, |
|
"learning_rate": 4.556447754959844e-05, |
|
"loss": 0.6524, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.5683199372672025, |
|
"eval_loss": 1.007663607597351, |
|
"eval_runtime": 244.3994, |
|
"eval_samples_per_second": 39.546, |
|
"eval_steps_per_second": 6.592, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.5761615369535384, |
|
"grad_norm": 0.8914473056793213, |
|
"learning_rate": 4.5520507669449296e-05, |
|
"loss": 0.6428, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.5840031366398746, |
|
"grad_norm": 1.0634649991989136, |
|
"learning_rate": 4.547634235897906e-05, |
|
"loss": 0.6756, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.5918447363262107, |
|
"grad_norm": 0.9682461619377136, |
|
"learning_rate": 4.5431982038803113e-05, |
|
"loss": 0.6728, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.5996863360125464, |
|
"grad_norm": 0.9319469928741455, |
|
"learning_rate": 4.5387427131394006e-05, |
|
"loss": 0.6549, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.6075279356988825, |
|
"grad_norm": 1.000883936882019, |
|
"learning_rate": 4.5342678061077495e-05, |
|
"loss": 0.6605, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.6153695353852187, |
|
"grad_norm": 0.9240787029266357, |
|
"learning_rate": 4.5297735254028475e-05, |
|
"loss": 0.6627, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.6232111350715546, |
|
"grad_norm": 0.935151994228363, |
|
"learning_rate": 4.5252599138266926e-05, |
|
"loss": 0.6577, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.6310527347578905, |
|
"grad_norm": 0.8398969769477844, |
|
"learning_rate": 4.520727014365383e-05, |
|
"loss": 0.6807, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.6388943344442266, |
|
"grad_norm": 1.0277074575424194, |
|
"learning_rate": 4.516174870188707e-05, |
|
"loss": 0.6708, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.6467359341305627, |
|
"grad_norm": 1.0168735980987549, |
|
"learning_rate": 4.511603524649737e-05, |
|
"loss": 0.6306, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.6545775338168986, |
|
"grad_norm": 0.9712296724319458, |
|
"learning_rate": 4.5070130212844075e-05, |
|
"loss": 0.6664, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.6624191335032346, |
|
"grad_norm": 0.8896380662918091, |
|
"learning_rate": 4.502403403811109e-05, |
|
"loss": 0.6614, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.6702607331895707, |
|
"grad_norm": 0.876569926738739, |
|
"learning_rate": 4.497774716130266e-05, |
|
"loss": 0.644, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.6781023328759068, |
|
"grad_norm": 0.8911204934120178, |
|
"learning_rate": 4.493127002323925e-05, |
|
"loss": 0.6564, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.6859439325622427, |
|
"grad_norm": 1.0690345764160156, |
|
"learning_rate": 4.488460306655325e-05, |
|
"loss": 0.655, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.6937855322485786, |
|
"grad_norm": 0.9396145343780518, |
|
"learning_rate": 4.4837746735684874e-05, |
|
"loss": 0.6873, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.7016271319349148, |
|
"grad_norm": 0.9179625511169434, |
|
"learning_rate": 4.479070147687783e-05, |
|
"loss": 0.6711, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.7094687316212507, |
|
"grad_norm": 0.8691576719284058, |
|
"learning_rate": 4.4743467738175145e-05, |
|
"loss": 0.6568, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.7173103313075866, |
|
"grad_norm": 0.8838081955909729, |
|
"learning_rate": 4.469604596941483e-05, |
|
"loss": 0.6531, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.7251519309939227, |
|
"grad_norm": 1.0800485610961914, |
|
"learning_rate": 4.4648436622225676e-05, |
|
"loss": 0.6915, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.7329935306802589, |
|
"grad_norm": 1.1134532690048218, |
|
"learning_rate": 4.460064015002284e-05, |
|
"loss": 0.6667, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.7408351303665948, |
|
"grad_norm": 0.8759159445762634, |
|
"learning_rate": 4.4552657008003676e-05, |
|
"loss": 0.6578, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.7486767300529307, |
|
"grad_norm": 0.9356112480163574, |
|
"learning_rate": 4.4504487653143235e-05, |
|
"loss": 0.6706, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.7565183297392668, |
|
"grad_norm": 1.0078978538513184, |
|
"learning_rate": 4.445613254419006e-05, |
|
"loss": 0.6486, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.764359929425603, |
|
"grad_norm": 0.984367311000824, |
|
"learning_rate": 4.440759214166172e-05, |
|
"loss": 0.6643, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.7722015291119388, |
|
"grad_norm": 1.0800858736038208, |
|
"learning_rate": 4.435886690784047e-05, |
|
"loss": 0.6621, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.7800431287982748, |
|
"grad_norm": 0.9246965050697327, |
|
"learning_rate": 4.430995730676882e-05, |
|
"loss": 0.6671, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.7878847284846109, |
|
"grad_norm": 1.0341116189956665, |
|
"learning_rate": 4.4260863804245146e-05, |
|
"loss": 0.6736, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.795726328170947, |
|
"grad_norm": 0.9403420686721802, |
|
"learning_rate": 4.421652280262614e-05, |
|
"loss": 0.6223, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.803567927857283, |
|
"grad_norm": 0.9806979894638062, |
|
"learning_rate": 4.4167081176887246e-05, |
|
"loss": 0.6622, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.8114095275436188, |
|
"grad_norm": 1.016072392463684, |
|
"learning_rate": 4.4117457010399885e-05, |
|
"loss": 0.6526, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.819251127229955, |
|
"grad_norm": 0.9248052835464478, |
|
"learning_rate": 4.4067650775767717e-05, |
|
"loss": 0.6459, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.8270927269162909, |
|
"grad_norm": 0.9529238343238831, |
|
"learning_rate": 4.401766294732834e-05, |
|
"loss": 0.6584, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.8349343266026268, |
|
"grad_norm": 0.9078419208526611, |
|
"learning_rate": 4.396749400114879e-05, |
|
"loss": 0.6832, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.842775926288963, |
|
"grad_norm": 0.9941418766975403, |
|
"learning_rate": 4.391714441502103e-05, |
|
"loss": 0.6664, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.850617525975299, |
|
"grad_norm": 0.944197952747345, |
|
"learning_rate": 4.387167573663176e-05, |
|
"loss": 0.6578, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.858459125661635, |
|
"grad_norm": 0.9415684938430786, |
|
"learning_rate": 4.382098425707695e-05, |
|
"loss": 0.6409, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.8663007253479709, |
|
"grad_norm": 0.9796119928359985, |
|
"learning_rate": 4.377011353288277e-05, |
|
"loss": 0.6593, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.874142325034307, |
|
"grad_norm": 0.9953365325927734, |
|
"learning_rate": 4.3719064048524696e-05, |
|
"loss": 0.6624, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.8819839247206431, |
|
"grad_norm": 0.8975547552108765, |
|
"learning_rate": 4.3667836290180586e-05, |
|
"loss": 0.6541, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.889825524406979, |
|
"grad_norm": 1.0561144351959229, |
|
"learning_rate": 4.361643074572617e-05, |
|
"loss": 0.6307, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.897667124093315, |
|
"grad_norm": 0.9564677476882935, |
|
"learning_rate": 4.356484790473032e-05, |
|
"loss": 0.6293, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.905508723779651, |
|
"grad_norm": 10.329718589782715, |
|
"learning_rate": 4.351308825845044e-05, |
|
"loss": 0.6628, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.9133503234659872, |
|
"grad_norm": 0.9198611378669739, |
|
"learning_rate": 4.3461152299827754e-05, |
|
"loss": 0.6617, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.9211919231523231, |
|
"grad_norm": 0.893352210521698, |
|
"learning_rate": 4.340904052348264e-05, |
|
"loss": 0.6266, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.929033522838659, |
|
"grad_norm": 0.9610190391540527, |
|
"learning_rate": 4.3356753425709885e-05, |
|
"loss": 0.6603, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.9368751225249952, |
|
"grad_norm": 0.9027713537216187, |
|
"learning_rate": 4.330429150447399e-05, |
|
"loss": 0.6759, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.944716722211331, |
|
"grad_norm": 0.8384199142456055, |
|
"learning_rate": 4.325165525940442e-05, |
|
"loss": 0.6429, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.952558321897667, |
|
"grad_norm": 0.8894764184951782, |
|
"learning_rate": 4.319884519179082e-05, |
|
"loss": 0.6529, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.9603999215840031, |
|
"grad_norm": 0.8820430040359497, |
|
"learning_rate": 4.3145861804578283e-05, |
|
"loss": 0.6591, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.9603999215840031, |
|
"eval_loss": 0.9828659892082214, |
|
"eval_runtime": 242.2012, |
|
"eval_samples_per_second": 39.905, |
|
"eval_steps_per_second": 6.651, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.9684375612624976, |
|
"grad_norm": 0.9603639841079712, |
|
"learning_rate": 4.3092705602362536e-05, |
|
"loss": 0.6308, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.9762791609488337, |
|
"grad_norm": 0.9399013519287109, |
|
"learning_rate": 4.303937709138512e-05, |
|
"loss": 0.6523, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.9841207606351696, |
|
"grad_norm": 0.8963415026664734, |
|
"learning_rate": 4.298587677952861e-05, |
|
"loss": 0.6764, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.9919623603215055, |
|
"grad_norm": 0.8300694227218628, |
|
"learning_rate": 4.2932205176311726e-05, |
|
"loss": 0.6513, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.9998039600078417, |
|
"grad_norm": 0.8571862578392029, |
|
"learning_rate": 4.287836279288453e-05, |
|
"loss": 0.6521, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.007645559694178, |
|
"grad_norm": 1.0097789764404297, |
|
"learning_rate": 4.282435014202354e-05, |
|
"loss": 0.3983, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.0154871593805135, |
|
"grad_norm": 0.9734821319580078, |
|
"learning_rate": 4.2770167738126834e-05, |
|
"loss": 0.3739, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.0233287590668496, |
|
"grad_norm": 1.0220587253570557, |
|
"learning_rate": 4.271581609720915e-05, |
|
"loss": 0.3803, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.0311703587531857, |
|
"grad_norm": 1.0062997341156006, |
|
"learning_rate": 4.266129573689699e-05, |
|
"loss": 0.3539, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.039011958439522, |
|
"grad_norm": 1.014708161354065, |
|
"learning_rate": 4.260660717642369e-05, |
|
"loss": 0.3699, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.0468535581258576, |
|
"grad_norm": 1.0316507816314697, |
|
"learning_rate": 4.255175093662446e-05, |
|
"loss": 0.3725, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.0546951578121937, |
|
"grad_norm": 1.0374858379364014, |
|
"learning_rate": 4.249672753993143e-05, |
|
"loss": 0.3732, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.06253675749853, |
|
"grad_norm": 0.9619321823120117, |
|
"learning_rate": 4.2441537510368675e-05, |
|
"loss": 0.369, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.0703783571848655, |
|
"grad_norm": 0.8847309350967407, |
|
"learning_rate": 4.238618137354723e-05, |
|
"loss": 0.3582, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.0782199568712016, |
|
"grad_norm": 0.8922093510627747, |
|
"learning_rate": 4.2330659656660075e-05, |
|
"loss": 0.3553, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.0860615565575378, |
|
"grad_norm": 0.9724310040473938, |
|
"learning_rate": 4.227497288847712e-05, |
|
"loss": 0.3771, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.093903156243874, |
|
"grad_norm": 1.001301646232605, |
|
"learning_rate": 4.221912159934016e-05, |
|
"loss": 0.3773, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.1017447559302096, |
|
"grad_norm": 0.971805214881897, |
|
"learning_rate": 4.216310632115784e-05, |
|
"loss": 0.3748, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.1095863556165457, |
|
"grad_norm": 0.9344645142555237, |
|
"learning_rate": 4.2106927587400566e-05, |
|
"loss": 0.3748, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.117427955302882, |
|
"grad_norm": 0.9709860682487488, |
|
"learning_rate": 4.205058593309548e-05, |
|
"loss": 0.3792, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.125269554989218, |
|
"grad_norm": 1.0208593606948853, |
|
"learning_rate": 4.199408189482127e-05, |
|
"loss": 0.3724, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.1331111546755537, |
|
"grad_norm": 0.995505154132843, |
|
"learning_rate": 4.193741601070315e-05, |
|
"loss": 0.3843, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.14095275436189, |
|
"grad_norm": 1.059967279434204, |
|
"learning_rate": 4.1880588820407673e-05, |
|
"loss": 0.377, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.148794354048226, |
|
"grad_norm": 1.062596082687378, |
|
"learning_rate": 4.1823600865137643e-05, |
|
"loss": 0.3779, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.156635953734562, |
|
"grad_norm": 0.9699451327323914, |
|
"learning_rate": 4.1766452687626926e-05, |
|
"loss": 0.3889, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.1644775534208978, |
|
"grad_norm": 1.0266555547714233, |
|
"learning_rate": 4.170914483213528e-05, |
|
"loss": 0.3711, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.172319153107234, |
|
"grad_norm": 0.9311341047286987, |
|
"learning_rate": 4.165167784444319e-05, |
|
"loss": 0.3577, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.18016075279357, |
|
"grad_norm": 0.9153704047203064, |
|
"learning_rate": 4.1594052271846663e-05, |
|
"loss": 0.3778, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.1880023524799057, |
|
"grad_norm": 0.9365727305412292, |
|
"learning_rate": 4.1536268663152005e-05, |
|
"loss": 0.3924, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.195843952166242, |
|
"grad_norm": 1.0049713850021362, |
|
"learning_rate": 4.147832756867062e-05, |
|
"loss": 0.4038, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.203685551852578, |
|
"grad_norm": 1.0050500631332397, |
|
"learning_rate": 4.142022954021374e-05, |
|
"loss": 0.4056, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.211527151538914, |
|
"grad_norm": 0.9921131730079651, |
|
"learning_rate": 4.1361975131087184e-05, |
|
"loss": 0.3959, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.21936875122525, |
|
"grad_norm": 0.9911596179008484, |
|
"learning_rate": 4.1303564896086086e-05, |
|
"loss": 0.4093, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.227210350911586, |
|
"grad_norm": 0.9569091796875, |
|
"learning_rate": 4.1244999391489636e-05, |
|
"loss": 0.3944, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.235051950597922, |
|
"grad_norm": 1.010050654411316, |
|
"learning_rate": 4.1186279175055715e-05, |
|
"loss": 0.393, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.242893550284258, |
|
"grad_norm": 0.9854335784912109, |
|
"learning_rate": 4.112740480601566e-05, |
|
"loss": 0.3923, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.250735149970594, |
|
"grad_norm": 0.9931651949882507, |
|
"learning_rate": 4.1068376845068904e-05, |
|
"loss": 0.4148, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.25857674965693, |
|
"grad_norm": 0.9877751469612122, |
|
"learning_rate": 4.100919585437762e-05, |
|
"loss": 0.3918, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.266418349343266, |
|
"grad_norm": 1.1037381887435913, |
|
"learning_rate": 4.094986239756139e-05, |
|
"loss": 0.386, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.2742599490296023, |
|
"grad_norm": 0.9635111093521118, |
|
"learning_rate": 4.089037703969184e-05, |
|
"loss": 0.404, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.282101548715938, |
|
"grad_norm": 1.041157603263855, |
|
"learning_rate": 4.083074034728725e-05, |
|
"loss": 0.4102, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.289943148402274, |
|
"grad_norm": 0.9768211245536804, |
|
"learning_rate": 4.0770952888307143e-05, |
|
"loss": 0.3885, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.29778474808861, |
|
"grad_norm": 1.0234442949295044, |
|
"learning_rate": 4.07110152321469e-05, |
|
"loss": 0.409, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.305626347774946, |
|
"grad_norm": 0.9377536773681641, |
|
"learning_rate": 4.0650927949632335e-05, |
|
"loss": 0.4072, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.313467947461282, |
|
"grad_norm": 0.8978007435798645, |
|
"learning_rate": 4.0590691613014244e-05, |
|
"loss": 0.4, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.321309547147618, |
|
"grad_norm": 46.86246109008789, |
|
"learning_rate": 4.053030679596297e-05, |
|
"loss": 0.4276, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.3291511468339543, |
|
"grad_norm": 0.9630893468856812, |
|
"learning_rate": 4.0469774073562927e-05, |
|
"loss": 0.4096, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.33699274652029, |
|
"grad_norm": 0.9448952674865723, |
|
"learning_rate": 4.040909402230714e-05, |
|
"loss": 0.4159, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.344834346206626, |
|
"grad_norm": 0.9361767768859863, |
|
"learning_rate": 4.034826722009172e-05, |
|
"loss": 0.4089, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.3526759458929623, |
|
"grad_norm": 0.9969122409820557, |
|
"learning_rate": 4.0287294246210414e-05, |
|
"loss": 0.422, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.3526759458929623, |
|
"eval_loss": 1.1007354259490967, |
|
"eval_runtime": 250.2946, |
|
"eval_samples_per_second": 38.615, |
|
"eval_steps_per_second": 6.436, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.3607135855714567, |
|
"grad_norm": 1.070020318031311, |
|
"learning_rate": 4.022617568134903e-05, |
|
"loss": 0.4181, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.3685551852577924, |
|
"grad_norm": 0.9444131255149841, |
|
"learning_rate": 4.01710449737401e-05, |
|
"loss": 0.4108, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.3763967849441285, |
|
"grad_norm": 0.9726797342300415, |
|
"learning_rate": 4.010965139076752e-05, |
|
"loss": 0.4116, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.3842383846304647, |
|
"grad_norm": 0.9778685569763184, |
|
"learning_rate": 4.004811390862488e-05, |
|
"loss": 0.4285, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.392079984316801, |
|
"grad_norm": 0.9821404814720154, |
|
"learning_rate": 3.998643311337419e-05, |
|
"loss": 0.4278, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.3999215840031365, |
|
"grad_norm": 0.979962944984436, |
|
"learning_rate": 3.9924609592442326e-05, |
|
"loss": 0.415, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.4077631836894726, |
|
"grad_norm": 1.1424775123596191, |
|
"learning_rate": 3.986264393461542e-05, |
|
"loss": 0.4253, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.4156047833758088, |
|
"grad_norm": 0.9540061354637146, |
|
"learning_rate": 3.9800536730033274e-05, |
|
"loss": 0.3971, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.423446383062145, |
|
"grad_norm": 0.914680004119873, |
|
"learning_rate": 3.973828857018374e-05, |
|
"loss": 0.4206, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.4312879827484806, |
|
"grad_norm": 1.0090687274932861, |
|
"learning_rate": 3.967590004789709e-05, |
|
"loss": 0.4158, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.4391295824348167, |
|
"grad_norm": 1.0544170141220093, |
|
"learning_rate": 3.961337175734032e-05, |
|
"loss": 0.4218, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.446971182121153, |
|
"grad_norm": 1.0221105813980103, |
|
"learning_rate": 3.9550704294011585e-05, |
|
"loss": 0.4092, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.4548127818074885, |
|
"grad_norm": 1.0691900253295898, |
|
"learning_rate": 3.948789825473443e-05, |
|
"loss": 0.4105, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.4626543814938247, |
|
"grad_norm": 1.0215855836868286, |
|
"learning_rate": 3.9424954237652166e-05, |
|
"loss": 0.4139, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.470495981180161, |
|
"grad_norm": 1.0506454706192017, |
|
"learning_rate": 3.936187284222218e-05, |
|
"loss": 0.4075, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.478337580866497, |
|
"grad_norm": 1.0375871658325195, |
|
"learning_rate": 3.929865466921016e-05, |
|
"loss": 0.423, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.4861791805528326, |
|
"grad_norm": 0.9551310539245605, |
|
"learning_rate": 3.923530032068445e-05, |
|
"loss": 0.4285, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.4940207802391687, |
|
"grad_norm": 0.9317300319671631, |
|
"learning_rate": 3.917181040001027e-05, |
|
"loss": 0.4125, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.501862379925505, |
|
"grad_norm": 0.9303199648857117, |
|
"learning_rate": 3.910818551184398e-05, |
|
"loss": 0.4047, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.5097039796118406, |
|
"grad_norm": 1.0755215883255005, |
|
"learning_rate": 3.904442626212733e-05, |
|
"loss": 0.4225, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.5175455792981767, |
|
"grad_norm": 1.0713340044021606, |
|
"learning_rate": 3.8980533258081684e-05, |
|
"loss": 0.4115, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.525387178984513, |
|
"grad_norm": 0.9300441145896912, |
|
"learning_rate": 3.891650710820225e-05, |
|
"loss": 0.4398, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.533228778670849, |
|
"grad_norm": 0.9100314378738403, |
|
"learning_rate": 3.885234842225225e-05, |
|
"loss": 0.4414, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.541070378357185, |
|
"grad_norm": 1.015506386756897, |
|
"learning_rate": 3.8788057811257126e-05, |
|
"loss": 0.4109, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.5489119780435208, |
|
"grad_norm": 1.0658026933670044, |
|
"learning_rate": 3.8723635887498766e-05, |
|
"loss": 0.4209, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.556753577729857, |
|
"grad_norm": 0.9921278953552246, |
|
"learning_rate": 3.865908326450961e-05, |
|
"loss": 0.4244, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.564595177416193, |
|
"grad_norm": 0.968429684638977, |
|
"learning_rate": 3.8594400557066835e-05, |
|
"loss": 0.4278, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.5724367771025287, |
|
"grad_norm": 1.021282434463501, |
|
"learning_rate": 3.852958838118649e-05, |
|
"loss": 0.4253, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.580278376788865, |
|
"grad_norm": 1.0124115943908691, |
|
"learning_rate": 3.846464735411767e-05, |
|
"loss": 0.4198, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.588119976475201, |
|
"grad_norm": 1.1336549520492554, |
|
"learning_rate": 3.839957809433657e-05, |
|
"loss": 0.4206, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.595961576161537, |
|
"grad_norm": 0.9238972663879395, |
|
"learning_rate": 3.833438122154065e-05, |
|
"loss": 0.4468, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.6038031758478732, |
|
"grad_norm": 0.9685710072517395, |
|
"learning_rate": 3.826905735664269e-05, |
|
"loss": 0.4393, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.611644775534209, |
|
"grad_norm": 1.1224950551986694, |
|
"learning_rate": 3.820360712176494e-05, |
|
"loss": 0.4407, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.619486375220545, |
|
"grad_norm": 0.9622184634208679, |
|
"learning_rate": 3.81380311402331e-05, |
|
"loss": 0.4289, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.6273279749068807, |
|
"grad_norm": 0.9730106592178345, |
|
"learning_rate": 3.807233003657049e-05, |
|
"loss": 0.4153, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.635169574593217, |
|
"grad_norm": 1.0938421487808228, |
|
"learning_rate": 3.800650443649203e-05, |
|
"loss": 0.4161, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.643011174279553, |
|
"grad_norm": 0.9540886282920837, |
|
"learning_rate": 3.794055496689829e-05, |
|
"loss": 0.4333, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.650852773965889, |
|
"grad_norm": 0.986900806427002, |
|
"learning_rate": 3.787448225586954e-05, |
|
"loss": 0.4217, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.6586943736522253, |
|
"grad_norm": 0.9545695781707764, |
|
"learning_rate": 3.780828693265976e-05, |
|
"loss": 0.4456, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.666535973338561, |
|
"grad_norm": 0.9835594892501831, |
|
"learning_rate": 3.7741969627690656e-05, |
|
"loss": 0.4397, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.674377573024897, |
|
"grad_norm": 0.9485548734664917, |
|
"learning_rate": 3.7675530972545636e-05, |
|
"loss": 0.4246, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 2.6822191727112332, |
|
"grad_norm": 0.9652374386787415, |
|
"learning_rate": 3.76089715999638e-05, |
|
"loss": 0.442, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 2.690060772397569, |
|
"grad_norm": 1.1110929250717163, |
|
"learning_rate": 3.7542292143833926e-05, |
|
"loss": 0.4426, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 2.697902372083905, |
|
"grad_norm": 1.0148861408233643, |
|
"learning_rate": 3.747549323918844e-05, |
|
"loss": 0.4619, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 2.705743971770241, |
|
"grad_norm": 1.0639394521713257, |
|
"learning_rate": 3.7408575522197334e-05, |
|
"loss": 0.4284, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.7135855714565773, |
|
"grad_norm": 1.1294188499450684, |
|
"learning_rate": 3.734153963016215e-05, |
|
"loss": 0.422, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 2.721427171142913, |
|
"grad_norm": 1.133299708366394, |
|
"learning_rate": 3.727438620150985e-05, |
|
"loss": 0.4356, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 2.729268770829249, |
|
"grad_norm": 1.0103888511657715, |
|
"learning_rate": 3.720711587578682e-05, |
|
"loss": 0.4293, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 2.7371103705155853, |
|
"grad_norm": 0.9841229319572449, |
|
"learning_rate": 3.7139729293652715e-05, |
|
"loss": 0.4314, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.744951970201921, |
|
"grad_norm": 1.0465657711029053, |
|
"learning_rate": 3.7072227096874366e-05, |
|
"loss": 0.4357, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.744951970201921, |
|
"eval_loss": 1.100440263748169, |
|
"eval_runtime": 251.9109, |
|
"eval_samples_per_second": 38.367, |
|
"eval_steps_per_second": 6.395, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.752793569888257, |
|
"grad_norm": 1.0336558818817139, |
|
"learning_rate": 3.7004609928319703e-05, |
|
"loss": 0.4326, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.760635169574593, |
|
"grad_norm": 1.018001675605774, |
|
"learning_rate": 3.693687843195158e-05, |
|
"loss": 0.4249, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.7684767692609293, |
|
"grad_norm": 0.970750093460083, |
|
"learning_rate": 3.686903325282169e-05, |
|
"loss": 0.4395, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.7763183689472655, |
|
"grad_norm": 0.9996511936187744, |
|
"learning_rate": 3.6801075037064394e-05, |
|
"loss": 0.4237, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.784159968633601, |
|
"grad_norm": 0.9123607873916626, |
|
"learning_rate": 3.673300443189057e-05, |
|
"loss": 0.4393, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.7920015683199373, |
|
"grad_norm": 0.979560375213623, |
|
"learning_rate": 3.666482208558147e-05, |
|
"loss": 0.4446, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.7998431680062734, |
|
"grad_norm": 1.0506279468536377, |
|
"learning_rate": 3.659652864748253e-05, |
|
"loss": 0.4172, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.807684767692609, |
|
"grad_norm": 1.0336925983428955, |
|
"learning_rate": 3.652812476799716e-05, |
|
"loss": 0.4408, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.8155263673789452, |
|
"grad_norm": 1.0965049266815186, |
|
"learning_rate": 3.645961109858062e-05, |
|
"loss": 0.4439, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.8233679670652814, |
|
"grad_norm": 0.9744886159896851, |
|
"learning_rate": 3.6390988291733704e-05, |
|
"loss": 0.4397, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.8312095667516175, |
|
"grad_norm": 0.925773561000824, |
|
"learning_rate": 3.6322257000996665e-05, |
|
"loss": 0.4444, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.839051166437953, |
|
"grad_norm": 1.040789008140564, |
|
"learning_rate": 3.6253417880942896e-05, |
|
"loss": 0.442, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.8468927661242893, |
|
"grad_norm": 1.1857627630233765, |
|
"learning_rate": 3.61844715871727e-05, |
|
"loss": 0.4325, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.8547343658106255, |
|
"grad_norm": 0.9822872281074524, |
|
"learning_rate": 3.6115418776307084e-05, |
|
"loss": 0.4277, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.862575965496961, |
|
"grad_norm": 0.9385848045349121, |
|
"learning_rate": 3.6046260105981485e-05, |
|
"loss": 0.421, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.8704175651832973, |
|
"grad_norm": 1.0122392177581787, |
|
"learning_rate": 3.5976996234839505e-05, |
|
"loss": 0.4475, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.8782591648696334, |
|
"grad_norm": 1.0014116764068604, |
|
"learning_rate": 3.590762782252665e-05, |
|
"loss": 0.4499, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.8861007645559695, |
|
"grad_norm": 0.9408068656921387, |
|
"learning_rate": 3.583815552968403e-05, |
|
"loss": 0.4349, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.8939423642423057, |
|
"grad_norm": 1.007498025894165, |
|
"learning_rate": 3.5768580017942096e-05, |
|
"loss": 0.4479, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.9017839639286414, |
|
"grad_norm": 1.084996223449707, |
|
"learning_rate": 3.569890194991429e-05, |
|
"loss": 0.4439, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.9096255636149775, |
|
"grad_norm": 1.1018431186676025, |
|
"learning_rate": 3.56291219891908e-05, |
|
"loss": 0.454, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.9174671633013136, |
|
"grad_norm": 1.0079305171966553, |
|
"learning_rate": 3.555924080033218e-05, |
|
"loss": 0.4449, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.9253087629876493, |
|
"grad_norm": 1.0269763469696045, |
|
"learning_rate": 3.5489259048863054e-05, |
|
"loss": 0.4339, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.9331503626739854, |
|
"grad_norm": 0.9952816963195801, |
|
"learning_rate": 3.5419177401265784e-05, |
|
"loss": 0.4551, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.9409919623603216, |
|
"grad_norm": 0.8597270846366882, |
|
"learning_rate": 3.5348996524974086e-05, |
|
"loss": 0.4441, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.9488335620466577, |
|
"grad_norm": 0.9447600245475769, |
|
"learning_rate": 3.5278717088366703e-05, |
|
"loss": 0.4505, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.9566751617329934, |
|
"grad_norm": 0.9347754120826721, |
|
"learning_rate": 3.520833976076104e-05, |
|
"loss": 0.4189, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.9645167614193295, |
|
"grad_norm": 1.0394740104675293, |
|
"learning_rate": 3.5137865212406785e-05, |
|
"loss": 0.4533, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.9723583611056656, |
|
"grad_norm": 0.9572747945785522, |
|
"learning_rate": 3.506729411447951e-05, |
|
"loss": 0.4338, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.9801999607920013, |
|
"grad_norm": 1.0237895250320435, |
|
"learning_rate": 3.49966271390743e-05, |
|
"loss": 0.4345, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.9880415604783375, |
|
"grad_norm": 1.1056692600250244, |
|
"learning_rate": 3.492586495919934e-05, |
|
"loss": 0.4607, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.9958831601646736, |
|
"grad_norm": 1.1012498140335083, |
|
"learning_rate": 3.485500824876953e-05, |
|
"loss": 0.4491, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 3.0037247598510097, |
|
"grad_norm": 1.2760201692581177, |
|
"learning_rate": 3.47840576826e-05, |
|
"loss": 0.3312, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 3.0115663595373454, |
|
"grad_norm": 0.9876531958580017, |
|
"learning_rate": 3.471301393639979e-05, |
|
"loss": 0.1973, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 3.0194079592236815, |
|
"grad_norm": 0.9299415349960327, |
|
"learning_rate": 3.46418776867653e-05, |
|
"loss": 0.1983, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 3.0272495589100177, |
|
"grad_norm": 0.9327115416526794, |
|
"learning_rate": 3.457064961117396e-05, |
|
"loss": 0.1952, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 3.035091158596354, |
|
"grad_norm": 0.956949770450592, |
|
"learning_rate": 3.449933038797765e-05, |
|
"loss": 0.1963, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 3.0429327582826895, |
|
"grad_norm": 0.9961047768592834, |
|
"learning_rate": 3.4427920696396355e-05, |
|
"loss": 0.196, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 3.0507743579690256, |
|
"grad_norm": 0.9145923852920532, |
|
"learning_rate": 3.435642121651165e-05, |
|
"loss": 0.1983, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 3.0586159576553618, |
|
"grad_norm": 0.8782720565795898, |
|
"learning_rate": 3.42848326292602e-05, |
|
"loss": 0.1929, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 3.066457557341698, |
|
"grad_norm": 0.9555357694625854, |
|
"learning_rate": 3.421315561642732e-05, |
|
"loss": 0.2011, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 3.0742991570280336, |
|
"grad_norm": 0.8813580274581909, |
|
"learning_rate": 3.4141390860640454e-05, |
|
"loss": 0.1986, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 3.0821407567143697, |
|
"grad_norm": 0.9723597168922424, |
|
"learning_rate": 3.406953904536266e-05, |
|
"loss": 0.2079, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 3.089982356400706, |
|
"grad_norm": 0.9829353094100952, |
|
"learning_rate": 3.399760085488615e-05, |
|
"loss": 0.2018, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 3.0978239560870415, |
|
"grad_norm": 0.9612635970115662, |
|
"learning_rate": 3.392557697432573e-05, |
|
"loss": 0.1948, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 3.1056655557733777, |
|
"grad_norm": 1.003859043121338, |
|
"learning_rate": 3.385346808961229e-05, |
|
"loss": 0.2051, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 3.113507155459714, |
|
"grad_norm": 0.881211519241333, |
|
"learning_rate": 3.378127488748627e-05, |
|
"loss": 0.1965, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 3.12134875514605, |
|
"grad_norm": 1.045480728149414, |
|
"learning_rate": 3.3708998055491114e-05, |
|
"loss": 0.2073, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 3.1291903548323856, |
|
"grad_norm": 0.9101377725601196, |
|
"learning_rate": 3.363663828196675e-05, |
|
"loss": 0.2, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 3.1370319545187217, |
|
"grad_norm": 0.9816673398017883, |
|
"learning_rate": 3.3564196256043e-05, |
|
"loss": 0.2055, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.1370319545187217, |
|
"eval_loss": 1.250553846359253, |
|
"eval_runtime": 246.4616, |
|
"eval_samples_per_second": 39.215, |
|
"eval_steps_per_second": 6.537, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 10200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 8, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.911561497694909e+19, |
|
"train_batch_size": 6, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|