{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9404915912031049,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
    {
      "epoch": 0.0129366106080207,
      "grad_norm": 4.402749538421631,
      "learning_rate": 3.8809831824062096e-07,
      "loss": 1.5887,
      "step": 10
    },
    {
      "epoch": 0.0258732212160414,
      "grad_norm": 2.115481376647949,
      "learning_rate": 7.761966364812419e-07,
      "loss": 1.6039,
      "step": 20
    },
    {
      "epoch": 0.03880983182406209,
      "grad_norm": 4.390326023101807,
      "learning_rate": 1.1642949547218628e-06,
      "loss": 1.2713,
      "step": 30
    },
    {
      "epoch": 0.0517464424320828,
      "grad_norm": 2.4916300773620605,
      "learning_rate": 1.5523932729624839e-06,
      "loss": 1.4527,
      "step": 40
    },
    {
      "epoch": 0.0646830530401035,
      "grad_norm": 1.259100317955017,
      "learning_rate": 1.940491591203105e-06,
      "loss": 1.3702,
      "step": 50
    },
    {
      "epoch": 0.07761966364812418,
      "grad_norm": 4.703212738037109,
      "learning_rate": 2.3285899094437256e-06,
      "loss": 1.6409,
      "step": 60
    },
    {
      "epoch": 0.09055627425614489,
      "grad_norm": 6.5729146003723145,
      "learning_rate": 2.7166882276843466e-06,
      "loss": 1.6568,
      "step": 70
    },
    {
      "epoch": 0.1034928848641656,
      "grad_norm": 4.498919486999512,
      "learning_rate": 3.1047865459249677e-06,
      "loss": 1.4528,
      "step": 80
    },
    {
      "epoch": 0.11642949547218628,
      "grad_norm": 1.7919808626174927,
      "learning_rate": 3.492884864165589e-06,
      "loss": 1.3359,
      "step": 90
    },
    {
      "epoch": 0.129366106080207,
      "grad_norm": 1.8573850393295288,
      "learning_rate": 3.88098318240621e-06,
      "loss": 1.0611,
      "step": 100
    },
    {
      "epoch": 0.1423027166882277,
      "grad_norm": 1.8852155208587646,
      "learning_rate": 4.2690815006468305e-06,
      "loss": 0.8434,
      "step": 110
    },
    {
      "epoch": 0.15523932729624837,
      "grad_norm": 1.1542340517044067,
      "learning_rate": 4.657179818887451e-06,
      "loss": 0.839,
      "step": 120
    },
    {
      "epoch": 0.16817593790426907,
      "grad_norm": 2.5517771244049072,
      "learning_rate": 5.045278137128073e-06,
      "loss": 0.7732,
      "step": 130
    },
    {
      "epoch": 0.18111254851228978,
      "grad_norm": 2.299227476119995,
      "learning_rate": 5.433376455368693e-06,
      "loss": 0.7517,
      "step": 140
    },
    {
      "epoch": 0.19404915912031048,
      "grad_norm": 1.0255972146987915,
      "learning_rate": 5.821474773609315e-06,
      "loss": 0.74,
      "step": 150
    },
    {
      "epoch": 0.2069857697283312,
      "grad_norm": 6.213005542755127,
      "learning_rate": 6.2095730918499354e-06,
      "loss": 0.6561,
      "step": 160
    },
    {
      "epoch": 0.21992238033635186,
      "grad_norm": 1.065950632095337,
      "learning_rate": 6.597671410090556e-06,
      "loss": 0.6735,
      "step": 170
    },
    {
      "epoch": 0.23285899094437257,
      "grad_norm": 1.7846940755844116,
      "learning_rate": 6.985769728331178e-06,
      "loss": 0.5362,
      "step": 180
    },
    {
      "epoch": 0.24579560155239327,
      "grad_norm": 1.3279622793197632,
      "learning_rate": 7.373868046571798e-06,
      "loss": 0.6241,
      "step": 190
    },
    {
      "epoch": 0.258732212160414,
      "grad_norm": 2.3642659187316895,
      "learning_rate": 7.76196636481242e-06,
      "loss": 0.5634,
      "step": 200
    },
    {
      "epoch": 0.2716688227684347,
      "grad_norm": 1.5506231784820557,
      "learning_rate": 8.15006468305304e-06,
      "loss": 0.4842,
      "step": 210
    },
    {
      "epoch": 0.2846054333764554,
      "grad_norm": 1.9322525262832642,
      "learning_rate": 8.538163001293661e-06,
      "loss": 0.5793,
      "step": 220
    },
    {
      "epoch": 0.2975420439844761,
      "grad_norm": 2.907057523727417,
      "learning_rate": 8.926261319534283e-06,
      "loss": 0.6017,
      "step": 230
    },
    {
      "epoch": 0.31047865459249674,
      "grad_norm": 1.1939823627471924,
      "learning_rate": 9.314359637774902e-06,
      "loss": 0.5463,
      "step": 240
    },
    {
      "epoch": 0.32341526520051744,
      "grad_norm": 1.6144232749938965,
      "learning_rate": 9.702457956015523e-06,
      "loss": 0.5068,
      "step": 250
    },
    {
      "epoch": 0.33635187580853815,
      "grad_norm": 1.599927306175232,
      "learning_rate": 1.0090556274256145e-05,
      "loss": 0.4741,
      "step": 260
    },
    {
      "epoch": 0.34928848641655885,
      "grad_norm": 1.0453033447265625,
      "learning_rate": 1.0478654592496766e-05,
      "loss": 0.444,
      "step": 270
    },
    {
      "epoch": 0.36222509702457956,
      "grad_norm": 1.6979962587356567,
      "learning_rate": 1.0866752910737387e-05,
      "loss": 0.5465,
      "step": 280
    },
    {
      "epoch": 0.37516170763260026,
      "grad_norm": 1.794843316078186,
      "learning_rate": 1.1254851228978009e-05,
      "loss": 0.4947,
      "step": 290
    },
    {
      "epoch": 0.38809831824062097,
      "grad_norm": 1.308774709701538,
      "learning_rate": 1.164294954721863e-05,
      "loss": 0.5153,
      "step": 300
    },
    {
      "epoch": 0.40103492884864167,
      "grad_norm": 1.2728034257888794,
      "learning_rate": 1.203104786545925e-05,
      "loss": 0.5684,
      "step": 310
    },
    {
      "epoch": 0.4139715394566624,
      "grad_norm": 1.312259316444397,
      "learning_rate": 1.2419146183699871e-05,
      "loss": 0.4749,
      "step": 320
    },
    {
      "epoch": 0.4269081500646831,
      "grad_norm": 1.1889984607696533,
      "learning_rate": 1.2807244501940493e-05,
      "loss": 0.4884,
      "step": 330
    },
    {
      "epoch": 0.4398447606727037,
      "grad_norm": 1.4888752698898315,
      "learning_rate": 1.3195342820181112e-05,
      "loss": 0.4353,
      "step": 340
    },
    {
      "epoch": 0.45278137128072443,
      "grad_norm": 1.481899380683899,
      "learning_rate": 1.3583441138421733e-05,
      "loss": 0.4649,
      "step": 350
    },
    {
      "epoch": 0.46571798188874514,
      "grad_norm": 1.5792471170425415,
      "learning_rate": 1.3971539456662355e-05,
      "loss": 0.4314,
      "step": 360
    },
    {
      "epoch": 0.47865459249676584,
      "grad_norm": 6.622572422027588,
      "learning_rate": 1.4359637774902976e-05,
      "loss": 0.6105,
      "step": 370
    },
    {
      "epoch": 0.49159120310478654,
      "grad_norm": 2.20646333694458,
      "learning_rate": 1.4747736093143596e-05,
      "loss": 0.5326,
      "step": 380
    },
    {
      "epoch": 0.5045278137128072,
      "grad_norm": 1.741729974746704,
      "learning_rate": 1.5135834411384215e-05,
      "loss": 0.5013,
      "step": 390
    },
    {
      "epoch": 0.517464424320828,
      "grad_norm": 1.5974177122116089,
      "learning_rate": 1.552393272962484e-05,
      "loss": 0.4441,
      "step": 400
    },
    {
      "epoch": 0.5304010349288486,
      "grad_norm": 1.5022289752960205,
      "learning_rate": 1.591203104786546e-05,
      "loss": 0.477,
      "step": 410
    },
    {
      "epoch": 0.5433376455368694,
      "grad_norm": 2.8963606357574463,
      "learning_rate": 1.630012936610608e-05,
      "loss": 0.4674,
      "step": 420
    },
    {
      "epoch": 0.55627425614489,
      "grad_norm": 1.0391566753387451,
      "learning_rate": 1.66882276843467e-05,
      "loss": 0.4624,
      "step": 430
    },
    {
      "epoch": 0.5692108667529108,
      "grad_norm": 2.012328624725342,
      "learning_rate": 1.7076326002587322e-05,
      "loss": 0.4798,
      "step": 440
    },
    {
      "epoch": 0.5821474773609314,
      "grad_norm": 1.6585012674331665,
      "learning_rate": 1.7464424320827943e-05,
      "loss": 0.3837,
      "step": 450
    },
    {
      "epoch": 0.5950840879689522,
      "grad_norm": 2.2663848400115967,
      "learning_rate": 1.7852522639068567e-05,
      "loss": 0.4888,
      "step": 460
    },
    {
      "epoch": 0.6080206985769728,
      "grad_norm": 1.234695553779602,
      "learning_rate": 1.8240620957309184e-05,
      "loss": 0.3898,
      "step": 470
    },
    {
      "epoch": 0.6209573091849935,
      "grad_norm": 1.9593554735183716,
      "learning_rate": 1.8628719275549805e-05,
      "loss": 0.4126,
      "step": 480
    },
    {
      "epoch": 0.6338939197930142,
      "grad_norm": 1.539140224456787,
      "learning_rate": 1.901681759379043e-05,
      "loss": 0.3865,
      "step": 490
    },
    {
      "epoch": 0.6468305304010349,
      "grad_norm": 1.70127534866333,
      "learning_rate": 1.9404915912031046e-05,
      "loss": 0.3849,
      "step": 500
    },
    {
      "epoch": 0.6468305304010349,
      "eval_loss": 0.49836617708206177,
      "eval_runtime": 53.9941,
      "eval_samples_per_second": 3.704,
      "eval_steps_per_second": 1.852,
      "step": 500
    },
    {
      "epoch": 0.6597671410090556,
      "grad_norm": 1.2516582012176514,
      "learning_rate": 1.979301423027167e-05,
      "loss": 0.4551,
      "step": 510
    },
    {
      "epoch": 0.6727037516170763,
      "grad_norm": 2.031634569168091,
      "learning_rate": 2.018111254851229e-05,
      "loss": 0.4455,
      "step": 520
    },
    {
      "epoch": 0.685640362225097,
      "grad_norm": 1.4083653688430786,
      "learning_rate": 2.056921086675291e-05,
      "loss": 0.3805,
      "step": 530
    },
    {
      "epoch": 0.6985769728331177,
      "grad_norm": 1.8752052783966064,
      "learning_rate": 2.0957309184993532e-05,
      "loss": 0.4295,
      "step": 540
    },
    {
      "epoch": 0.7115135834411385,
      "grad_norm": 2.337958574295044,
      "learning_rate": 2.1345407503234156e-05,
      "loss": 0.3906,
      "step": 550
    },
    {
      "epoch": 0.7244501940491591,
      "grad_norm": 1.7030471563339233,
      "learning_rate": 2.1733505821474773e-05,
      "loss": 0.4151,
      "step": 560
    },
    {
      "epoch": 0.7373868046571799,
      "grad_norm": 2.75461745262146,
      "learning_rate": 2.2121604139715397e-05,
      "loss": 0.4353,
      "step": 570
    },
    {
      "epoch": 0.7503234152652005,
      "grad_norm": 1.3487416505813599,
      "learning_rate": 2.2509702457956018e-05,
      "loss": 0.3256,
      "step": 580
    },
    {
      "epoch": 0.7632600258732212,
      "grad_norm": 2.2425625324249268,
      "learning_rate": 2.2897800776196635e-05,
      "loss": 0.3754,
      "step": 590
    },
    {
      "epoch": 0.7761966364812419,
      "grad_norm": 2.3429338932037354,
      "learning_rate": 2.328589909443726e-05,
      "loss": 0.4642,
      "step": 600
    },
    {
      "epoch": 0.7891332470892626,
      "grad_norm": 1.7922463417053223,
      "learning_rate": 2.3673997412677876e-05,
      "loss": 0.4097,
      "step": 610
    },
    {
      "epoch": 0.8020698576972833,
      "grad_norm": 1.4449383020401,
      "learning_rate": 2.40620957309185e-05,
      "loss": 0.4068,
      "step": 620
    },
    {
      "epoch": 0.815006468305304,
      "grad_norm": 2.2355751991271973,
      "learning_rate": 2.445019404915912e-05,
      "loss": 0.4196,
      "step": 630
    },
    {
      "epoch": 0.8279430789133247,
      "grad_norm": 1.9109554290771484,
      "learning_rate": 2.4838292367399742e-05,
      "loss": 0.4531,
      "step": 640
    },
    {
      "epoch": 0.8408796895213454,
      "grad_norm": 2.0991272926330566,
      "learning_rate": 2.5226390685640362e-05,
      "loss": 0.4161,
      "step": 650
    },
    {
      "epoch": 0.8538163001293662,
      "grad_norm": 3.535731554031372,
      "learning_rate": 2.5614489003880986e-05,
      "loss": 0.3607,
      "step": 660
    },
    {
      "epoch": 0.8667529107373868,
      "grad_norm": 1.3505184650421143,
      "learning_rate": 2.6002587322121604e-05,
      "loss": 0.401,
      "step": 670
    },
    {
      "epoch": 0.8796895213454075,
      "grad_norm": 1.0390079021453857,
      "learning_rate": 2.6390685640362224e-05,
      "loss": 0.411,
      "step": 680
    },
    {
      "epoch": 0.8926261319534282,
      "grad_norm": 4.166248798370361,
      "learning_rate": 2.677878395860285e-05,
      "loss": 0.456,
      "step": 690
    },
    {
      "epoch": 0.9055627425614489,
      "grad_norm": 1.9140669107437134,
      "learning_rate": 2.7166882276843466e-05,
      "loss": 0.4109,
      "step": 700
    },
    {
      "epoch": 0.9184993531694696,
      "grad_norm": 1.4948009252548218,
      "learning_rate": 2.755498059508409e-05,
      "loss": 0.4113,
      "step": 710
    },
    {
      "epoch": 0.9314359637774903,
      "grad_norm": 1.2608532905578613,
      "learning_rate": 2.794307891332471e-05,
      "loss": 0.4074,
      "step": 720
    },
    {
      "epoch": 0.944372574385511,
      "grad_norm": 1.6317986249923706,
      "learning_rate": 2.833117723156533e-05,
      "loss": 0.4994,
      "step": 730
    },
    {
      "epoch": 0.9573091849935317,
      "grad_norm": 2.5210256576538086,
      "learning_rate": 2.871927554980595e-05,
      "loss": 0.3803,
      "step": 740
    },
    {
      "epoch": 0.9702457956015524,
      "grad_norm": 1.3882018327713013,
      "learning_rate": 2.9107373868046576e-05,
      "loss": 0.3962,
      "step": 750
    },
    {
      "epoch": 0.9831824062095731,
      "grad_norm": 2.999624013900757,
      "learning_rate": 2.9495472186287193e-05,
      "loss": 0.3772,
      "step": 760
    },
    {
      "epoch": 0.9961190168175937,
      "grad_norm": 1.3309364318847656,
      "learning_rate": 2.9883570504527814e-05,
      "loss": 0.3445,
      "step": 770
    },
    {
      "epoch": 1.0090556274256144,
      "grad_norm": 1.9435688257217407,
      "learning_rate": 2.99996206183792e-05,
      "loss": 0.3399,
      "step": 780
    },
    {
      "epoch": 1.0219922380336353,
      "grad_norm": 1.2193248271942139,
      "learning_rate": 2.999776246888373e-05,
      "loss": 0.3159,
      "step": 790
    },
    {
      "epoch": 1.034928848641656,
      "grad_norm": 1.2065479755401611,
      "learning_rate": 2.999435606075697e-05,
      "loss": 0.3086,
      "step": 800
    },
    {
      "epoch": 1.0478654592496766,
      "grad_norm": 2.735750198364258,
      "learning_rate": 2.9989401745651436e-05,
      "loss": 0.3718,
      "step": 810
    },
    {
      "epoch": 1.0608020698576972,
      "grad_norm": 2.377659320831299,
      "learning_rate": 2.998290003501415e-05,
      "loss": 0.2935,
      "step": 820
    },
    {
      "epoch": 1.073738680465718,
      "grad_norm": 1.961264967918396,
      "learning_rate": 2.997485160003388e-05,
      "loss": 0.3441,
      "step": 830
    },
    {
      "epoch": 1.0866752910737387,
      "grad_norm": 1.8460434675216675,
      "learning_rate": 2.9965257271571833e-05,
      "loss": 0.3485,
      "step": 840
    },
    {
      "epoch": 1.0996119016817594,
      "grad_norm": 1.7050330638885498,
      "learning_rate": 2.995411804007586e-05,
      "loss": 0.3739,
      "step": 850
    },
    {
      "epoch": 1.11254851228978,
      "grad_norm": 2.2543892860412598,
      "learning_rate": 2.994143505547826e-05,
      "loss": 0.307,
      "step": 860
    },
    {
      "epoch": 1.1254851228978007,
      "grad_norm": 3.6111979484558105,
      "learning_rate": 2.9927209627077032e-05,
      "loss": 0.2837,
      "step": 870
    },
    {
      "epoch": 1.1384217335058215,
      "grad_norm": 1.4579052925109863,
      "learning_rate": 2.9911443223400712e-05,
      "loss": 0.2957,
      "step": 880
    },
    {
      "epoch": 1.1513583441138422,
      "grad_norm": 2.5956053733825684,
      "learning_rate": 2.9894137472056805e-05,
      "loss": 0.2659,
      "step": 890
    },
    {
      "epoch": 1.1642949547218628,
      "grad_norm": 2.876161813735962,
      "learning_rate": 2.9875294159563723e-05,
      "loss": 0.3203,
      "step": 900
    },
    {
      "epoch": 1.1772315653298835,
      "grad_norm": 2.269209384918213,
      "learning_rate": 2.9854915231166383e-05,
      "loss": 0.3527,
      "step": 910
    },
    {
      "epoch": 1.1901681759379044,
      "grad_norm": 1.9031386375427246,
      "learning_rate": 2.983300279063539e-05,
      "loss": 0.3527,
      "step": 920
    },
    {
      "epoch": 1.203104786545925,
      "grad_norm": 1.621424913406372,
      "learning_rate": 2.9809559100049852e-05,
      "loss": 0.2666,
      "step": 930
    },
    {
      "epoch": 1.2160413971539457,
      "grad_norm": 4.12821626663208,
      "learning_rate": 2.9784586579563867e-05,
      "loss": 0.2957,
      "step": 940
    },
    {
      "epoch": 1.2289780077619663,
      "grad_norm": 2.230564832687378,
      "learning_rate": 2.9758087807156683e-05,
      "loss": 0.3305,
      "step": 950
    },
    {
      "epoch": 1.2419146183699872,
      "grad_norm": 2.618791103363037,
      "learning_rate": 2.9730065518366562e-05,
      "loss": 0.3508,
      "step": 960
    },
    {
      "epoch": 1.2548512289780078,
      "grad_norm": 2.047571897506714,
      "learning_rate": 2.9700522606008392e-05,
      "loss": 0.2711,
      "step": 970
    },
    {
      "epoch": 1.2677878395860285,
      "grad_norm": 2.2815372943878174,
      "learning_rate": 2.9669462119875037e-05,
      "loss": 0.3719,
      "step": 980
    },
    {
      "epoch": 1.2807244501940491,
      "grad_norm": 2.5117783546447754,
      "learning_rate": 2.963688726642252e-05,
      "loss": 0.2837,
      "step": 990
    },
    {
      "epoch": 1.2936610608020698,
      "grad_norm": 1.2821422815322876,
      "learning_rate": 2.9602801408439003e-05,
      "loss": 0.3029,
      "step": 1000
    },
    {
      "epoch": 1.2936610608020698,
      "eval_loss": 0.46682319045066833,
      "eval_runtime": 53.9865,
      "eval_samples_per_second": 3.705,
      "eval_steps_per_second": 1.852,
      "step": 1000
    },
    {
      "epoch": 1.3065976714100906,
      "grad_norm": 1.465987205505371,
      "learning_rate": 2.956720806469762e-05,
      "loss": 0.3424,
      "step": 1010
    },
    {
      "epoch": 1.3195342820181113,
      "grad_norm": 2.9073352813720703,
      "learning_rate": 2.9530110909593264e-05,
      "loss": 0.3734,
      "step": 1020
    },
    {
      "epoch": 1.332470892626132,
      "grad_norm": 1.9318575859069824,
      "learning_rate": 2.949151377276323e-05,
      "loss": 0.3153,
      "step": 1030
    },
    {
      "epoch": 1.3454075032341526,
      "grad_norm": 2.4026870727539062,
      "learning_rate": 2.94514206386919e-05,
      "loss": 0.2888,
      "step": 1040
    },
    {
      "epoch": 1.3583441138421732,
      "grad_norm": 1.4837349653244019,
      "learning_rate": 2.9409835646299393e-05,
      "loss": 0.221,
      "step": 1050
    },
    {
      "epoch": 1.371280724450194,
      "grad_norm": 1.4667383432388306,
      "learning_rate": 2.9366763088514306e-05,
      "loss": 0.2935,
      "step": 1060
    },
    {
      "epoch": 1.3842173350582148,
      "grad_norm": 1.7328715324401855,
      "learning_rate": 2.932220741183055e-05,
      "loss": 0.3634,
      "step": 1070
    },
    {
      "epoch": 1.3971539456662354,
      "grad_norm": 1.5297213792800903,
      "learning_rate": 2.9276173215848297e-05,
      "loss": 0.282,
      "step": 1080
    },
    {
      "epoch": 1.4100905562742563,
      "grad_norm": 1.0233724117279053,
      "learning_rate": 2.9228665252799187e-05,
      "loss": 0.3941,
      "step": 1090
    },
    {
      "epoch": 1.4230271668822767,
      "grad_norm": 1.3520108461380005,
      "learning_rate": 2.917968842705572e-05,
      "loss": 0.3211,
      "step": 1100
    },
    {
      "epoch": 1.4359637774902976,
      "grad_norm": 2.3806064128875732,
      "learning_rate": 2.9129247794624977e-05,
      "loss": 0.2752,
      "step": 1110
    },
    {
      "epoch": 1.4489003880983182,
      "grad_norm": 1.543283224105835,
      "learning_rate": 2.907734856262666e-05,
      "loss": 0.3198,
      "step": 1120
    },
    {
      "epoch": 1.4618369987063389,
      "grad_norm": 4.661839962005615,
      "learning_rate": 2.9023996088755573e-05,
      "loss": 0.3588,
      "step": 1130
    },
    {
      "epoch": 1.4747736093143597,
      "grad_norm": 0.8752655386924744,
      "learning_rate": 2.8969195880728497e-05,
      "loss": 0.3085,
      "step": 1140
    },
    {
      "epoch": 1.4877102199223804,
      "grad_norm": 1.2036000490188599,
      "learning_rate": 2.891295359571565e-05,
      "loss": 0.2737,
      "step": 1150
    },
    {
      "epoch": 1.500646830530401,
      "grad_norm": 1.5076147317886353,
      "learning_rate": 2.8855275039756653e-05,
      "loss": 0.2985,
      "step": 1160
    },
    {
      "epoch": 1.5135834411384217,
      "grad_norm": 1.462025761604309,
      "learning_rate": 2.879616616716118e-05,
      "loss": 0.3275,
      "step": 1170
    },
    {
      "epoch": 1.5265200517464423,
      "grad_norm": 3.9805407524108887,
      "learning_rate": 2.873563307989427e-05,
      "loss": 0.267,
      "step": 1180
    },
    {
      "epoch": 1.5394566623544632,
      "grad_norm": 1.7805012464523315,
      "learning_rate": 2.8673682026946385e-05,
      "loss": 0.2511,
      "step": 1190
    },
    {
      "epoch": 1.5523932729624839,
      "grad_norm": 2.0648598670959473,
      "learning_rate": 2.8610319403688362e-05,
      "loss": 0.3148,
      "step": 1200
    },
    {
      "epoch": 1.5653298835705045,
      "grad_norm": 1.9050356149673462,
      "learning_rate": 2.854555175121114e-05,
      "loss": 0.3273,
      "step": 1210
    },
    {
      "epoch": 1.5782664941785254,
      "grad_norm": 2.525097370147705,
      "learning_rate": 2.847938575565055e-05,
      "loss": 0.2969,
      "step": 1220
    },
    {
      "epoch": 1.5912031047865458,
      "grad_norm": 2.5983824729919434,
      "learning_rate": 2.8411828247497072e-05,
      "loss": 0.2368,
      "step": 1230
    },
    {
      "epoch": 1.6041397153945667,
      "grad_norm": 1.126298189163208,
      "learning_rate": 2.83428862008907e-05,
      "loss": 0.2437,
      "step": 1240
    },
    {
      "epoch": 1.6170763260025873,
      "grad_norm": 2.4499635696411133,
      "learning_rate": 2.827256673290099e-05,
      "loss": 0.269,
      "step": 1250
    },
    {
      "epoch": 1.630012936610608,
      "grad_norm": 1.302224040031433,
      "learning_rate": 2.8200877102792354e-05,
      "loss": 0.2963,
      "step": 1260
    },
    {
      "epoch": 1.6429495472186288,
      "grad_norm": 3.0382964611053467,
      "learning_rate": 2.8127824711274648e-05,
      "loss": 0.2381,
      "step": 1270
    },
    {
      "epoch": 1.6558861578266493,
      "grad_norm": 1.313506007194519,
      "learning_rate": 2.8053417099739195e-05,
      "loss": 0.2552,
      "step": 1280
    },
    {
      "epoch": 1.6688227684346701,
      "grad_norm": 1.7527904510498047,
      "learning_rate": 2.7977661949480256e-05,
      "loss": 0.2691,
      "step": 1290
    },
    {
      "epoch": 1.6817593790426908,
      "grad_norm": 2.4050045013427734,
      "learning_rate": 2.7900567080902077e-05,
      "loss": 0.2874,
      "step": 1300
    },
    {
      "epoch": 1.6946959896507114,
      "grad_norm": 1.7919877767562866,
      "learning_rate": 2.7822140452711557e-05,
      "loss": 0.2413,
      "step": 1310
    },
    {
      "epoch": 1.7076326002587323,
      "grad_norm": 2.414724349975586,
      "learning_rate": 2.774239016109666e-05,
      "loss": 0.291,
      "step": 1320
    },
    {
      "epoch": 1.720569210866753,
      "grad_norm": 1.6505646705627441,
      "learning_rate": 2.766132443889063e-05,
      "loss": 0.3146,
      "step": 1330
    },
    {
      "epoch": 1.7335058214747736,
      "grad_norm": 2.0308473110198975,
      "learning_rate": 2.757895165472208e-05,
      "loss": 0.3366,
      "step": 1340
    },
    {
      "epoch": 1.7464424320827943,
      "grad_norm": 1.5665441751480103,
      "learning_rate": 2.7495280312151075e-05,
      "loss": 0.3427,
      "step": 1350
    },
    {
      "epoch": 1.759379042690815,
      "grad_norm": 1.963440179824829,
      "learning_rate": 2.74103190487913e-05,
      "loss": 0.2648,
      "step": 1360
    },
    {
      "epoch": 1.7723156532988358,
      "grad_norm": 2.129335403442383,
      "learning_rate": 2.732407663541837e-05,
      "loss": 0.3138,
      "step": 1370
    },
    {
      "epoch": 1.7852522639068564,
      "grad_norm": 2.803375005722046,
      "learning_rate": 2.7236561975064396e-05,
      "loss": 0.3078,
      "step": 1380
    },
    {
      "epoch": 1.798188874514877,
      "grad_norm": 2.7263729572296143,
      "learning_rate": 2.7147784102098906e-05,
      "loss": 0.269,
      "step": 1390
    },
    {
      "epoch": 1.811125485122898,
      "grad_norm": 2.3668291568756104,
      "learning_rate": 2.7057752181296207e-05,
      "loss": 0.2619,
      "step": 1400
    },
    {
      "epoch": 1.8240620957309184,
      "grad_norm": 2.556081533432007,
      "learning_rate": 2.6966475506889274e-05,
      "loss": 0.3413,
      "step": 1410
    },
    {
      "epoch": 1.8369987063389392,
      "grad_norm": 2.335906744003296,
      "learning_rate": 2.687396350161028e-05,
      "loss": 0.3316,
      "step": 1420
    },
    {
      "epoch": 1.84993531694696,
      "grad_norm": 1.4382126331329346,
      "learning_rate": 2.6780225715717873e-05,
      "loss": 0.2713,
      "step": 1430
    },
    {
      "epoch": 1.8628719275549805,
      "grad_norm": 1.600113868713379,
      "learning_rate": 2.6685271826011267e-05,
      "loss": 0.373,
      "step": 1440
    },
    {
      "epoch": 1.8758085381630014,
      "grad_norm": 5.542492866516113,
      "learning_rate": 2.658911163483128e-05,
      "loss": 0.3024,
      "step": 1450
    },
    {
      "epoch": 1.8887451487710218,
      "grad_norm": 2.238302707672119,
      "learning_rate": 2.649175506904843e-05,
      "loss": 0.2843,
      "step": 1460
    },
    {
      "epoch": 1.9016817593790427,
      "grad_norm": 2.0862436294555664,
      "learning_rate": 2.6393212179038136e-05,
      "loss": 0.3868,
      "step": 1470
    },
    {
      "epoch": 1.9146183699870634,
      "grad_norm": 1.8203462362289429,
      "learning_rate": 2.6293493137643202e-05,
      "loss": 0.2422,
      "step": 1480
    },
    {
      "epoch": 1.927554980595084,
      "grad_norm": 1.6436854600906372,
      "learning_rate": 2.619260823912364e-05,
      "loss": 0.2936,
      "step": 1490
    },
    {
      "epoch": 1.9404915912031049,
      "grad_norm": 1.905551791191101,
      "learning_rate": 2.609056789809399e-05,
      "loss": 0.336,
      "step": 1500
    },
    {
      "epoch": 1.9404915912031049,
      "eval_loss": 0.4133800268173218,
      "eval_runtime": 54.0053,
      "eval_samples_per_second": 3.703,
      "eval_steps_per_second": 1.852,
      "step": 1500
    }
  ],
|
"logging_steps": 10, |
|
"max_steps": 3865, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 8.751317925557699e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |