{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 7.8508341511285575,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.03925417075564279,
"grad_norm": 81.5,
"learning_rate": 8.000000000000001e-07,
"loss": 0.3818,
"step": 10
},
{
"epoch": 0.07850834151128558,
"grad_norm": 322.0,
"learning_rate": 1.6000000000000001e-06,
"loss": 0.367,
"step": 20
},
{
"epoch": 0.11776251226692837,
"grad_norm": 784.0,
"learning_rate": 2.4000000000000003e-06,
"loss": 0.4472,
"step": 30
},
{
"epoch": 0.15701668302257116,
"grad_norm": 350.0,
"learning_rate": 3.2000000000000003e-06,
"loss": 0.6806,
"step": 40
},
{
"epoch": 0.19627085377821393,
"grad_norm": 186.0,
"learning_rate": 4.000000000000001e-06,
"loss": 0.9139,
"step": 50
},
{
"epoch": 0.23552502453385674,
"grad_norm": 199.0,
"learning_rate": 4.800000000000001e-06,
"loss": 0.8205,
"step": 60
},
{
"epoch": 0.2747791952894995,
"grad_norm": 118.0,
"learning_rate": 5.600000000000001e-06,
"loss": 0.9017,
"step": 70
},
{
"epoch": 0.3140333660451423,
"grad_norm": 616.0,
"learning_rate": 6.4000000000000006e-06,
"loss": 3.1966,
"step": 80
},
{
"epoch": 0.35328753680078506,
"grad_norm": 692.0,
"learning_rate": 7.2000000000000005e-06,
"loss": 1.0627,
"step": 90
},
{
"epoch": 0.39254170755642787,
"grad_norm": 75.5,
"learning_rate": 8.000000000000001e-06,
"loss": 7.3782,
"step": 100
},
{
"epoch": 0.43179587831207067,
"grad_norm": 908.0,
"learning_rate": 8.8e-06,
"loss": 1.1091,
"step": 110
},
{
"epoch": 0.47105004906771347,
"grad_norm": 61.5,
"learning_rate": 9.600000000000001e-06,
"loss": 0.9262,
"step": 120
},
{
"epoch": 0.5103042198233563,
"grad_norm": 45.25,
"learning_rate": 1.04e-05,
"loss": 0.6774,
"step": 130
},
{
"epoch": 0.549558390578999,
"grad_norm": 39.75,
"learning_rate": 1.1200000000000001e-05,
"loss": 0.8204,
"step": 140
},
{
"epoch": 0.5888125613346418,
"grad_norm": 38.0,
"learning_rate": 1.2e-05,
"loss": 0.6443,
"step": 150
},
{
"epoch": 0.6280667320902846,
"grad_norm": 31.25,
"learning_rate": 1.2800000000000001e-05,
"loss": 0.607,
"step": 160
},
{
"epoch": 0.6673209028459274,
"grad_norm": 33.5,
"learning_rate": 1.3600000000000002e-05,
"loss": 0.674,
"step": 170
},
{
"epoch": 0.7065750736015701,
"grad_norm": 37.5,
"learning_rate": 1.4400000000000001e-05,
"loss": 0.6806,
"step": 180
},
{
"epoch": 0.745829244357213,
"grad_norm": 36.25,
"learning_rate": 1.5200000000000002e-05,
"loss": 0.6131,
"step": 190
},
{
"epoch": 0.7850834151128557,
"grad_norm": 43.25,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.5914,
"step": 200
},
{
"epoch": 0.8243375858684985,
"grad_norm": 27.75,
"learning_rate": 1.6800000000000002e-05,
"loss": 0.5488,
"step": 210
},
{
"epoch": 0.8635917566241413,
"grad_norm": 47.75,
"learning_rate": 1.76e-05,
"loss": 0.7107,
"step": 220
},
{
"epoch": 0.9028459273797841,
"grad_norm": 16.375,
"learning_rate": 1.8400000000000003e-05,
"loss": 1.2491,
"step": 230
},
{
"epoch": 0.9421000981354269,
"grad_norm": 35.5,
"learning_rate": 1.9200000000000003e-05,
"loss": 0.6448,
"step": 240
},
{
"epoch": 0.9813542688910697,
"grad_norm": 22.875,
"learning_rate": 2e-05,
"loss": 0.5682,
"step": 250
},
{
"epoch": 1.0206084396467126,
"grad_norm": 32.75,
"learning_rate": 1.9999025240093045e-05,
"loss": 0.5689,
"step": 260
},
{
"epoch": 1.0598626104023552,
"grad_norm": 28.375,
"learning_rate": 1.9996101150403543e-05,
"loss": 0.4836,
"step": 270
},
{
"epoch": 1.099116781157998,
"grad_norm": 18.25,
"learning_rate": 1.9991228300988586e-05,
"loss": 0.519,
"step": 280
},
{
"epoch": 1.138370951913641,
"grad_norm": 26.125,
"learning_rate": 1.9984407641819812e-05,
"loss": 0.4941,
"step": 290
},
{
"epoch": 1.1776251226692835,
"grad_norm": 20.5,
"learning_rate": 1.9975640502598243e-05,
"loss": 0.4484,
"step": 300
},
{
"epoch": 1.2168792934249264,
"grad_norm": 24.625,
"learning_rate": 1.9964928592495046e-05,
"loss": 0.5169,
"step": 310
},
{
"epoch": 1.2561334641805693,
"grad_norm": 14.125,
"learning_rate": 1.9952273999818312e-05,
"loss": 0.4417,
"step": 320
},
{
"epoch": 1.295387634936212,
"grad_norm": 26.25,
"learning_rate": 1.9937679191605964e-05,
"loss": 0.4361,
"step": 330
},
{
"epoch": 1.3346418056918548,
"grad_norm": 23.125,
"learning_rate": 1.9921147013144782e-05,
"loss": 0.4454,
"step": 340
},
{
"epoch": 1.3738959764474976,
"grad_norm": 23.625,
"learning_rate": 1.9902680687415704e-05,
"loss": 0.446,
"step": 350
},
{
"epoch": 1.4131501472031402,
"grad_norm": 13.9375,
"learning_rate": 1.988228381446553e-05,
"loss": 0.4006,
"step": 360
},
{
"epoch": 1.452404317958783,
"grad_norm": 31.5,
"learning_rate": 1.985996037070505e-05,
"loss": 0.4577,
"step": 370
},
{
"epoch": 1.491658488714426,
"grad_norm": 47.5,
"learning_rate": 1.983571470813386e-05,
"loss": 0.4207,
"step": 380
},
{
"epoch": 1.5309126594700686,
"grad_norm": 12.3125,
"learning_rate": 1.9809551553491918e-05,
"loss": 0.4251,
"step": 390
},
{
"epoch": 1.5701668302257115,
"grad_norm": 20.75,
"learning_rate": 1.9781476007338058e-05,
"loss": 0.3968,
"step": 400
},
{
"epoch": 1.6094210009813543,
"grad_norm": 15.0,
"learning_rate": 1.9751493543055634e-05,
"loss": 0.4019,
"step": 410
},
{
"epoch": 1.648675171736997,
"grad_norm": 11.5,
"learning_rate": 1.9719610005785466e-05,
"loss": 0.3894,
"step": 420
},
{
"epoch": 1.6879293424926398,
"grad_norm": 20.375,
"learning_rate": 1.9685831611286312e-05,
"loss": 0.3923,
"step": 430
},
{
"epoch": 1.7271835132482827,
"grad_norm": 16.75,
"learning_rate": 1.9650164944723116e-05,
"loss": 0.374,
"step": 440
},
{
"epoch": 1.7664376840039253,
"grad_norm": 21.375,
"learning_rate": 1.961261695938319e-05,
"loss": 0.402,
"step": 450
},
{
"epoch": 1.8056918547595682,
"grad_norm": 14.25,
"learning_rate": 1.9573194975320672e-05,
"loss": 0.3646,
"step": 460
},
{
"epoch": 1.844946025515211,
"grad_norm": 15.0625,
"learning_rate": 1.9531906677929472e-05,
"loss": 0.3451,
"step": 470
},
{
"epoch": 1.8842001962708537,
"grad_norm": 12.75,
"learning_rate": 1.9488760116444966e-05,
"loss": 0.3373,
"step": 480
},
{
"epoch": 1.9234543670264965,
"grad_norm": 12.5,
"learning_rate": 1.944376370237481e-05,
"loss": 0.3693,
"step": 490
},
{
"epoch": 1.9627085377821394,
"grad_norm": 2.34375,
"learning_rate": 1.9396926207859085e-05,
"loss": 0.3595,
"step": 500
},
{
"epoch": 2.001962708537782,
"grad_norm": 19.75,
"learning_rate": 1.9348256763960146e-05,
"loss": 0.3795,
"step": 510
},
{
"epoch": 2.041216879293425,
"grad_norm": 9.5625,
"learning_rate": 1.9297764858882516e-05,
"loss": 0.3388,
"step": 520
},
{
"epoch": 2.0804710500490677,
"grad_norm": 9.625,
"learning_rate": 1.9245460336123136e-05,
"loss": 0.3306,
"step": 530
},
{
"epoch": 2.1197252208047104,
"grad_norm": 25.25,
"learning_rate": 1.9191353392552346e-05,
"loss": 0.3471,
"step": 540
},
{
"epoch": 2.1589793915603535,
"grad_norm": 12.6875,
"learning_rate": 1.913545457642601e-05,
"loss": 0.3268,
"step": 550
},
{
"epoch": 2.198233562315996,
"grad_norm": 18.375,
"learning_rate": 1.907777478532909e-05,
"loss": 0.3138,
"step": 560
},
{
"epoch": 2.2374877330716387,
"grad_norm": 11.375,
"learning_rate": 1.901832526405114e-05,
"loss": 0.3271,
"step": 570
},
{
"epoch": 2.276741903827282,
"grad_norm": 3.484375,
"learning_rate": 1.895711760239413e-05,
"loss": 0.3189,
"step": 580
},
{
"epoch": 2.3159960745829244,
"grad_norm": 21.0,
"learning_rate": 1.889416373291298e-05,
"loss": 0.3413,
"step": 590
},
{
"epoch": 2.355250245338567,
"grad_norm": 12.1875,
"learning_rate": 1.8829475928589272e-05,
"loss": 0.3222,
"step": 600
},
{
"epoch": 2.39450441609421,
"grad_norm": 4.03125,
"learning_rate": 1.8763066800438638e-05,
"loss": 0.3238,
"step": 610
},
{
"epoch": 2.433758586849853,
"grad_norm": 8.5625,
"learning_rate": 1.869494929505219e-05,
"loss": 0.3133,
"step": 620
},
{
"epoch": 2.4730127576054954,
"grad_norm": 22.25,
"learning_rate": 1.8625136692072577e-05,
"loss": 0.3337,
"step": 630
},
{
"epoch": 2.5122669283611385,
"grad_norm": 6.28125,
"learning_rate": 1.855364260160507e-05,
"loss": 0.3016,
"step": 640
},
{
"epoch": 2.551521099116781,
"grad_norm": 8.0,
"learning_rate": 1.848048096156426e-05,
"loss": 0.3138,
"step": 650
},
{
"epoch": 2.590775269872424,
"grad_norm": 13.5,
"learning_rate": 1.8405666034956842e-05,
"loss": 0.295,
"step": 660
},
{
"epoch": 2.630029440628067,
"grad_norm": 18.125,
"learning_rate": 1.8329212407100996e-05,
"loss": 0.3002,
"step": 670
},
{
"epoch": 2.6692836113837095,
"grad_norm": 17.375,
"learning_rate": 1.8251134982782952e-05,
"loss": 0.3026,
"step": 680
},
{
"epoch": 2.708537782139352,
"grad_norm": 8.75,
"learning_rate": 1.8171448983351284e-05,
"loss": 0.2936,
"step": 690
},
{
"epoch": 2.7477919528949952,
"grad_norm": 13.375,
"learning_rate": 1.8090169943749477e-05,
"loss": 0.3181,
"step": 700
},
{
"epoch": 2.787046123650638,
"grad_norm": 22.0,
"learning_rate": 1.8007313709487334e-05,
"loss": 0.2988,
"step": 710
},
{
"epoch": 2.8263002944062805,
"grad_norm": 10.0,
"learning_rate": 1.792289643355191e-05,
"loss": 0.3096,
"step": 720
},
{
"epoch": 2.8655544651619236,
"grad_norm": 21.25,
"learning_rate": 1.78369345732584e-05,
"loss": 0.2987,
"step": 730
},
{
"epoch": 2.904808635917566,
"grad_norm": 3.84375,
"learning_rate": 1.7749444887041797e-05,
"loss": 0.2787,
"step": 740
},
{
"epoch": 2.944062806673209,
"grad_norm": 10.4375,
"learning_rate": 1.766044443118978e-05,
"loss": 0.2763,
"step": 750
},
{
"epoch": 2.983316977428852,
"grad_norm": 12.6875,
"learning_rate": 1.7569950556517566e-05,
"loss": 0.2841,
"step": 760
},
{
"epoch": 3.0225711481844946,
"grad_norm": 8.25,
"learning_rate": 1.747798090498532e-05,
"loss": 0.2799,
"step": 770
},
{
"epoch": 3.061825318940137,
"grad_norm": 15.5,
"learning_rate": 1.7384553406258842e-05,
"loss": 0.2664,
"step": 780
},
{
"epoch": 3.1010794896957803,
"grad_norm": 19.125,
"learning_rate": 1.7289686274214116e-05,
"loss": 0.262,
"step": 790
},
{
"epoch": 3.140333660451423,
"grad_norm": 12.875,
"learning_rate": 1.7193398003386514e-05,
"loss": 0.2588,
"step": 800
},
{
"epoch": 3.1795878312070656,
"grad_norm": 4.71875,
"learning_rate": 1.709570736536521e-05,
"loss": 0.264,
"step": 810
},
{
"epoch": 3.2188420019627086,
"grad_norm": 5.375,
"learning_rate": 1.6996633405133656e-05,
"loss": 0.2622,
"step": 820
},
{
"epoch": 3.2580961727183513,
"grad_norm": 8.9375,
"learning_rate": 1.68961954373567e-05,
"loss": 0.2576,
"step": 830
},
{
"epoch": 3.297350343473994,
"grad_norm": 6.4375,
"learning_rate": 1.6794413042615168e-05,
"loss": 0.2602,
"step": 840
},
{
"epoch": 3.336604514229637,
"grad_norm": 6.34375,
"learning_rate": 1.6691306063588583e-05,
"loss": 0.2495,
"step": 850
},
{
"epoch": 3.3758586849852796,
"grad_norm": 7.4375,
"learning_rate": 1.6586894601186804e-05,
"loss": 0.2568,
"step": 860
},
{
"epoch": 3.4151128557409223,
"grad_norm": 12.25,
"learning_rate": 1.6481199010631312e-05,
"loss": 0.2429,
"step": 870
},
{
"epoch": 3.4543670264965654,
"grad_norm": 12.875,
"learning_rate": 1.63742398974869e-05,
"loss": 0.2647,
"step": 880
},
{
"epoch": 3.493621197252208,
"grad_norm": 9.625,
"learning_rate": 1.6266038113644605e-05,
"loss": 0.2571,
"step": 890
},
{
"epoch": 3.5328753680078506,
"grad_norm": 5.0625,
"learning_rate": 1.6156614753256583e-05,
"loss": 0.2565,
"step": 900
},
{
"epoch": 3.5721295387634937,
"grad_norm": 11.4375,
"learning_rate": 1.6045991148623752e-05,
"loss": 0.2591,
"step": 910
},
{
"epoch": 3.6113837095191363,
"grad_norm": 7.71875,
"learning_rate": 1.5934188866037017e-05,
"loss": 0.2364,
"step": 920
},
{
"epoch": 3.650637880274779,
"grad_norm": 12.75,
"learning_rate": 1.5821229701572897e-05,
"loss": 0.2508,
"step": 930
},
{
"epoch": 3.689892051030422,
"grad_norm": 12.375,
"learning_rate": 1.570713567684432e-05,
"loss": 0.2251,
"step": 940
},
{
"epoch": 3.7291462217860647,
"grad_norm": 4.5,
"learning_rate": 1.5591929034707468e-05,
"loss": 0.2359,
"step": 950
},
{
"epoch": 3.7684003925417073,
"grad_norm": 8.625,
"learning_rate": 1.5475632234925505e-05,
"loss": 0.2498,
"step": 960
},
{
"epoch": 3.8076545632973504,
"grad_norm": 8.625,
"learning_rate": 1.5358267949789968e-05,
"loss": 0.2337,
"step": 970
},
{
"epoch": 3.846908734052993,
"grad_norm": 8.1875,
"learning_rate": 1.5239859059700794e-05,
"loss": 0.254,
"step": 980
},
{
"epoch": 3.8861629048086357,
"grad_norm": 7.0625,
"learning_rate": 1.5120428648705716e-05,
"loss": 0.249,
"step": 990
},
{
"epoch": 3.9254170755642788,
"grad_norm": 3.640625,
"learning_rate": 1.5000000000000002e-05,
"loss": 0.2464,
"step": 1000
},
{
"epoch": 3.9646712463199214,
"grad_norm": 13.5625,
"learning_rate": 1.4878596591387329e-05,
"loss": 0.2452,
"step": 1010
},
{
"epoch": 4.003925417075564,
"grad_norm": 20.0,
"learning_rate": 1.4756242090702756e-05,
"loss": 0.2338,
"step": 1020
},
{
"epoch": 4.043179587831207,
"grad_norm": 5.65625,
"learning_rate": 1.463296035119862e-05,
"loss": 0.2043,
"step": 1030
},
{
"epoch": 4.08243375858685,
"grad_norm": 7.375,
"learning_rate": 1.4508775406894308e-05,
"loss": 0.2217,
"step": 1040
},
{
"epoch": 4.121687929342492,
"grad_norm": 4.0,
"learning_rate": 1.4383711467890776e-05,
"loss": 0.2288,
"step": 1050
},
{
"epoch": 4.1609421000981355,
"grad_norm": 6.15625,
"learning_rate": 1.4257792915650728e-05,
"loss": 0.2023,
"step": 1060
},
{
"epoch": 4.200196270853779,
"grad_norm": 9.25,
"learning_rate": 1.413104429824542e-05,
"loss": 0.2005,
"step": 1070
},
{
"epoch": 4.239450441609421,
"grad_norm": 12.5625,
"learning_rate": 1.4003490325568953e-05,
"loss": 0.2098,
"step": 1080
},
{
"epoch": 4.278704612365064,
"grad_norm": 14.4375,
"learning_rate": 1.3875155864521031e-05,
"loss": 0.2059,
"step": 1090
},
{
"epoch": 4.317958783120707,
"grad_norm": 7.5,
"learning_rate": 1.3746065934159123e-05,
"loss": 0.1923,
"step": 1100
},
{
"epoch": 4.357212953876349,
"grad_norm": 4.53125,
"learning_rate": 1.3616245700820922e-05,
"loss": 0.2082,
"step": 1110
},
{
"epoch": 4.396467124631992,
"grad_norm": 1.9375,
"learning_rate": 1.3485720473218153e-05,
"loss": 0.213,
"step": 1120
},
{
"epoch": 4.435721295387635,
"grad_norm": 6.59375,
"learning_rate": 1.3354515697502552e-05,
"loss": 0.2213,
"step": 1130
},
{
"epoch": 4.4749754661432775,
"grad_norm": 10.1875,
"learning_rate": 1.3222656952305113e-05,
"loss": 0.1971,
"step": 1140
},
{
"epoch": 4.5142296368989205,
"grad_norm": 3.9375,
"learning_rate": 1.3090169943749475e-05,
"loss": 0.2035,
"step": 1150
},
{
"epoch": 4.553483807654564,
"grad_norm": 3.171875,
"learning_rate": 1.2957080500440469e-05,
"loss": 0.1887,
"step": 1160
},
{
"epoch": 4.592737978410206,
"grad_norm": 5.15625,
"learning_rate": 1.2823414568428767e-05,
"loss": 0.1914,
"step": 1170
},
{
"epoch": 4.631992149165849,
"grad_norm": 3.0,
"learning_rate": 1.2689198206152657e-05,
"loss": 0.2014,
"step": 1180
},
{
"epoch": 4.671246319921492,
"grad_norm": 3.3125,
"learning_rate": 1.2554457579357906e-05,
"loss": 0.1934,
"step": 1190
},
{
"epoch": 4.710500490677134,
"grad_norm": 4.46875,
"learning_rate": 1.2419218955996677e-05,
"loss": 0.1914,
"step": 1200
},
{
"epoch": 4.749754661432777,
"grad_norm": 5.65625,
"learning_rate": 1.2283508701106559e-05,
"loss": 0.2069,
"step": 1210
},
{
"epoch": 4.78900883218842,
"grad_norm": 2.078125,
"learning_rate": 1.2147353271670634e-05,
"loss": 0.2163,
"step": 1220
},
{
"epoch": 4.8282630029440625,
"grad_norm": 5.25,
"learning_rate": 1.2010779211459649e-05,
"loss": 0.2,
"step": 1230
},
{
"epoch": 4.867517173699706,
"grad_norm": 10.375,
"learning_rate": 1.187381314585725e-05,
"loss": 0.1848,
"step": 1240
},
{
"epoch": 4.906771344455349,
"grad_norm": 3.828125,
"learning_rate": 1.1736481776669307e-05,
"loss": 0.1913,
"step": 1250
},
{
"epoch": 4.946025515210991,
"grad_norm": 5.1875,
"learning_rate": 1.159881187691835e-05,
"loss": 0.1963,
"step": 1260
},
{
"epoch": 4.985279685966634,
"grad_norm": 7.96875,
"learning_rate": 1.1460830285624119e-05,
"loss": 0.1911,
"step": 1270
},
{
"epoch": 5.024533856722277,
"grad_norm": 3.703125,
"learning_rate": 1.1322563902571227e-05,
"loss": 0.1775,
"step": 1280
},
{
"epoch": 5.063788027477919,
"grad_norm": 5.84375,
"learning_rate": 1.1184039683065014e-05,
"loss": 0.1645,
"step": 1290
},
{
"epoch": 5.103042198233562,
"grad_norm": 5.40625,
"learning_rate": 1.1045284632676535e-05,
"loss": 0.1624,
"step": 1300
},
{
"epoch": 5.142296368989205,
"grad_norm": 8.3125,
"learning_rate": 1.0906325801977804e-05,
"loss": 0.1761,
"step": 1310
},
{
"epoch": 5.181550539744848,
"grad_norm": 2.09375,
"learning_rate": 1.0767190281268187e-05,
"loss": 0.1761,
"step": 1320
},
{
"epoch": 5.220804710500491,
"grad_norm": 4.71875,
"learning_rate": 1.0627905195293135e-05,
"loss": 0.1744,
"step": 1330
},
{
"epoch": 5.260058881256134,
"grad_norm": 8.125,
"learning_rate": 1.0488497697956134e-05,
"loss": 0.1787,
"step": 1340
},
{
"epoch": 5.299313052011776,
"grad_norm": 5.6875,
"learning_rate": 1.0348994967025012e-05,
"loss": 0.1651,
"step": 1350
},
{
"epoch": 5.338567222767419,
"grad_norm": 5.1875,
"learning_rate": 1.0209424198833571e-05,
"loss": 0.1684,
"step": 1360
},
{
"epoch": 5.377821393523062,
"grad_norm": 5.71875,
"learning_rate": 1.0069812602979617e-05,
"loss": 0.1699,
"step": 1370
},
{
"epoch": 5.417075564278704,
"grad_norm": 4.78125,
"learning_rate": 9.930187397020385e-06,
"loss": 0.1677,
"step": 1380
},
{
"epoch": 5.456329735034347,
"grad_norm": 3.0,
"learning_rate": 9.790575801166432e-06,
"loss": 0.1612,
"step": 1390
},
{
"epoch": 5.4955839057899905,
"grad_norm": 2.21875,
"learning_rate": 9.651005032974994e-06,
"loss": 0.1623,
"step": 1400
},
{
"epoch": 5.534838076545633,
"grad_norm": 1.515625,
"learning_rate": 9.511502302043867e-06,
"loss": 0.1674,
"step": 1410
},
{
"epoch": 5.574092247301276,
"grad_norm": 2.390625,
"learning_rate": 9.372094804706867e-06,
"loss": 0.1629,
"step": 1420
},
{
"epoch": 5.613346418056919,
"grad_norm": 5.59375,
"learning_rate": 9.232809718731815e-06,
"loss": 0.1625,
"step": 1430
},
{
"epoch": 5.652600588812561,
"grad_norm": 1.453125,
"learning_rate": 9.093674198022201e-06,
"loss": 0.1667,
"step": 1440
},
{
"epoch": 5.691854759568204,
"grad_norm": 2.640625,
"learning_rate": 8.954715367323468e-06,
"loss": 0.1702,
"step": 1450
},
{
"epoch": 5.731108930323847,
"grad_norm": 1.3125,
"learning_rate": 8.815960316934991e-06,
"loss": 0.1646,
"step": 1460
},
{
"epoch": 5.770363101079489,
"grad_norm": 3.359375,
"learning_rate": 8.677436097428775e-06,
"loss": 0.1635,
"step": 1470
},
{
"epoch": 5.809617271835132,
"grad_norm": 5.3125,
"learning_rate": 8.539169714375885e-06,
"loss": 0.1533,
"step": 1480
},
{
"epoch": 5.8488714425907755,
"grad_norm": 1.7890625,
"learning_rate": 8.401188123081653e-06,
"loss": 0.1535,
"step": 1490
},
{
"epoch": 5.888125613346418,
"grad_norm": 3.453125,
"learning_rate": 8.263518223330698e-06,
"loss": 0.1586,
"step": 1500
},
{
"epoch": 5.927379784102061,
"grad_norm": 2.28125,
"learning_rate": 8.126186854142752e-06,
"loss": 0.17,
"step": 1510
},
{
"epoch": 5.966633954857704,
"grad_norm": 2.5,
"learning_rate": 7.989220788540356e-06,
"loss": 0.1553,
"step": 1520
},
{
"epoch": 6.005888125613346,
"grad_norm": 1.25,
"learning_rate": 7.852646728329368e-06,
"loss": 0.145,
"step": 1530
},
{
"epoch": 6.045142296368989,
"grad_norm": 2.765625,
"learning_rate": 7.716491298893443e-06,
"loss": 0.1419,
"step": 1540
},
{
"epoch": 6.084396467124632,
"grad_norm": 5.5625,
"learning_rate": 7.580781044003324e-06,
"loss": 0.1442,
"step": 1550
},
{
"epoch": 6.123650637880274,
"grad_norm": 4.71875,
"learning_rate": 7.445542420642097e-06,
"loss": 0.1397,
"step": 1560
},
{
"epoch": 6.1629048086359175,
"grad_norm": 2.359375,
"learning_rate": 7.310801793847344e-06,
"loss": 0.1453,
"step": 1570
},
{
"epoch": 6.202158979391561,
"grad_norm": 3.0625,
"learning_rate": 7.176585431571235e-06,
"loss": 0.1383,
"step": 1580
},
{
"epoch": 6.241413150147203,
"grad_norm": 2.171875,
"learning_rate": 7.042919499559538e-06,
"loss": 0.1474,
"step": 1590
},
{
"epoch": 6.280667320902846,
"grad_norm": 1.0859375,
"learning_rate": 6.909830056250527e-06,
"loss": 0.1485,
"step": 1600
},
{
"epoch": 6.319921491658489,
"grad_norm": 2.515625,
"learning_rate": 6.777343047694891e-06,
"loss": 0.1457,
"step": 1610
},
{
"epoch": 6.359175662414131,
"grad_norm": 5.5,
"learning_rate": 6.645484302497452e-06,
"loss": 0.1458,
"step": 1620
},
{
"epoch": 6.398429833169774,
"grad_norm": 1.453125,
"learning_rate": 6.5142795267818505e-06,
"loss": 0.1425,
"step": 1630
},
{
"epoch": 6.437684003925417,
"grad_norm": 3.8125,
"learning_rate": 6.383754299179079e-06,
"loss": 0.1405,
"step": 1640
},
{
"epoch": 6.4769381746810595,
"grad_norm": 1.1875,
"learning_rate": 6.25393406584088e-06,
"loss": 0.1386,
"step": 1650
},
{
"epoch": 6.516192345436703,
"grad_norm": 1.7109375,
"learning_rate": 6.124844135478971e-06,
"loss": 0.1444,
"step": 1660
},
{
"epoch": 6.555446516192346,
"grad_norm": 1.28125,
"learning_rate": 5.996509674431053e-06,
"loss": 0.1341,
"step": 1670
},
{
"epoch": 6.594700686947988,
"grad_norm": 0.890625,
"learning_rate": 5.868955701754584e-06,
"loss": 0.1479,
"step": 1680
},
{
"epoch": 6.633954857703631,
"grad_norm": 0.9140625,
"learning_rate": 5.742207084349274e-06,
"loss": 0.1399,
"step": 1690
},
{
"epoch": 6.673209028459274,
"grad_norm": 3.34375,
"learning_rate": 5.616288532109225e-06,
"loss": 0.1424,
"step": 1700
},
{
"epoch": 6.712463199214916,
"grad_norm": 1.171875,
"learning_rate": 5.491224593105695e-06,
"loss": 0.1358,
"step": 1710
},
{
"epoch": 6.751717369970559,
"grad_norm": 2.140625,
"learning_rate": 5.367039648801386e-06,
"loss": 0.1385,
"step": 1720
},
{
"epoch": 6.790971540726202,
"grad_norm": 1.03125,
"learning_rate": 5.243757909297247e-06,
"loss": 0.1356,
"step": 1730
},
{
"epoch": 6.8302257114818445,
"grad_norm": 1.96875,
"learning_rate": 5.121403408612672e-06,
"loss": 0.1349,
"step": 1740
},
{
"epoch": 6.869479882237488,
"grad_norm": 1.5,
"learning_rate": 5.000000000000003e-06,
"loss": 0.1376,
"step": 1750
},
{
"epoch": 6.908734052993131,
"grad_norm": 4.25,
"learning_rate": 4.879571351294287e-06,
"loss": 0.143,
"step": 1760
},
{
"epoch": 6.947988223748773,
"grad_norm": 1.2109375,
"learning_rate": 4.76014094029921e-06,
"loss": 0.1417,
"step": 1770
},
{
"epoch": 6.987242394504416,
"grad_norm": 1.265625,
"learning_rate": 4.641732050210032e-06,
"loss": 0.1371,
"step": 1780
},
{
"epoch": 7.026496565260059,
"grad_norm": 0.9375,
"learning_rate": 4.524367765074499e-06,
"loss": 0.1392,
"step": 1790
},
{
"epoch": 7.065750736015701,
"grad_norm": 1.9140625,
"learning_rate": 4.408070965292534e-06,
"loss": 0.1295,
"step": 1800
},
{
"epoch": 7.105004906771344,
"grad_norm": 0.8984375,
"learning_rate": 4.292864323155684e-06,
"loss": 0.126,
"step": 1810
},
{
"epoch": 7.144259077526987,
"grad_norm": 0.80078125,
"learning_rate": 4.178770298427107e-06,
"loss": 0.1215,
"step": 1820
},
{
"epoch": 7.18351324828263,
"grad_norm": 1.1953125,
"learning_rate": 4.065811133962987e-06,
"loss": 0.1375,
"step": 1830
},
{
"epoch": 7.222767419038273,
"grad_norm": 1.734375,
"learning_rate": 3.954008851376252e-06,
"loss": 0.1284,
"step": 1840
},
{
"epoch": 7.262021589793916,
"grad_norm": 1.2421875,
"learning_rate": 3.8433852467434175e-06,
"loss": 0.1303,
"step": 1850
},
{
"epoch": 7.301275760549558,
"grad_norm": 0.8984375,
"learning_rate": 3.7339618863553983e-06,
"loss": 0.1273,
"step": 1860
},
{
"epoch": 7.340529931305201,
"grad_norm": 0.89453125,
"learning_rate": 3.625760102513103e-06,
"loss": 0.1303,
"step": 1870
},
{
"epoch": 7.379784102060844,
"grad_norm": 2.265625,
"learning_rate": 3.5188009893686916e-06,
"loss": 0.1321,
"step": 1880
},
{
"epoch": 7.419038272816486,
"grad_norm": 1.6484375,
"learning_rate": 3.4131053988131947e-06,
"loss": 0.1284,
"step": 1890
},
{
"epoch": 7.458292443572129,
"grad_norm": 0.7109375,
"learning_rate": 3.308693936411421e-06,
"loss": 0.1376,
"step": 1900
},
{
"epoch": 7.4975466143277725,
"grad_norm": 1.7421875,
"learning_rate": 3.2055869573848374e-06,
"loss": 0.1271,
"step": 1910
},
{
"epoch": 7.536800785083415,
"grad_norm": 0.69921875,
"learning_rate": 3.103804562643302e-06,
"loss": 0.1264,
"step": 1920
},
{
"epoch": 7.576054955839058,
"grad_norm": 1.234375,
"learning_rate": 3.003366594866345e-06,
"loss": 0.1293,
"step": 1930
},
{
"epoch": 7.615309126594701,
"grad_norm": 0.98046875,
"learning_rate": 2.9042926346347932e-06,
"loss": 0.1272,
"step": 1940
},
{
"epoch": 7.654563297350343,
"grad_norm": 0.87109375,
"learning_rate": 2.8066019966134907e-06,
"loss": 0.1332,
"step": 1950
},
{
"epoch": 7.693817468105986,
"grad_norm": 0.83984375,
"learning_rate": 2.7103137257858867e-06,
"loss": 0.134,
"step": 1960
},
{
"epoch": 7.733071638861629,
"grad_norm": 1.1015625,
"learning_rate": 2.615446593741161e-06,
"loss": 0.1314,
"step": 1970
},
{
"epoch": 7.772325809617271,
"grad_norm": 1.2265625,
"learning_rate": 2.522019095014683e-06,
"loss": 0.1194,
"step": 1980
},
{
"epoch": 7.8115799803729145,
"grad_norm": 0.78515625,
"learning_rate": 2.4300494434824373e-06,
"loss": 0.1306,
"step": 1990
},
{
"epoch": 7.8508341511285575,
"grad_norm": 1.171875,
"learning_rate": 2.339555568810221e-06,
"loss": 0.1278,
"step": 2000
}
],
"logging_steps": 10,
"max_steps": 2500,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 7.65313243152384e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}