Llama-3-8B-Ins-Sentiment / trainer_state.json
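A minimal sketch of how the `log_history` entries in the JSON below can be inspected programmatically; the local filename `trainer_state.json` is an assumption for illustration and is not part of the upload. Python's `json` module parses the non-standard `NaN` values that appear in some `grad_norm` fields.

```python
# Sketch: read a trainer_state.json like the one below and summarize it.
# Assumes the file has been downloaded locally as "trainer_state.json".
import json
import math

with open("trainer_state.json") as f:
    state = json.load(f)  # NaN grad_norm values parse to float('nan')

# Split the log into per-step training entries and periodic eval entries.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"trained to epoch {state['epoch']:.3f} over {state['global_step']} steps")
print(f"{len(train_logs)} training entries, {len(eval_logs)} eval entries "
      f"(eval every {state['eval_steps']} steps)")

# Eval loss at each evaluation checkpoint.
for e in eval_logs:
    print(f"step {e['step']:>5}  eval_loss {e['eval_loss']:.4f}")

# Steps where the gradient norm was logged as NaN.
nan_steps = [e["step"] for e in train_logs if math.isnan(e.get("grad_norm", 0.0))]
print("steps with NaN grad_norm:", nan_steps)
```
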
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.8720626631853787,
"eval_steps": 250,
"global_step": 4400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0032637075718015664,
"grad_norm": 0.1396484375,
"learning_rate": 3.7499999999999997e-06,
"loss": 0.9583,
"step": 5
},
{
"epoch": 0.006527415143603133,
"grad_norm": 0.166015625,
"learning_rate": 7.499999999999999e-06,
"loss": 0.9966,
"step": 10
},
{
"epoch": 0.0097911227154047,
"grad_norm": 0.1611328125,
"learning_rate": 1.1249999999999999e-05,
"loss": 0.8656,
"step": 15
},
{
"epoch": 0.013054830287206266,
"grad_norm": 0.142578125,
"learning_rate": 1.4999999999999999e-05,
"loss": 0.9513,
"step": 20
},
{
"epoch": 0.016318537859007835,
"grad_norm": 0.1474609375,
"learning_rate": 1.875e-05,
"loss": 0.9421,
"step": 25
},
{
"epoch": 0.0195822454308094,
"grad_norm": 0.234375,
"learning_rate": 2.2499999999999998e-05,
"loss": 0.8782,
"step": 30
},
{
"epoch": 0.022845953002610966,
"grad_norm": 0.1982421875,
"learning_rate": 2.6249999999999998e-05,
"loss": 0.9951,
"step": 35
},
{
"epoch": 0.02610966057441253,
"grad_norm": 0.1494140625,
"learning_rate": 2.9999999999999997e-05,
"loss": 0.7883,
"step": 40
},
{
"epoch": 0.0293733681462141,
"grad_norm": 0.1328125,
"learning_rate": 3.375e-05,
"loss": 0.7147,
"step": 45
},
{
"epoch": 0.03263707571801567,
"grad_norm": 0.11474609375,
"learning_rate": 3.75e-05,
"loss": 0.7809,
"step": 50
},
{
"epoch": 0.03590078328981723,
"grad_norm": 0.197265625,
"learning_rate": 4.125e-05,
"loss": 0.6191,
"step": 55
},
{
"epoch": 0.0391644908616188,
"grad_norm": 0.19140625,
"learning_rate": 4.4999999999999996e-05,
"loss": 0.8668,
"step": 60
},
{
"epoch": 0.04242819843342036,
"grad_norm": 0.173828125,
"learning_rate": 4.875e-05,
"loss": 0.7034,
"step": 65
},
{
"epoch": 0.04569190600522193,
"grad_norm": 0.1865234375,
"learning_rate": 5.2499999999999995e-05,
"loss": 0.6424,
"step": 70
},
{
"epoch": 0.0489556135770235,
"grad_norm": 0.29296875,
"learning_rate": 5.625e-05,
"loss": 0.6463,
"step": 75
},
{
"epoch": 0.05221932114882506,
"grad_norm": 0.310546875,
"learning_rate": 5.9999999999999995e-05,
"loss": 0.6884,
"step": 80
},
{
"epoch": 0.05548302872062663,
"grad_norm": 0.1923828125,
"learning_rate": 6.374999999999999e-05,
"loss": 0.7838,
"step": 85
},
{
"epoch": 0.0587467362924282,
"grad_norm": 0.1240234375,
"learning_rate": 6.75e-05,
"loss": 0.48,
"step": 90
},
{
"epoch": 0.06201044386422976,
"grad_norm": 0.111328125,
"learning_rate": 7.125e-05,
"loss": 0.5492,
"step": 95
},
{
"epoch": 0.06527415143603134,
"grad_norm": 0.12353515625,
"learning_rate": 7.5e-05,
"loss": 0.4949,
"step": 100
},
{
"epoch": 0.0685378590078329,
"grad_norm": NaN,
"learning_rate": 7.499977113046922e-05,
"loss": 0.5546,
"step": 105
},
{
"epoch": 0.07180156657963446,
"grad_norm": 0.1455078125,
"learning_rate": 7.499908452467055e-05,
"loss": 0.7387,
"step": 110
},
{
"epoch": 0.07506527415143603,
"grad_norm": 0.13671875,
"learning_rate": 7.499794019098496e-05,
"loss": 0.5367,
"step": 115
},
{
"epoch": 0.0783289817232376,
"grad_norm": 0.21484375,
"learning_rate": 7.499633814338062e-05,
"loss": 0.4813,
"step": 120
},
{
"epoch": 0.08159268929503917,
"grad_norm": 0.23828125,
"learning_rate": 7.499427840141272e-05,
"loss": 0.5815,
"step": 125
},
{
"epoch": 0.08485639686684072,
"grad_norm": 0.171875,
"learning_rate": 7.499176099022322e-05,
"loss": 0.4612,
"step": 130
},
{
"epoch": 0.0881201044386423,
"grad_norm": 0.11474609375,
"learning_rate": 7.498878594054062e-05,
"loss": 0.3535,
"step": 135
},
{
"epoch": 0.09138381201044386,
"grad_norm": 0.1513671875,
"learning_rate": 7.498535328867947e-05,
"loss": 0.3999,
"step": 140
},
{
"epoch": 0.09464751958224543,
"grad_norm": 0.125,
"learning_rate": 7.498146307654004e-05,
"loss": 0.4473,
"step": 145
},
{
"epoch": 0.097911227154047,
"grad_norm": 0.17578125,
"learning_rate": 7.497711535160767e-05,
"loss": 0.5172,
"step": 150
},
{
"epoch": 0.10117493472584857,
"grad_norm": 0.1025390625,
"learning_rate": 7.497231016695234e-05,
"loss": 0.5024,
"step": 155
},
{
"epoch": 0.10443864229765012,
"grad_norm": 0.09326171875,
"learning_rate": 7.496704758122795e-05,
"loss": 0.5156,
"step": 160
},
{
"epoch": 0.1077023498694517,
"grad_norm": 0.087890625,
"learning_rate": 7.496132765867157e-05,
"loss": 0.5329,
"step": 165
},
{
"epoch": 0.11096605744125326,
"grad_norm": 0.09130859375,
"learning_rate": 7.495515046910274e-05,
"loss": 0.4888,
"step": 170
},
{
"epoch": 0.11422976501305483,
"grad_norm": 0.099609375,
"learning_rate": 7.494851608792255e-05,
"loss": 0.6306,
"step": 175
},
{
"epoch": 0.1174934725848564,
"grad_norm": 0.11669921875,
"learning_rate": 7.494142459611273e-05,
"loss": 0.4552,
"step": 180
},
{
"epoch": 0.12075718015665797,
"grad_norm": 0.1259765625,
"learning_rate": 7.49338760802347e-05,
"loss": 0.5988,
"step": 185
},
{
"epoch": 0.12402088772845953,
"grad_norm": 0.134765625,
"learning_rate": 7.492587063242849e-05,
"loss": 0.4453,
"step": 190
},
{
"epoch": 0.1272845953002611,
"grad_norm": 0.15234375,
"learning_rate": 7.491740835041156e-05,
"loss": 0.499,
"step": 195
},
{
"epoch": 0.13054830287206268,
"grad_norm": 0.09814453125,
"learning_rate": 7.490848933747772e-05,
"loss": 0.4354,
"step": 200
},
{
"epoch": 0.13381201044386423,
"grad_norm": 0.07177734375,
"learning_rate": 7.489911370249579e-05,
"loss": 0.5372,
"step": 205
},
{
"epoch": 0.1370757180156658,
"grad_norm": 0.0693359375,
"learning_rate": 7.488928155990828e-05,
"loss": 0.6053,
"step": 210
},
{
"epoch": 0.14033942558746737,
"grad_norm": 0.0810546875,
"learning_rate": 7.487899302973e-05,
"loss": 0.4278,
"step": 215
},
{
"epoch": 0.14360313315926893,
"grad_norm": 0.0712890625,
"learning_rate": 7.486824823754662e-05,
"loss": 0.5667,
"step": 220
},
{
"epoch": 0.1468668407310705,
"grad_norm": 0.1591796875,
"learning_rate": 7.485704731451311e-05,
"loss": 0.8726,
"step": 225
},
{
"epoch": 0.15013054830287206,
"grad_norm": 0.0859375,
"learning_rate": 7.484539039735212e-05,
"loss": 0.429,
"step": 230
},
{
"epoch": 0.15339425587467362,
"grad_norm": 0.10546875,
"learning_rate": 7.483327762835236e-05,
"loss": 0.401,
"step": 235
},
{
"epoch": 0.1566579634464752,
"grad_norm": 0.078125,
"learning_rate": 7.482070915536683e-05,
"loss": 0.4806,
"step": 240
},
{
"epoch": 0.15992167101827676,
"grad_norm": 0.0771484375,
"learning_rate": 7.480768513181102e-05,
"loss": 0.62,
"step": 245
},
{
"epoch": 0.16318537859007834,
"grad_norm": 0.0625,
"learning_rate": 7.479420571666103e-05,
"loss": 0.571,
"step": 250
},
{
"epoch": 0.16318537859007834,
"eval_loss": 0.47697925567626953,
"eval_runtime": 97.2401,
"eval_samples_per_second": 1.707,
"eval_steps_per_second": 0.216,
"step": 250
},
{
"epoch": 0.1664490861618799,
"grad_norm": 0.0908203125,
"learning_rate": 7.47802710744517e-05,
"loss": 0.3673,
"step": 255
},
{
"epoch": 0.16971279373368145,
"grad_norm": 0.162109375,
"learning_rate": 7.476588137527446e-05,
"loss": 0.4231,
"step": 260
},
{
"epoch": 0.17297650130548303,
"grad_norm": NaN,
"learning_rate": 7.475103679477539e-05,
"loss": 0.5686,
"step": 265
},
{
"epoch": 0.1762402088772846,
"grad_norm": 0.1591796875,
"learning_rate": 7.473573751415299e-05,
"loss": 0.3383,
"step": 270
},
{
"epoch": 0.17950391644908617,
"grad_norm": 0.08056640625,
"learning_rate": 7.471998372015603e-05,
"loss": 0.4023,
"step": 275
},
{
"epoch": 0.18276762402088773,
"grad_norm": 0.07568359375,
"learning_rate": 7.470377560508124e-05,
"loss": 0.4471,
"step": 280
},
{
"epoch": 0.1860313315926893,
"grad_norm": 0.09521484375,
"learning_rate": 7.468711336677091e-05,
"loss": 0.4911,
"step": 285
},
{
"epoch": 0.18929503916449086,
"grad_norm": 0.12060546875,
"learning_rate": 7.466999720861061e-05,
"loss": 0.4399,
"step": 290
},
{
"epoch": 0.19255874673629242,
"grad_norm": 0.05126953125,
"learning_rate": 7.465242733952656e-05,
"loss": 0.3883,
"step": 295
},
{
"epoch": 0.195822454308094,
"grad_norm": 0.10595703125,
"learning_rate": 7.463440397398319e-05,
"loss": 0.5332,
"step": 300
},
{
"epoch": 0.19908616187989556,
"grad_norm": 0.07421875,
"learning_rate": 7.461592733198044e-05,
"loss": 0.3769,
"step": 305
},
{
"epoch": 0.20234986945169714,
"grad_norm": 0.11083984375,
"learning_rate": 7.459699763905112e-05,
"loss": 0.6075,
"step": 310
},
{
"epoch": 0.2056135770234987,
"grad_norm": 0.06640625,
"learning_rate": 7.45776151262582e-05,
"loss": 0.3442,
"step": 315
},
{
"epoch": 0.20887728459530025,
"grad_norm": 0.0634765625,
"learning_rate": 7.455778003019187e-05,
"loss": 0.3987,
"step": 320
},
{
"epoch": 0.21214099216710183,
"grad_norm": 0.0498046875,
"learning_rate": 7.453749259296675e-05,
"loss": 0.377,
"step": 325
},
{
"epoch": 0.2154046997389034,
"grad_norm": 0.0732421875,
"learning_rate": 7.451675306221892e-05,
"loss": 0.4471,
"step": 330
},
{
"epoch": 0.21866840731070497,
"grad_norm": 0.11181640625,
"learning_rate": 7.449556169110285e-05,
"loss": 0.5509,
"step": 335
},
{
"epoch": 0.22193211488250653,
"grad_norm": 0.0947265625,
"learning_rate": 7.447391873828839e-05,
"loss": 0.5118,
"step": 340
},
{
"epoch": 0.22519582245430808,
"grad_norm": 0.0546875,
"learning_rate": 7.44518244679575e-05,
"loss": 0.4309,
"step": 345
},
{
"epoch": 0.22845953002610966,
"grad_norm": 0.08447265625,
"learning_rate": 7.442927914980116e-05,
"loss": 0.3991,
"step": 350
},
{
"epoch": 0.23172323759791122,
"grad_norm": 0.087890625,
"learning_rate": 7.440628305901597e-05,
"loss": 0.5483,
"step": 355
},
{
"epoch": 0.2349869451697128,
"grad_norm": 0.1279296875,
"learning_rate": 7.438283647630083e-05,
"loss": 0.4699,
"step": 360
},
{
"epoch": 0.23825065274151436,
"grad_norm": 0.046630859375,
"learning_rate": 7.435893968785353e-05,
"loss": 0.4702,
"step": 365
},
{
"epoch": 0.24151436031331594,
"grad_norm": 0.095703125,
"learning_rate": 7.433459298536721e-05,
"loss": 0.4805,
"step": 370
},
{
"epoch": 0.2447780678851175,
"grad_norm": 0.07177734375,
"learning_rate": 7.430979666602687e-05,
"loss": 0.4445,
"step": 375
},
{
"epoch": 0.24804177545691905,
"grad_norm": 0.06787109375,
"learning_rate": 7.428455103250569e-05,
"loss": 0.5559,
"step": 380
},
{
"epoch": 0.25130548302872063,
"grad_norm": 0.07666015625,
"learning_rate": 7.425885639296131e-05,
"loss": 0.5209,
"step": 385
},
{
"epoch": 0.2545691906005222,
"grad_norm": 0.03955078125,
"learning_rate": 7.423271306103217e-05,
"loss": 0.3874,
"step": 390
},
{
"epoch": 0.25783289817232374,
"grad_norm": 0.08154296875,
"learning_rate": 7.420612135583353e-05,
"loss": 0.4156,
"step": 395
},
{
"epoch": 0.26109660574412535,
"grad_norm": 0.0625,
"learning_rate": 7.417908160195377e-05,
"loss": 0.36,
"step": 400
},
{
"epoch": 0.2643603133159269,
"grad_norm": 0.10302734375,
"learning_rate": 7.415159412945024e-05,
"loss": 0.4679,
"step": 405
},
{
"epoch": 0.26762402088772846,
"grad_norm": 0.09130859375,
"learning_rate": 7.412365927384536e-05,
"loss": 0.4482,
"step": 410
},
{
"epoch": 0.27088772845953,
"grad_norm": 0.053955078125,
"learning_rate": 7.409527737612241e-05,
"loss": 0.4426,
"step": 415
},
{
"epoch": 0.2741514360313316,
"grad_norm": 0.047119140625,
"learning_rate": 7.406644878272151e-05,
"loss": 0.454,
"step": 420
},
{
"epoch": 0.2774151436031332,
"grad_norm": 0.064453125,
"learning_rate": 7.403717384553527e-05,
"loss": 0.4397,
"step": 425
},
{
"epoch": 0.28067885117493474,
"grad_norm": 0.07763671875,
"learning_rate": 7.400745292190453e-05,
"loss": 0.5127,
"step": 430
},
{
"epoch": 0.2839425587467363,
"grad_norm": 0.08056640625,
"learning_rate": 7.397728637461407e-05,
"loss": 0.4335,
"step": 435
},
{
"epoch": 0.28720626631853785,
"grad_norm": 0.07763671875,
"learning_rate": 7.394667457188805e-05,
"loss": 0.6522,
"step": 440
},
{
"epoch": 0.2904699738903394,
"grad_norm": 0.08544921875,
"learning_rate": 7.391561788738562e-05,
"loss": 0.5314,
"step": 445
},
{
"epoch": 0.293733681462141,
"grad_norm": 0.130859375,
"learning_rate": 7.388411670019631e-05,
"loss": 0.4786,
"step": 450
},
{
"epoch": 0.29699738903394257,
"grad_norm": 0.07177734375,
"learning_rate": 7.385217139483543e-05,
"loss": 0.4069,
"step": 455
},
{
"epoch": 0.3002610966057441,
"grad_norm": 0.09765625,
"learning_rate": 7.381978236123936e-05,
"loss": 0.4885,
"step": 460
},
{
"epoch": 0.3035248041775457,
"grad_norm": 0.08935546875,
"learning_rate": 7.378694999476078e-05,
"loss": 0.4406,
"step": 465
},
{
"epoch": 0.30678851174934724,
"grad_norm": 0.11962890625,
"learning_rate": 7.375367469616387e-05,
"loss": 0.8256,
"step": 470
},
{
"epoch": 0.31005221932114885,
"grad_norm": 0.0791015625,
"learning_rate": 7.37199568716194e-05,
"loss": 0.4636,
"step": 475
},
{
"epoch": 0.3133159268929504,
"grad_norm": 0.10009765625,
"learning_rate": 7.36857969326998e-05,
"loss": 0.6288,
"step": 480
},
{
"epoch": 0.31657963446475196,
"grad_norm": 0.05810546875,
"learning_rate": 7.365119529637406e-05,
"loss": 0.3789,
"step": 485
},
{
"epoch": 0.3198433420365535,
"grad_norm": 0.11767578125,
"learning_rate": 7.361615238500275e-05,
"loss": 0.6206,
"step": 490
},
{
"epoch": 0.32310704960835507,
"grad_norm": 0.0458984375,
"learning_rate": 7.358066862633276e-05,
"loss": 0.3703,
"step": 495
},
{
"epoch": 0.3263707571801567,
"grad_norm": 0.057373046875,
"learning_rate": 7.35447444534922e-05,
"loss": 0.401,
"step": 500
},
{
"epoch": 0.3263707571801567,
"eval_loss": 0.46315205097198486,
"eval_runtime": 97.1749,
"eval_samples_per_second": 1.708,
"eval_steps_per_second": 0.216,
"step": 500
},
{
"epoch": 0.32963446475195823,
"grad_norm": 0.04833984375,
"learning_rate": 7.350838030498496e-05,
"loss": 0.4603,
"step": 505
},
{
"epoch": 0.3328981723237598,
"grad_norm": 0.056396484375,
"learning_rate": 7.347157662468547e-05,
"loss": 0.4309,
"step": 510
},
{
"epoch": 0.33616187989556134,
"grad_norm": 0.057373046875,
"learning_rate": 7.343433386183327e-05,
"loss": 0.3169,
"step": 515
},
{
"epoch": 0.3394255874673629,
"grad_norm": 0.09521484375,
"learning_rate": 7.339665247102748e-05,
"loss": 0.5101,
"step": 520
},
{
"epoch": 0.3426892950391645,
"grad_norm": 0.053955078125,
"learning_rate": 7.33585329122213e-05,
"loss": 0.459,
"step": 525
},
{
"epoch": 0.34595300261096606,
"grad_norm": 0.056640625,
"learning_rate": 7.331997565071634e-05,
"loss": 0.3522,
"step": 530
},
{
"epoch": 0.3492167101827676,
"grad_norm": 0.076171875,
"learning_rate": 7.328098115715699e-05,
"loss": 0.4109,
"step": 535
},
{
"epoch": 0.3524804177545692,
"grad_norm": 0.053466796875,
"learning_rate": 7.324154990752467e-05,
"loss": 0.4155,
"step": 540
},
{
"epoch": 0.35574412532637073,
"grad_norm": 0.091796875,
"learning_rate": 7.3201682383132e-05,
"loss": 0.4897,
"step": 545
},
{
"epoch": 0.35900783289817234,
"grad_norm": 0.080078125,
"learning_rate": 7.316137907061691e-05,
"loss": 0.4048,
"step": 550
},
{
"epoch": 0.3622715404699739,
"grad_norm": 0.08642578125,
"learning_rate": 7.312064046193679e-05,
"loss": 0.5676,
"step": 555
},
{
"epoch": 0.36553524804177545,
"grad_norm": 0.060546875,
"learning_rate": 7.307946705436233e-05,
"loss": 0.3708,
"step": 560
},
{
"epoch": 0.368798955613577,
"grad_norm": 0.1162109375,
"learning_rate": 7.30378593504716e-05,
"loss": 0.4704,
"step": 565
},
{
"epoch": 0.3720626631853786,
"grad_norm": 0.052490234375,
"learning_rate": 7.299581785814385e-05,
"loss": 0.5599,
"step": 570
},
{
"epoch": 0.37532637075718017,
"grad_norm": 0.048828125,
"learning_rate": 7.295334309055327e-05,
"loss": 0.3238,
"step": 575
},
{
"epoch": 0.3785900783289817,
"grad_norm": 0.1240234375,
"learning_rate": 7.291043556616283e-05,
"loss": 0.7988,
"step": 580
},
{
"epoch": 0.3818537859007833,
"grad_norm": 0.09814453125,
"learning_rate": 7.286709580871783e-05,
"loss": 0.4842,
"step": 585
},
{
"epoch": 0.38511749347258484,
"grad_norm": 0.07177734375,
"learning_rate": 7.282332434723962e-05,
"loss": 0.5732,
"step": 590
},
{
"epoch": 0.38838120104438645,
"grad_norm": 0.0654296875,
"learning_rate": 7.277912171601907e-05,
"loss": 0.4025,
"step": 595
},
{
"epoch": 0.391644908616188,
"grad_norm": 0.05029296875,
"learning_rate": 7.273448845461008e-05,
"loss": 0.3352,
"step": 600
},
{
"epoch": 0.39490861618798956,
"grad_norm": 0.045166015625,
"learning_rate": 7.268942510782298e-05,
"loss": 0.3121,
"step": 605
},
{
"epoch": 0.3981723237597911,
"grad_norm": 0.056396484375,
"learning_rate": 7.264393222571783e-05,
"loss": 0.3868,
"step": 610
},
{
"epoch": 0.40143603133159267,
"grad_norm": 0.0556640625,
"learning_rate": 7.259801036359786e-05,
"loss": 0.4367,
"step": 615
},
{
"epoch": 0.4046997389033943,
"grad_norm": 0.0947265625,
"learning_rate": 7.255166008200252e-05,
"loss": 0.472,
"step": 620
},
{
"epoch": 0.40796344647519583,
"grad_norm": 0.08544921875,
"learning_rate": 7.250488194670071e-05,
"loss": 0.6129,
"step": 625
},
{
"epoch": 0.4112271540469974,
"grad_norm": 0.06689453125,
"learning_rate": 7.245767652868393e-05,
"loss": 0.4201,
"step": 630
},
{
"epoch": 0.41449086161879894,
"grad_norm": 0.2236328125,
"learning_rate": 7.241004440415919e-05,
"loss": 0.43,
"step": 635
},
{
"epoch": 0.4177545691906005,
"grad_norm": 0.154296875,
"learning_rate": 7.236198615454207e-05,
"loss": 0.9362,
"step": 640
},
{
"epoch": 0.4210182767624021,
"grad_norm": 0.08642578125,
"learning_rate": 7.231350236644957e-05,
"loss": 0.5425,
"step": 645
},
{
"epoch": 0.42428198433420367,
"grad_norm": 0.0859375,
"learning_rate": 7.2264593631693e-05,
"loss": 0.4919,
"step": 650
},
{
"epoch": 0.4275456919060052,
"grad_norm": 0.11328125,
"learning_rate": 7.221526054727071e-05,
"loss": 0.475,
"step": 655
},
{
"epoch": 0.4308093994778068,
"grad_norm": 0.059326171875,
"learning_rate": 7.216550371536083e-05,
"loss": 0.4382,
"step": 660
},
{
"epoch": 0.43407310704960833,
"grad_norm": 0.07861328125,
"learning_rate": 7.21153237433139e-05,
"loss": 0.4489,
"step": 665
},
{
"epoch": 0.43733681462140994,
"grad_norm": 0.05859375,
"learning_rate": 7.20647212436455e-05,
"loss": 0.4689,
"step": 670
},
{
"epoch": 0.4406005221932115,
"grad_norm": 0.0546875,
"learning_rate": 7.201369683402869e-05,
"loss": 0.3707,
"step": 675
},
{
"epoch": 0.44386422976501305,
"grad_norm": 0.158203125,
"learning_rate": 7.196225113728656e-05,
"loss": 0.4597,
"step": 680
},
{
"epoch": 0.4471279373368146,
"grad_norm": 0.06787109375,
"learning_rate": 7.191038478138458e-05,
"loss": 0.5113,
"step": 685
},
{
"epoch": 0.45039164490861616,
"grad_norm": 0.07861328125,
"learning_rate": 7.18580983994229e-05,
"loss": 0.3574,
"step": 690
},
{
"epoch": 0.4536553524804178,
"grad_norm": 0.1298828125,
"learning_rate": 7.180539262962875e-05,
"loss": 0.853,
"step": 695
},
{
"epoch": 0.45691906005221933,
"grad_norm": 0.038818359375,
"learning_rate": 7.175226811534853e-05,
"loss": 0.6318,
"step": 700
},
{
"epoch": 0.4601827676240209,
"grad_norm": 0.130859375,
"learning_rate": 7.169872550503992e-05,
"loss": 0.5808,
"step": 705
},
{
"epoch": 0.46344647519582244,
"grad_norm": 0.08984375,
"learning_rate": 7.164476545226415e-05,
"loss": 0.5037,
"step": 710
},
{
"epoch": 0.466710182767624,
"grad_norm": 0.109375,
"learning_rate": 7.159038861567783e-05,
"loss": 0.5246,
"step": 715
},
{
"epoch": 0.4699738903394256,
"grad_norm": 0.109375,
"learning_rate": 7.153559565902503e-05,
"loss": 0.5053,
"step": 720
},
{
"epoch": 0.47323759791122716,
"grad_norm": 0.07275390625,
"learning_rate": 7.148038725112913e-05,
"loss": 0.472,
"step": 725
},
{
"epoch": 0.4765013054830287,
"grad_norm": 0.1416015625,
"learning_rate": 7.142476406588466e-05,
"loss": 0.5306,
"step": 730
},
{
"epoch": 0.47976501305483027,
"grad_norm": 0.07421875,
"learning_rate": 7.136872678224905e-05,
"loss": 0.5424,
"step": 735
},
{
"epoch": 0.4830287206266319,
"grad_norm": 0.08154296875,
"learning_rate": 7.131227608423442e-05,
"loss": 0.3975,
"step": 740
},
{
"epoch": 0.48629242819843344,
"grad_norm": 0.059814453125,
"learning_rate": 7.125541266089917e-05,
"loss": 0.6061,
"step": 745
},
{
"epoch": 0.489556135770235,
"grad_norm": 0.05224609375,
"learning_rate": 7.119813720633953e-05,
"loss": 0.465,
"step": 750
},
{
"epoch": 0.489556135770235,
"eval_loss": 0.4532851278781891,
"eval_runtime": 97.0469,
"eval_samples_per_second": 1.711,
"eval_steps_per_second": 0.216,
"step": 750
},
{
"epoch": 0.49281984334203655,
"grad_norm": NaN,
"learning_rate": 7.114045041968121e-05,
"loss": 0.5156,
"step": 755
},
{
"epoch": 0.4960835509138381,
"grad_norm": 0.1796875,
"learning_rate": 7.108235300507073e-05,
"loss": 0.4453,
"step": 760
},
{
"epoch": 0.4993472584856397,
"grad_norm": 0.0693359375,
"learning_rate": 7.102384567166694e-05,
"loss": 0.6377,
"step": 765
},
{
"epoch": 0.5026109660574413,
"grad_norm": 0.09814453125,
"learning_rate": 7.096492913363228e-05,
"loss": 0.463,
"step": 770
},
{
"epoch": 0.5058746736292428,
"grad_norm": 0.0888671875,
"learning_rate": 7.090560411012412e-05,
"loss": 0.4592,
"step": 775
},
{
"epoch": 0.5091383812010444,
"grad_norm": 0.0732421875,
"learning_rate": 7.084587132528592e-05,
"loss": 0.422,
"step": 780
},
{
"epoch": 0.512402088772846,
"grad_norm": 0.1064453125,
"learning_rate": 7.078573150823847e-05,
"loss": 0.5943,
"step": 785
},
{
"epoch": 0.5156657963446475,
"grad_norm": 0.07470703125,
"learning_rate": 7.072518539307092e-05,
"loss": 0.3763,
"step": 790
},
{
"epoch": 0.5189295039164491,
"grad_norm": 0.1416015625,
"learning_rate": 7.066423371883183e-05,
"loss": 0.5364,
"step": 795
},
{
"epoch": 0.5221932114882507,
"grad_norm": 0.0595703125,
"learning_rate": 7.060287722952024e-05,
"loss": 0.4057,
"step": 800
},
{
"epoch": 0.5254569190600522,
"grad_norm": 0.10205078125,
"learning_rate": 7.054111667407642e-05,
"loss": 0.3785,
"step": 805
},
{
"epoch": 0.5287206266318538,
"grad_norm": 0.07177734375,
"learning_rate": 7.04789528063729e-05,
"loss": 0.5555,
"step": 810
},
{
"epoch": 0.5319843342036553,
"grad_norm": 0.095703125,
"learning_rate": 7.041638638520513e-05,
"loss": 0.4257,
"step": 815
},
{
"epoch": 0.5352480417754569,
"grad_norm": 0.130859375,
"learning_rate": 7.035341817428235e-05,
"loss": 0.4849,
"step": 820
},
{
"epoch": 0.5385117493472585,
"grad_norm": 0.06103515625,
"learning_rate": 7.029004894221812e-05,
"loss": 0.3831,
"step": 825
},
{
"epoch": 0.54177545691906,
"grad_norm": 0.08349609375,
"learning_rate": 7.022627946252105e-05,
"loss": 0.5633,
"step": 830
},
{
"epoch": 0.5450391644908616,
"grad_norm": 0.0927734375,
"learning_rate": 7.016211051358534e-05,
"loss": 0.4286,
"step": 835
},
{
"epoch": 0.5483028720626631,
"grad_norm": 0.053466796875,
"learning_rate": 7.009754287868123e-05,
"loss": 0.4714,
"step": 840
},
{
"epoch": 0.5515665796344648,
"grad_norm": 0.06201171875,
"learning_rate": 7.003257734594548e-05,
"loss": 0.5155,
"step": 845
},
{
"epoch": 0.5548302872062664,
"grad_norm": 0.08544921875,
"learning_rate": 6.996721470837176e-05,
"loss": 0.3779,
"step": 850
},
{
"epoch": 0.5580939947780679,
"grad_norm": 0.0888671875,
"learning_rate": 6.990145576380089e-05,
"loss": 0.5331,
"step": 855
},
{
"epoch": 0.5613577023498695,
"grad_norm": 0.09130859375,
"learning_rate": 6.983530131491127e-05,
"loss": 0.536,
"step": 860
},
{
"epoch": 0.564621409921671,
"grad_norm": 0.058837890625,
"learning_rate": 6.976875216920886e-05,
"loss": 0.4476,
"step": 865
},
{
"epoch": 0.5678851174934726,
"grad_norm": 0.0556640625,
"learning_rate": 6.970180913901751e-05,
"loss": 0.495,
"step": 870
},
{
"epoch": 0.5711488250652742,
"grad_norm": 0.0556640625,
"learning_rate": 6.963447304146893e-05,
"loss": 0.6087,
"step": 875
},
{
"epoch": 0.5744125326370757,
"grad_norm": 0.0439453125,
"learning_rate": 6.956674469849279e-05,
"loss": 0.3776,
"step": 880
},
{
"epoch": 0.5776762402088773,
"grad_norm": 0.07373046875,
"learning_rate": 6.949862493680663e-05,
"loss": 0.3869,
"step": 885
},
{
"epoch": 0.5809399477806788,
"grad_norm": 0.09912109375,
"learning_rate": 6.943011458790582e-05,
"loss": 0.5347,
"step": 890
},
{
"epoch": 0.5842036553524804,
"grad_norm": 0.07373046875,
"learning_rate": 6.936121448805336e-05,
"loss": 0.4809,
"step": 895
},
{
"epoch": 0.587467362924282,
"grad_norm": 0.1630859375,
"learning_rate": 6.929192547826971e-05,
"loss": 0.4091,
"step": 900
},
{
"epoch": 0.5907310704960835,
"grad_norm": 0.09375,
"learning_rate": 6.922224840432248e-05,
"loss": 0.5521,
"step": 905
},
{
"epoch": 0.5939947780678851,
"grad_norm": 0.09326171875,
"learning_rate": 6.915218411671621e-05,
"loss": 0.4078,
"step": 910
},
{
"epoch": 0.5972584856396866,
"grad_norm": 0.1767578125,
"learning_rate": 6.908173347068183e-05,
"loss": 0.5531,
"step": 915
},
{
"epoch": 0.6005221932114883,
"grad_norm": 0.052001953125,
"learning_rate": 6.901089732616634e-05,
"loss": 0.46,
"step": 920
},
{
"epoch": 0.6037859007832899,
"grad_norm": 0.0966796875,
"learning_rate": 6.893967654782231e-05,
"loss": 0.3918,
"step": 925
},
{
"epoch": 0.6070496083550914,
"grad_norm": 0.05615234375,
"learning_rate": 6.886807200499723e-05,
"loss": 0.4585,
"step": 930
},
{
"epoch": 0.610313315926893,
"grad_norm": 0.099609375,
"learning_rate": 6.879608457172302e-05,
"loss": 0.3973,
"step": 935
},
{
"epoch": 0.6135770234986945,
"grad_norm": 0.0791015625,
"learning_rate": 6.872371512670529e-05,
"loss": 0.3886,
"step": 940
},
{
"epoch": 0.6168407310704961,
"grad_norm": 0.07080078125,
"learning_rate": 6.865096455331263e-05,
"loss": 0.577,
"step": 945
},
{
"epoch": 0.6201044386422977,
"grad_norm": 0.1015625,
"learning_rate": 6.857783373956578e-05,
"loss": 0.4755,
"step": 950
},
{
"epoch": 0.6233681462140992,
"grad_norm": 0.13671875,
"learning_rate": 6.850432357812692e-05,
"loss": 0.5983,
"step": 955
},
{
"epoch": 0.6266318537859008,
"grad_norm": 0.03466796875,
"learning_rate": 6.843043496628861e-05,
"loss": 0.3643,
"step": 960
},
{
"epoch": 0.6298955613577023,
"grad_norm": 0.09814453125,
"learning_rate": 6.835616880596298e-05,
"loss": 0.3388,
"step": 965
},
{
"epoch": 0.6331592689295039,
"grad_norm": 0.0830078125,
"learning_rate": 6.82815260036706e-05,
"loss": 0.4281,
"step": 970
},
{
"epoch": 0.6364229765013055,
"grad_norm": 0.05810546875,
"learning_rate": 6.820650747052951e-05,
"loss": 0.3822,
"step": 975
},
{
"epoch": 0.639686684073107,
"grad_norm": 0.103515625,
"learning_rate": 6.81311141222441e-05,
"loss": 0.493,
"step": 980
},
{
"epoch": 0.6429503916449086,
"grad_norm": 0.078125,
"learning_rate": 6.805534687909379e-05,
"loss": 0.3711,
"step": 985
},
{
"epoch": 0.6462140992167101,
"grad_norm": 0.060546875,
"learning_rate": 6.7979206665922e-05,
"loss": 0.3482,
"step": 990
},
{
"epoch": 0.6494778067885117,
"grad_norm": 0.05078125,
"learning_rate": 6.790269441212471e-05,
"loss": 0.3724,
"step": 995
},
{
"epoch": 0.6527415143603134,
"grad_norm": 0.080078125,
"learning_rate": 6.782581105163919e-05,
"loss": 0.4655,
"step": 1000
},
{
"epoch": 0.6527415143603134,
"eval_loss": 0.44582512974739075,
"eval_runtime": 97.0933,
"eval_samples_per_second": 1.71,
"eval_steps_per_second": 0.216,
"step": 1000
},
{
"epoch": 0.6560052219321149,
"grad_norm": 0.06591796875,
"learning_rate": 6.774855752293254e-05,
"loss": 0.4789,
"step": 1005
},
{
"epoch": 0.6592689295039165,
"grad_norm": 0.1015625,
"learning_rate": 6.767093476899033e-05,
"loss": 0.4196,
"step": 1010
},
{
"epoch": 0.662532637075718,
"grad_norm": 0.052734375,
"learning_rate": 6.759294373730496e-05,
"loss": 0.3802,
"step": 1015
},
{
"epoch": 0.6657963446475196,
"grad_norm": 0.07861328125,
"learning_rate": 6.751458537986426e-05,
"loss": 0.4858,
"step": 1020
},
{
"epoch": 0.6690600522193212,
"grad_norm": 0.1015625,
"learning_rate": 6.74358606531397e-05,
"loss": 0.4663,
"step": 1025
},
{
"epoch": 0.6723237597911227,
"grad_norm": 0.07080078125,
"learning_rate": 6.735677051807481e-05,
"loss": 0.5681,
"step": 1030
},
{
"epoch": 0.6755874673629243,
"grad_norm": 0.09326171875,
"learning_rate": 6.727731594007343e-05,
"loss": 0.5687,
"step": 1035
},
{
"epoch": 0.6788511749347258,
"grad_norm": 0.06103515625,
"learning_rate": 6.719749788898796e-05,
"loss": 0.3671,
"step": 1040
},
{
"epoch": 0.6821148825065274,
"grad_norm": 0.1552734375,
"learning_rate": 6.711731733910743e-05,
"loss": 0.5654,
"step": 1045
},
{
"epoch": 0.685378590078329,
"grad_norm": 0.07275390625,
"learning_rate": 6.703677526914571e-05,
"loss": 0.4642,
"step": 1050
},
{
"epoch": 0.6886422976501305,
"grad_norm": 0.046142578125,
"learning_rate": 6.69558726622295e-05,
"loss": 0.4356,
"step": 1055
},
{
"epoch": 0.6919060052219321,
"grad_norm": 0.08203125,
"learning_rate": 6.687461050588637e-05,
"loss": 0.4206,
"step": 1060
},
{
"epoch": 0.6951697127937336,
"grad_norm": 0.054931640625,
"learning_rate": 6.679298979203266e-05,
"loss": 0.4315,
"step": 1065
},
{
"epoch": 0.6984334203655352,
"grad_norm": 0.056884765625,
"learning_rate": 6.671101151696142e-05,
"loss": 0.6175,
"step": 1070
},
{
"epoch": 0.7016971279373369,
"grad_norm": 0.078125,
"learning_rate": 6.662867668133022e-05,
"loss": 0.5087,
"step": 1075
},
{
"epoch": 0.7049608355091384,
"grad_norm": 0.06494140625,
"learning_rate": 6.654598629014891e-05,
"loss": 0.5919,
"step": 1080
},
{
"epoch": 0.70822454308094,
"grad_norm": 0.080078125,
"learning_rate": 6.646294135276744e-05,
"loss": 0.4144,
"step": 1085
},
{
"epoch": 0.7114882506527415,
"grad_norm": 0.07763671875,
"learning_rate": 6.637954288286343e-05,
"loss": 0.514,
"step": 1090
},
{
"epoch": 0.7147519582245431,
"grad_norm": 0.109375,
"learning_rate": 6.62957918984299e-05,
"loss": 0.4498,
"step": 1095
},
{
"epoch": 0.7180156657963447,
"grad_norm": 0.061767578125,
"learning_rate": 6.621168942176275e-05,
"loss": 0.3705,
"step": 1100
},
{
"epoch": 0.7212793733681462,
"grad_norm": 0.30859375,
"learning_rate": 6.612723647944835e-05,
"loss": 0.4258,
"step": 1105
},
{
"epoch": 0.7245430809399478,
"grad_norm": 0.049072265625,
"learning_rate": 6.6042434102351e-05,
"loss": 0.4407,
"step": 1110
},
{
"epoch": 0.7278067885117493,
"grad_norm": 0.056396484375,
"learning_rate": 6.595728332560032e-05,
"loss": 0.4246,
"step": 1115
},
{
"epoch": 0.7310704960835509,
"grad_norm": 0.07958984375,
"learning_rate": 6.587178518857858e-05,
"loss": 0.5558,
"step": 1120
},
{
"epoch": 0.7343342036553525,
"grad_norm": 0.06884765625,
"learning_rate": 6.578594073490814e-05,
"loss": 0.4846,
"step": 1125
},
{
"epoch": 0.737597911227154,
"grad_norm": 0.10986328125,
"learning_rate": 6.569975101243855e-05,
"loss": 0.3695,
"step": 1130
},
{
"epoch": 0.7408616187989556,
"grad_norm": 0.09619140625,
"learning_rate": 6.561321707323392e-05,
"loss": 0.499,
"step": 1135
},
{
"epoch": 0.7441253263707572,
"grad_norm": 0.06982421875,
"learning_rate": 6.552633997355995e-05,
"loss": 0.4023,
"step": 1140
},
{
"epoch": 0.7473890339425587,
"grad_norm": 0.10693359375,
"learning_rate": 6.543912077387108e-05,
"loss": 0.4374,
"step": 1145
},
{
"epoch": 0.7506527415143603,
"grad_norm": 0.07421875,
"learning_rate": 6.535156053879758e-05,
"loss": 0.5137,
"step": 1150
},
{
"epoch": 0.7539164490861618,
"grad_norm": 0.11865234375,
"learning_rate": 6.52636603371325e-05,
"loss": 0.4295,
"step": 1155
},
{
"epoch": 0.7571801566579635,
"grad_norm": 0.05908203125,
"learning_rate": 6.517542124181867e-05,
"loss": 0.3519,
"step": 1160
},
{
"epoch": 0.7604438642297651,
"grad_norm": 0.09814453125,
"learning_rate": 6.508684432993559e-05,
"loss": 0.4373,
"step": 1165
},
{
"epoch": 0.7637075718015666,
"grad_norm": 0.0830078125,
"learning_rate": 6.499793068268623e-05,
"loss": 0.4145,
"step": 1170
},
{
"epoch": 0.7669712793733682,
"grad_norm": 0.0693359375,
"learning_rate": 6.490868138538394e-05,
"loss": 0.4133,
"step": 1175
},
{
"epoch": 0.7702349869451697,
"grad_norm": 0.119140625,
"learning_rate": 6.481909752743909e-05,
"loss": 0.4096,
"step": 1180
},
{
"epoch": 0.7734986945169713,
"grad_norm": 0.087890625,
"learning_rate": 6.472918020234584e-05,
"loss": 0.4889,
"step": 1185
},
{
"epoch": 0.7767624020887729,
"grad_norm": 0.078125,
"learning_rate": 6.463893050766879e-05,
"loss": 0.3739,
"step": 1190
},
{
"epoch": 0.7800261096605744,
"grad_norm": 0.06640625,
"learning_rate": 6.454834954502954e-05,
"loss": 0.3955,
"step": 1195
},
{
"epoch": 0.783289817232376,
"grad_norm": 0.05615234375,
"learning_rate": 6.44574384200933e-05,
"loss": 0.475,
"step": 1200
},
{
"epoch": 0.7865535248041775,
"grad_norm": 0.146484375,
"learning_rate": 6.436619824255533e-05,
"loss": 0.4624,
"step": 1205
},
{
"epoch": 0.7898172323759791,
"grad_norm": 0.08984375,
"learning_rate": 6.427463012612748e-05,
"loss": 0.5408,
"step": 1210
},
{
"epoch": 0.7930809399477807,
"grad_norm": 0.091796875,
"learning_rate": 6.418273518852448e-05,
"loss": 0.4784,
"step": 1215
},
{
"epoch": 0.7963446475195822,
"grad_norm": 0.138671875,
"learning_rate": 6.409051455145043e-05,
"loss": 0.4466,
"step": 1220
},
{
"epoch": 0.7996083550913838,
"grad_norm": 0.130859375,
"learning_rate": 6.399796934058499e-05,
"loss": 0.429,
"step": 1225
},
{
"epoch": 0.8028720626631853,
"grad_norm": 0.0791015625,
"learning_rate": 6.39051006855697e-05,
"loss": 0.4353,
"step": 1230
},
{
"epoch": 0.806135770234987,
"grad_norm": 0.09423828125,
"learning_rate": 6.381190971999419e-05,
"loss": 0.5023,
"step": 1235
},
{
"epoch": 0.8093994778067886,
"grad_norm": 0.10693359375,
"learning_rate": 6.371839758138234e-05,
"loss": 0.5856,
"step": 1240
},
{
"epoch": 0.8126631853785901,
"grad_norm": 0.06396484375,
"learning_rate": 6.362456541117837e-05,
"loss": 0.4508,
"step": 1245
},
{
"epoch": 0.8159268929503917,
"grad_norm": 0.05810546875,
"learning_rate": 6.353041435473294e-05,
"loss": 0.406,
"step": 1250
},
{
"epoch": 0.8159268929503917,
"eval_loss": 0.44355788826942444,
"eval_runtime": 97.2181,
"eval_samples_per_second": 1.708,
"eval_steps_per_second": 0.216,
"step": 1250
},
{
"epoch": 0.8191906005221932,
"grad_norm": 0.087890625,
"learning_rate": 6.343594556128913e-05,
"loss": 0.4317,
"step": 1255
},
{
"epoch": 0.8224543080939948,
"grad_norm": 0.0771484375,
"learning_rate": 6.334116018396848e-05,
"loss": 0.391,
"step": 1260
},
{
"epoch": 0.8257180156657964,
"grad_norm": 0.06591796875,
"learning_rate": 6.324605937975685e-05,
"loss": 0.5945,
"step": 1265
},
{
"epoch": 0.8289817232375979,
"grad_norm": 0.10009765625,
"learning_rate": 6.31506443094903e-05,
"loss": 0.4871,
"step": 1270
},
{
"epoch": 0.8322454308093995,
"grad_norm": 0.10546875,
"learning_rate": 6.305491613784096e-05,
"loss": 0.6679,
"step": 1275
},
{
"epoch": 0.835509138381201,
"grad_norm": 0.04736328125,
"learning_rate": 6.295887603330279e-05,
"loss": 0.3991,
"step": 1280
},
{
"epoch": 0.8387728459530026,
"grad_norm": 0.08837890625,
"learning_rate": 6.286252516817732e-05,
"loss": 0.3772,
"step": 1285
},
{
"epoch": 0.8420365535248042,
"grad_norm": 0.04150390625,
"learning_rate": 6.276586471855933e-05,
"loss": 0.3838,
"step": 1290
},
{
"epoch": 0.8453002610966057,
"grad_norm": 0.0712890625,
"learning_rate": 6.266889586432253e-05,
"loss": 0.4486,
"step": 1295
},
{
"epoch": 0.8485639686684073,
"grad_norm": 0.09326171875,
"learning_rate": 6.257161978910512e-05,
"loss": 0.5229,
"step": 1300
},
{
"epoch": 0.8518276762402088,
"grad_norm": 0.061279296875,
"learning_rate": 6.247403768029532e-05,
"loss": 0.3971,
"step": 1305
},
{
"epoch": 0.8550913838120104,
"grad_norm": 0.068359375,
"learning_rate": 6.237615072901696e-05,
"loss": 0.3869,
"step": 1310
},
{
"epoch": 0.858355091383812,
"grad_norm": 0.09521484375,
"learning_rate": 6.227796013011487e-05,
"loss": 0.4435,
"step": 1315
},
{
"epoch": 0.8616187989556136,
"grad_norm": NaN,
"learning_rate": 6.217946708214033e-05,
"loss": 0.5546,
"step": 1320
},
{
"epoch": 0.8648825065274152,
"grad_norm": 0.1240234375,
"learning_rate": 6.20806727873364e-05,
"loss": 0.4012,
"step": 1325
},
{
"epoch": 0.8681462140992167,
"grad_norm": 0.19140625,
"learning_rate": 6.19815784516233e-05,
"loss": 0.4893,
"step": 1330
},
{
"epoch": 0.8714099216710183,
"grad_norm": 0.07666015625,
"learning_rate": 6.188218528458363e-05,
"loss": 0.4179,
"step": 1335
},
{
"epoch": 0.8746736292428199,
"grad_norm": 0.04931640625,
"learning_rate": 6.178249449944768e-05,
"loss": 0.4676,
"step": 1340
},
{
"epoch": 0.8779373368146214,
"grad_norm": 0.049560546875,
"learning_rate": 6.168250731307853e-05,
"loss": 0.3883,
"step": 1345
},
{
"epoch": 0.881201044386423,
"grad_norm": 0.083984375,
"learning_rate": 6.158222494595731e-05,
"loss": 0.5093,
"step": 1350
},
{
"epoch": 0.8844647519582245,
"grad_norm": 0.061767578125,
"learning_rate": 6.148164862216815e-05,
"loss": 0.4054,
"step": 1355
},
{
"epoch": 0.8877284595300261,
"grad_norm": 0.064453125,
"learning_rate": 6.138077956938341e-05,
"loss": 0.4264,
"step": 1360
},
{
"epoch": 0.8909921671018277,
"grad_norm": 0.06591796875,
"learning_rate": 6.127961901884854e-05,
"loss": 0.5763,
"step": 1365
},
{
"epoch": 0.8942558746736292,
"grad_norm": 0.12158203125,
"learning_rate": 6.117816820536717e-05,
"loss": 0.453,
"step": 1370
},
{
"epoch": 0.8975195822454308,
"grad_norm": 0.06103515625,
"learning_rate": 6.107642836728597e-05,
"loss": 0.4166,
"step": 1375
},
{
"epoch": 0.9007832898172323,
"grad_norm": 0.059814453125,
"learning_rate": 6.097440074647955e-05,
"loss": 0.3718,
"step": 1380
},
{
"epoch": 0.9040469973890339,
"grad_norm": 0.10205078125,
"learning_rate": 6.087208658833532e-05,
"loss": 0.4553,
"step": 1385
},
{
"epoch": 0.9073107049608355,
"grad_norm": 0.07421875,
"learning_rate": 6.076948714173824e-05,
"loss": 0.4116,
"step": 1390
},
{
"epoch": 0.910574412532637,
"grad_norm": 0.07177734375,
"learning_rate": 6.066660365905563e-05,
"loss": 0.4693,
"step": 1395
},
{
"epoch": 0.9138381201044387,
"grad_norm": 0.052734375,
"learning_rate": 6.056343739612187e-05,
"loss": 0.4375,
"step": 1400
},
{
"epoch": 0.9171018276762402,
"grad_norm": 0.060302734375,
"learning_rate": 6.0459989612223045e-05,
"loss": 0.4129,
"step": 1405
},
{
"epoch": 0.9203655352480418,
"grad_norm": 0.0947265625,
"learning_rate": 6.035626157008159e-05,
"loss": 0.5467,
"step": 1410
},
{
"epoch": 0.9236292428198434,
"grad_norm": 0.09423828125,
"learning_rate": 6.025225453584089e-05,
"loss": 0.5297,
"step": 1415
},
{
"epoch": 0.9268929503916449,
"grad_norm": 0.080078125,
"learning_rate": 6.0147969779049794e-05,
"loss": 0.3658,
"step": 1420
},
{
"epoch": 0.9301566579634465,
"grad_norm": 0.0634765625,
"learning_rate": 6.0043408572647165e-05,
"loss": 0.3903,
"step": 1425
},
{
"epoch": 0.933420365535248,
"grad_norm": 0.06005859375,
"learning_rate": 5.9938572192946285e-05,
"loss": 0.4358,
"step": 1430
},
{
"epoch": 0.9366840731070496,
"grad_norm": 0.11279296875,
"learning_rate": 5.983346191961932e-05,
"loss": 0.5026,
"step": 1435
},
{
"epoch": 0.9399477806788512,
"grad_norm": 0.064453125,
"learning_rate": 5.9728079035681674e-05,
"loss": 0.5702,
"step": 1440
},
{
"epoch": 0.9432114882506527,
"grad_norm": 0.10498046875,
"learning_rate": 5.962242482747635e-05,
"loss": 0.5427,
"step": 1445
},
{
"epoch": 0.9464751958224543,
"grad_norm": 0.1044921875,
"learning_rate": 5.951650058465822e-05,
"loss": 0.4387,
"step": 1450
},
{
"epoch": 0.9497389033942559,
"grad_norm": 0.07861328125,
"learning_rate": 5.9410307600178336e-05,
"loss": 0.5693,
"step": 1455
},
{
"epoch": 0.9530026109660574,
"grad_norm": 0.1083984375,
"learning_rate": 5.930384717026806e-05,
"loss": 0.5604,
"step": 1460
},
{
"epoch": 0.956266318537859,
"grad_norm": 0.26171875,
"learning_rate": 5.919712059442334e-05,
"loss": 0.4111,
"step": 1465
},
{
"epoch": 0.9595300261096605,
"grad_norm": 0.0888671875,
"learning_rate": 5.909012917538876e-05,
"loss": 0.5463,
"step": 1470
},
{
"epoch": 0.9627937336814621,
"grad_norm": 0.08740234375,
"learning_rate": 5.8982874219141704e-05,
"loss": 0.4476,
"step": 1475
},
{
"epoch": 0.9660574412532638,
"grad_norm": 0.1455078125,
"learning_rate": 5.88753570348764e-05,
"loss": 0.4974,
"step": 1480
},
{
"epoch": 0.9693211488250653,
"grad_norm": 0.06396484375,
"learning_rate": 5.8767578934987916e-05,
"loss": 0.4921,
"step": 1485
},
{
"epoch": 0.9725848563968669,
"grad_norm": 0.064453125,
"learning_rate": 5.865954123505613e-05,
"loss": 0.4618,
"step": 1490
},
{
"epoch": 0.9758485639686684,
"grad_norm": 0.07080078125,
"learning_rate": 5.855124525382973e-05,
"loss": 0.478,
"step": 1495
},
{
"epoch": 0.97911227154047,
"grad_norm": 0.07470703125,
"learning_rate": 5.8442692313210084e-05,
"loss": 0.4921,
"step": 1500
},
{
"epoch": 0.97911227154047,
"eval_loss": 0.4450150430202484,
"eval_runtime": 97.0546,
"eval_samples_per_second": 1.71,
"eval_steps_per_second": 0.216,
"step": 1500
},
{
"epoch": 0.9823759791122716,
"grad_norm": 0.06640625,
"learning_rate": 5.8333883738235076e-05,
"loss": 0.5148,
"step": 1505
},
{
"epoch": 0.9856396866840731,
"grad_norm": 0.0478515625,
"learning_rate": 5.822482085706298e-05,
"loss": 0.6192,
"step": 1510
},
{
"epoch": 0.9889033942558747,
"grad_norm": 0.1103515625,
"learning_rate": 5.8115505000956215e-05,
"loss": 0.4433,
"step": 1515
},
{
"epoch": 0.9921671018276762,
"grad_norm": 0.0966796875,
"learning_rate": 5.8005937504265115e-05,
"loss": 0.3957,
"step": 1520
},
{
"epoch": 0.9954308093994778,
"grad_norm": 0.04150390625,
"learning_rate": 5.789611970441162e-05,
"loss": 0.3821,
"step": 1525
},
{
"epoch": 0.9986945169712794,
"grad_norm": 0.06591796875,
"learning_rate": 5.7786052941872984e-05,
"loss": 0.3636,
"step": 1530
},
{
"epoch": 1.001958224543081,
"grad_norm": 0.0751953125,
"learning_rate": 5.767573856016539e-05,
"loss": 0.458,
"step": 1535
},
{
"epoch": 1.0052219321148825,
"grad_norm": 0.09716796875,
"learning_rate": 5.756517790582752e-05,
"loss": 0.3841,
"step": 1540
},
{
"epoch": 1.0084856396866841,
"grad_norm": 0.09716796875,
"learning_rate": 5.7454372328404215e-05,
"loss": 0.4227,
"step": 1545
},
{
"epoch": 1.0117493472584855,
"grad_norm": 0.1171875,
"learning_rate": 5.734332318042986e-05,
"loss": 0.4105,
"step": 1550
},
{
"epoch": 1.0150130548302871,
"grad_norm": 0.047119140625,
"learning_rate": 5.7232031817412024e-05,
"loss": 0.3316,
"step": 1555
},
{
"epoch": 1.0182767624020888,
"grad_norm": 0.09814453125,
"learning_rate": 5.7120499597814814e-05,
"loss": 0.5457,
"step": 1560
},
{
"epoch": 1.0215404699738904,
"grad_norm": 0.059814453125,
"learning_rate": 5.700872788304232e-05,
"loss": 0.4402,
"step": 1565
},
{
"epoch": 1.024804177545692,
"grad_norm": 0.0810546875,
"learning_rate": 5.6896718037422e-05,
"loss": 0.4161,
"step": 1570
},
{
"epoch": 1.0280678851174936,
"grad_norm": 0.1708984375,
"learning_rate": 5.6784471428188035e-05,
"loss": 0.3828,
"step": 1575
},
{
"epoch": 1.031331592689295,
"grad_norm": 0.06396484375,
"learning_rate": 5.667198942546462e-05,
"loss": 0.3445,
"step": 1580
},
{
"epoch": 1.0345953002610966,
"grad_norm": 0.04296875,
"learning_rate": 5.655927340224927e-05,
"loss": 0.3913,
"step": 1585
},
{
"epoch": 1.0378590078328982,
"grad_norm": 0.2021484375,
"learning_rate": 5.644632473439602e-05,
"loss": 0.3671,
"step": 1590
},
{
"epoch": 1.0411227154046998,
"grad_norm": 0.080078125,
"learning_rate": 5.633314480059867e-05,
"loss": 0.3709,
"step": 1595
},
{
"epoch": 1.0443864229765012,
"grad_norm": 0.07373046875,
"learning_rate": 5.621973498237393e-05,
"loss": 0.3355,
"step": 1600
},
{
"epoch": 1.0476501305483028,
"grad_norm": 0.1162109375,
"learning_rate": 5.610609666404457e-05,
"loss": 0.3738,
"step": 1605
},
{
"epoch": 1.0509138381201044,
"grad_norm": 0.072265625,
"learning_rate": 5.59922312327225e-05,
"loss": 0.3342,
"step": 1610
},
{
"epoch": 1.054177545691906,
"grad_norm": 0.10888671875,
"learning_rate": 5.587814007829189e-05,
"loss": 0.4449,
"step": 1615
},
{
"epoch": 1.0574412532637076,
"grad_norm": 0.06201171875,
"learning_rate": 5.576382459339214e-05,
"loss": 0.5308,
"step": 1620
},
{
"epoch": 1.0607049608355092,
"grad_norm": 0.048828125,
"learning_rate": 5.5649286173400923e-05,
"loss": 0.3753,
"step": 1625
},
{
"epoch": 1.0639686684073106,
"grad_norm": 0.12353515625,
"learning_rate": 5.5534526216417155e-05,
"loss": 0.422,
"step": 1630
},
{
"epoch": 1.0672323759791122,
"grad_norm": 0.053955078125,
"learning_rate": 5.5419546123243896e-05,
"loss": 0.4433,
"step": 1635
},
{
"epoch": 1.0704960835509139,
"grad_norm": 0.0830078125,
"learning_rate": 5.530434729737127e-05,
"loss": 0.3864,
"step": 1640
},
{
"epoch": 1.0737597911227155,
"grad_norm": 0.14453125,
"learning_rate": 5.5188931144959356e-05,
"loss": 0.4461,
"step": 1645
},
{
"epoch": 1.0770234986945169,
"grad_norm": 0.10302734375,
"learning_rate": 5.507329907482098e-05,
"loss": 0.3522,
"step": 1650
},
{
"epoch": 1.0802872062663185,
"grad_norm": 0.07958984375,
"learning_rate": 5.4957452498404546e-05,
"loss": 0.4492,
"step": 1655
},
{
"epoch": 1.08355091383812,
"grad_norm": 0.10107421875,
"learning_rate": 5.4841392829776824e-05,
"loss": 0.4852,
"step": 1660
},
{
"epoch": 1.0868146214099217,
"grad_norm": 0.11669921875,
"learning_rate": 5.472512148560563e-05,
"loss": 0.3427,
"step": 1665
},
{
"epoch": 1.0900783289817233,
"grad_norm": 0.07568359375,
"learning_rate": 5.46086398851426e-05,
"loss": 0.4092,
"step": 1670
},
{
"epoch": 1.093342036553525,
"grad_norm": 0.11474609375,
"learning_rate": 5.449194945020581e-05,
"loss": 0.4882,
"step": 1675
},
{
"epoch": 1.0966057441253263,
"grad_norm": 0.060302734375,
"learning_rate": 5.437505160516248e-05,
"loss": 0.3359,
"step": 1680
},
{
"epoch": 1.099869451697128,
"grad_norm": 0.1201171875,
"learning_rate": 5.4257947776911545e-05,
"loss": 0.3242,
"step": 1685
},
{
"epoch": 1.1031331592689295,
"grad_norm": 0.0927734375,
"learning_rate": 5.414063939486623e-05,
"loss": 0.3366,
"step": 1690
},
{
"epoch": 1.1063968668407311,
"grad_norm": 0.07080078125,
"learning_rate": 5.4023127890936645e-05,
"loss": 0.3718,
"step": 1695
},
{
"epoch": 1.1096605744125327,
"grad_norm": 0.06298828125,
"learning_rate": 5.3905414699512265e-05,
"loss": 0.4508,
"step": 1700
},
{
"epoch": 1.1129242819843341,
"grad_norm": 0.051513671875,
"learning_rate": 5.378750125744444e-05,
"loss": 0.3937,
"step": 1705
},
{
"epoch": 1.1161879895561357,
"grad_norm": 0.08203125,
"learning_rate": 5.366938900402887e-05,
"loss": 0.3259,
"step": 1710
},
{
"epoch": 1.1194516971279374,
"grad_norm": 0.0634765625,
"learning_rate": 5.3551079380988005e-05,
"loss": 0.4968,
"step": 1715
},
{
"epoch": 1.122715404699739,
"grad_norm": 0.11083984375,
"learning_rate": 5.343257383245346e-05,
"loss": 0.4245,
"step": 1720
},
{
"epoch": 1.1259791122715406,
"grad_norm": 0.058349609375,
"learning_rate": 5.3313873804948404e-05,
"loss": 0.4106,
"step": 1725
},
{
"epoch": 1.129242819843342,
"grad_norm": 0.064453125,
"learning_rate": 5.319498074736988e-05,
"loss": 0.4104,
"step": 1730
},
{
"epoch": 1.1325065274151436,
"grad_norm": 0.0673828125,
"learning_rate": 5.3075896110971115e-05,
"loss": 0.3383,
"step": 1735
},
{
"epoch": 1.1357702349869452,
"grad_norm": 0.12890625,
"learning_rate": 5.2956621349343856e-05,
"loss": 0.4316,
"step": 1740
},
{
"epoch": 1.1390339425587468,
"grad_norm": 0.1337890625,
"learning_rate": 5.2837157918400574e-05,
"loss": 0.4306,
"step": 1745
},
{
"epoch": 1.1422976501305482,
"grad_norm": 0.056396484375,
"learning_rate": 5.271750727635669e-05,
"loss": 0.5231,
"step": 1750
},
{
"epoch": 1.1422976501305482,
"eval_loss": 0.4393230080604553,
"eval_runtime": 97.053,
"eval_samples_per_second": 1.71,
"eval_steps_per_second": 0.216,
"step": 1750
},
{
"epoch": 1.1455613577023498,
"grad_norm": 0.05419921875,
"learning_rate": 5.2597670883712805e-05,
"loss": 0.5465,
"step": 1755
},
{
"epoch": 1.1488250652741514,
"grad_norm": 0.1357421875,
"learning_rate": 5.247765020323686e-05,
"loss": 0.4918,
"step": 1760
},
{
"epoch": 1.152088772845953,
"grad_norm": 0.07470703125,
"learning_rate": 5.2357446699946307e-05,
"loss": 0.3916,
"step": 1765
},
{
"epoch": 1.1553524804177546,
"grad_norm": 0.1259765625,
"learning_rate": 5.223706184109015e-05,
"loss": 0.345,
"step": 1770
},
{
"epoch": 1.1586161879895562,
"grad_norm": 0.1103515625,
"learning_rate": 5.211649709613114e-05,
"loss": 0.4214,
"step": 1775
},
{
"epoch": 1.1618798955613576,
"grad_norm": 0.0712890625,
"learning_rate": 5.1995753936727766e-05,
"loss": 0.3919,
"step": 1780
},
{
"epoch": 1.1651436031331592,
"grad_norm": 0.09228515625,
"learning_rate": 5.187483383671628e-05,
"loss": 0.3803,
"step": 1785
},
{
"epoch": 1.1684073107049608,
"grad_norm": 0.11328125,
"learning_rate": 5.175373827209279e-05,
"loss": 0.38,
"step": 1790
},
{
"epoch": 1.1716710182767625,
"grad_norm": 0.1376953125,
"learning_rate": 5.1632468720995166e-05,
"loss": 0.5457,
"step": 1795
},
{
"epoch": 1.174934725848564,
"grad_norm": 0.12890625,
"learning_rate": 5.1511026663685e-05,
"loss": 0.5518,
"step": 1800
},
{
"epoch": 1.1781984334203655,
"grad_norm": 0.0625,
"learning_rate": 5.1389413582529605e-05,
"loss": 0.3359,
"step": 1805
},
{
"epoch": 1.181462140992167,
"grad_norm": 0.09912109375,
"learning_rate": 5.1267630961983835e-05,
"loss": 0.4177,
"step": 1810
},
{
"epoch": 1.1847258485639687,
"grad_norm": 0.17578125,
"learning_rate": 5.114568028857202e-05,
"loss": 0.4588,
"step": 1815
},
{
"epoch": 1.1879895561357703,
"grad_norm": 0.06298828125,
"learning_rate": 5.102356305086983e-05,
"loss": 0.5688,
"step": 1820
},
{
"epoch": 1.191253263707572,
"grad_norm": 0.0654296875,
"learning_rate": 5.090128073948603e-05,
"loss": 0.3719,
"step": 1825
},
{
"epoch": 1.1945169712793733,
"grad_norm": 0.064453125,
"learning_rate": 5.0778834847044394e-05,
"loss": 0.5584,
"step": 1830
},
{
"epoch": 1.197780678851175,
"grad_norm": 0.08056640625,
"learning_rate": 5.065622686816537e-05,
"loss": 0.4056,
"step": 1835
},
{
"epoch": 1.2010443864229765,
"grad_norm": 0.1494140625,
"learning_rate": 5.053345829944795e-05,
"loss": 0.42,
"step": 1840
},
{
"epoch": 1.2043080939947781,
"grad_norm": 0.1455078125,
"learning_rate": 5.041053063945131e-05,
"loss": 0.4956,
"step": 1845
},
{
"epoch": 1.2075718015665797,
"grad_norm": 0.10546875,
"learning_rate": 5.0287445388676564e-05,
"loss": 0.4849,
"step": 1850
},
{
"epoch": 1.2108355091383811,
"grad_norm": 0.0673828125,
"learning_rate": 5.0164204049548426e-05,
"loss": 0.3587,
"step": 1855
},
{
"epoch": 1.2140992167101827,
"grad_norm": 0.1962890625,
"learning_rate": 5.0040808126396905e-05,
"loss": 0.5099,
"step": 1860
},
{
"epoch": 1.2173629242819843,
"grad_norm": 0.11279296875,
"learning_rate": 4.9917259125438897e-05,
"loss": 0.526,
"step": 1865
},
{
"epoch": 1.220626631853786,
"grad_norm": 0.1396484375,
"learning_rate": 4.9793558554759854e-05,
"loss": 0.3404,
"step": 1870
},
{
"epoch": 1.2238903394255876,
"grad_norm": 0.05859375,
"learning_rate": 4.966970792429532e-05,
"loss": 0.4093,
"step": 1875
},
{
"epoch": 1.227154046997389,
"grad_norm": 0.1552734375,
"learning_rate": 4.954570874581252e-05,
"loss": 0.4384,
"step": 1880
},
{
"epoch": 1.2304177545691906,
"grad_norm": 0.06494140625,
"learning_rate": 4.942156253289194e-05,
"loss": 0.4096,
"step": 1885
},
{
"epoch": 1.2336814621409922,
"grad_norm": 0.0849609375,
"learning_rate": 4.929727080090879e-05,
"loss": 0.4918,
"step": 1890
},
{
"epoch": 1.2369451697127938,
"grad_norm": 0.1474609375,
"learning_rate": 4.917283506701456e-05,
"loss": 0.3785,
"step": 1895
},
{
"epoch": 1.2402088772845954,
"grad_norm": 0.0654296875,
"learning_rate": 4.904825685011848e-05,
"loss": 0.4161,
"step": 1900
},
{
"epoch": 1.2434725848563968,
"grad_norm": 0.142578125,
"learning_rate": 4.8923537670868995e-05,
"loss": 0.3862,
"step": 1905
},
{
"epoch": 1.2467362924281984,
"grad_norm": 0.07177734375,
"learning_rate": 4.879867905163515e-05,
"loss": 0.6231,
"step": 1910
},
{
"epoch": 1.25,
"grad_norm": 0.09619140625,
"learning_rate": 4.8673682516488076e-05,
"loss": 0.3735,
"step": 1915
},
{
"epoch": 1.2532637075718016,
"grad_norm": 0.07568359375,
"learning_rate": 4.854854959118237e-05,
"loss": 0.4026,
"step": 1920
},
{
"epoch": 1.2565274151436032,
"grad_norm": 0.1396484375,
"learning_rate": 4.842328180313742e-05,
"loss": 0.4664,
"step": 1925
},
{
"epoch": 1.2597911227154048,
"grad_norm": 0.08837890625,
"learning_rate": 4.829788068141881e-05,
"loss": 0.3852,
"step": 1930
},
{
"epoch": 1.2630548302872062,
"grad_norm": 0.1162109375,
"learning_rate": 4.817234775671969e-05,
"loss": 0.5633,
"step": 1935
},
{
"epoch": 1.2663185378590078,
"grad_norm": 0.10302734375,
"learning_rate": 4.804668456134198e-05,
"loss": 0.3544,
"step": 1940
},
{
"epoch": 1.2695822454308094,
"grad_norm": 0.08642578125,
"learning_rate": 4.792089262917777e-05,
"loss": 0.3355,
"step": 1945
},
{
"epoch": 1.2728459530026108,
"grad_norm": 0.10888671875,
"learning_rate": 4.7794973495690564e-05,
"loss": 0.3438,
"step": 1950
},
{
"epoch": 1.2761096605744124,
"grad_norm": 0.1572265625,
"learning_rate": 4.7668928697896504e-05,
"loss": 0.4627,
"step": 1955
},
{
"epoch": 1.279373368146214,
"grad_norm": 0.06298828125,
"learning_rate": 4.7542759774345685e-05,
"loss": 0.3699,
"step": 1960
},
{
"epoch": 1.2826370757180157,
"grad_norm": 0.11376953125,
"learning_rate": 4.741646826510326e-05,
"loss": 0.5245,
"step": 1965
},
{
"epoch": 1.2859007832898173,
"grad_norm": 0.08544921875,
"learning_rate": 4.729005571173077e-05,
"loss": 0.3119,
"step": 1970
},
{
"epoch": 1.2891644908616189,
"grad_norm": 0.0791015625,
"learning_rate": 4.716352365726723e-05,
"loss": 0.3892,
"step": 1975
},
{
"epoch": 1.2924281984334205,
"grad_norm": 0.10400390625,
"learning_rate": 4.703687364621036e-05,
"loss": 0.434,
"step": 1980
},
{
"epoch": 1.2956919060052219,
"grad_norm": 0.08154296875,
"learning_rate": 4.691010722449768e-05,
"loss": 0.4034,
"step": 1985
},
{
"epoch": 1.2989556135770235,
"grad_norm": 0.08056640625,
"learning_rate": 4.678322593948766e-05,
"loss": 0.5026,
"step": 1990
},
{
"epoch": 1.302219321148825,
"grad_norm": 0.1396484375,
"learning_rate": 4.6656231339940844e-05,
"loss": 0.4592,
"step": 1995
},
{
"epoch": 1.3054830287206267,
"grad_norm": 0.16015625,
"learning_rate": 4.6529124976000944e-05,
"loss": 0.3529,
"step": 2000
},
{
"epoch": 1.3054830287206267,
"eval_loss": 0.4323955774307251,
"eval_runtime": 96.9439,
"eval_samples_per_second": 1.712,
"eval_steps_per_second": 0.217,
"step": 2000
},
{
"epoch": 1.308746736292428,
"grad_norm": 0.130859375,
"learning_rate": 4.640190839917588e-05,
"loss": 0.4401,
"step": 2005
},
{
"epoch": 1.3120104438642297,
"grad_norm": 0.10888671875,
"learning_rate": 4.627458316231891e-05,
"loss": 0.6135,
"step": 2010
},
{
"epoch": 1.3152741514360313,
"grad_norm": 0.07666015625,
"learning_rate": 4.6147150819609606e-05,
"loss": 0.3826,
"step": 2015
},
{
"epoch": 1.318537859007833,
"grad_norm": 0.099609375,
"learning_rate": 4.6019612926534947e-05,
"loss": 0.3976,
"step": 2020
},
{
"epoch": 1.3218015665796345,
"grad_norm": 0.051025390625,
"learning_rate": 4.5891971039870255e-05,
"loss": 0.4904,
"step": 2025
},
{
"epoch": 1.3250652741514362,
"grad_norm": 0.1767578125,
"learning_rate": 4.5764226717660275e-05,
"loss": 0.3828,
"step": 2030
},
{
"epoch": 1.3283289817232375,
"grad_norm": 0.0751953125,
"learning_rate": 4.56363815192001e-05,
"loss": 0.4767,
"step": 2035
},
{
"epoch": 1.3315926892950392,
"grad_norm": 0.07568359375,
"learning_rate": 4.5508437005016164e-05,
"loss": 0.4446,
"step": 2040
},
{
"epoch": 1.3348563968668408,
"grad_norm": 0.06640625,
"learning_rate": 4.5380394736847195e-05,
"loss": 0.3533,
"step": 2045
},
{
"epoch": 1.3381201044386424,
"grad_norm": 0.0869140625,
"learning_rate": 4.525225627762511e-05,
"loss": 0.3049,
"step": 2050
},
{
"epoch": 1.3413838120104438,
"grad_norm": 0.181640625,
"learning_rate": 4.5124023191455995e-05,
"loss": 0.4373,
"step": 2055
},
{
"epoch": 1.3446475195822454,
"grad_norm": 0.1328125,
"learning_rate": 4.499569704360099e-05,
"loss": 0.5643,
"step": 2060
},
{
"epoch": 1.347911227154047,
"grad_norm": 0.08935546875,
"learning_rate": 4.486727940045717e-05,
"loss": 0.4793,
"step": 2065
},
{
"epoch": 1.3511749347258486,
"grad_norm": 0.1318359375,
"learning_rate": 4.473877182953845e-05,
"loss": 0.3729,
"step": 2070
},
{
"epoch": 1.3544386422976502,
"grad_norm": 0.062255859375,
"learning_rate": 4.461017589945642e-05,
"loss": 0.3655,
"step": 2075
},
{
"epoch": 1.3577023498694518,
"grad_norm": 0.0849609375,
"learning_rate": 4.4481493179901224e-05,
"loss": 0.4118,
"step": 2080
},
{
"epoch": 1.3609660574412532,
"grad_norm": 0.09033203125,
"learning_rate": 4.43527252416224e-05,
"loss": 0.4969,
"step": 2085
},
{
"epoch": 1.3642297650130548,
"grad_norm": 0.1572265625,
"learning_rate": 4.422387365640966e-05,
"loss": 0.4363,
"step": 2090
},
{
"epoch": 1.3674934725848564,
"grad_norm": 0.11865234375,
"learning_rate": 4.409493999707379e-05,
"loss": 0.4515,
"step": 2095
},
{
"epoch": 1.370757180156658,
"grad_norm": 0.0693359375,
"learning_rate": 4.3965925837427384e-05,
"loss": 0.4461,
"step": 2100
},
{
"epoch": 1.3740208877284594,
"grad_norm": 0.068359375,
"learning_rate": 4.383683275226564e-05,
"loss": 0.3828,
"step": 2105
},
{
"epoch": 1.377284595300261,
"grad_norm": 0.09716796875,
"learning_rate": 4.370766231734717e-05,
"loss": 0.4346,
"step": 2110
},
{
"epoch": 1.3805483028720626,
"grad_norm": 0.06982421875,
"learning_rate": 4.357841610937474e-05,
"loss": 0.3767,
"step": 2115
},
{
"epoch": 1.3838120104438643,
"grad_norm": 0.11962890625,
"learning_rate": 4.3449095705976025e-05,
"loss": 0.3613,
"step": 2120
},
{
"epoch": 1.3870757180156659,
"grad_norm": 0.08544921875,
"learning_rate": 4.331970268568436e-05,
"loss": 0.3936,
"step": 2125
},
{
"epoch": 1.3903394255874675,
"grad_norm": 0.046875,
"learning_rate": 4.319023862791948e-05,
"loss": 0.3828,
"step": 2130
},
{
"epoch": 1.3936031331592689,
"grad_norm": 0.1123046875,
"learning_rate": 4.306070511296821e-05,
"loss": 0.3694,
"step": 2135
},
{
"epoch": 1.3968668407310705,
"grad_norm": 0.09375,
"learning_rate": 4.293110372196519e-05,
"loss": 0.3963,
"step": 2140
},
{
"epoch": 1.400130548302872,
"grad_norm": 0.10595703125,
"learning_rate": 4.280143603687363e-05,
"loss": 0.4214,
"step": 2145
},
{
"epoch": 1.4033942558746737,
"grad_norm": 0.057861328125,
"learning_rate": 4.267170364046589e-05,
"loss": 0.3812,
"step": 2150
},
{
"epoch": 1.406657963446475,
"grad_norm": NaN,
"learning_rate": 4.254190811630426e-05,
"loss": 0.4025,
"step": 2155
},
{
"epoch": 1.4099216710182767,
"grad_norm": 0.12890625,
"learning_rate": 4.241205104872158e-05,
"loss": 0.511,
"step": 2160
},
{
"epoch": 1.4131853785900783,
"grad_norm": 0.08251953125,
"learning_rate": 4.228213402280189e-05,
"loss": 0.3358,
"step": 2165
},
{
"epoch": 1.41644908616188,
"grad_norm": 0.11328125,
"learning_rate": 4.215215862436114e-05,
"loss": 0.3802,
"step": 2170
},
{
"epoch": 1.4197127937336815,
"grad_norm": 0.060546875,
"learning_rate": 4.202212643992779e-05,
"loss": 0.3486,
"step": 2175
},
{
"epoch": 1.4229765013054831,
"grad_norm": 0.109375,
"learning_rate": 4.189203905672342e-05,
"loss": 0.3345,
"step": 2180
},
{
"epoch": 1.4262402088772845,
"grad_norm": 0.09033203125,
"learning_rate": 4.1761898062643425e-05,
"loss": 0.3261,
"step": 2185
},
{
"epoch": 1.4295039164490861,
"grad_norm": 0.08642578125,
"learning_rate": 4.163170504623757e-05,
"loss": 0.4276,
"step": 2190
},
{
"epoch": 1.4327676240208878,
"grad_norm": 0.1162109375,
"learning_rate": 4.1501461596690634e-05,
"loss": 0.3816,
"step": 2195
},
{
"epoch": 1.4360313315926894,
"grad_norm": 0.130859375,
"learning_rate": 4.1371169303803006e-05,
"loss": 0.4142,
"step": 2200
},
{
"epoch": 1.4392950391644908,
"grad_norm": 0.103515625,
"learning_rate": 4.124082975797126e-05,
"loss": 0.4228,
"step": 2205
},
{
"epoch": 1.4425587467362924,
"grad_norm": 0.04833984375,
"learning_rate": 4.111044455016877e-05,
"loss": 0.4628,
"step": 2210
},
{
"epoch": 1.445822454308094,
"grad_norm": 0.11083984375,
"learning_rate": 4.098001527192628e-05,
"loss": 0.3949,
"step": 2215
},
{
"epoch": 1.4490861618798956,
"grad_norm": 0.0810546875,
"learning_rate": 4.084954351531246e-05,
"loss": 0.3102,
"step": 2220
},
{
"epoch": 1.4523498694516972,
"grad_norm": 0.0966796875,
"learning_rate": 4.0719030872914496e-05,
"loss": 0.3904,
"step": 2225
},
{
"epoch": 1.4556135770234988,
"grad_norm": 0.11279296875,
"learning_rate": 4.0588478937818636e-05,
"loss": 0.3932,
"step": 2230
},
{
"epoch": 1.4588772845953002,
"grad_norm": 0.076171875,
"learning_rate": 4.0457889303590765e-05,
"loss": 0.3941,
"step": 2235
},
{
"epoch": 1.4621409921671018,
"grad_norm": 0.1328125,
"learning_rate": 4.032726356425693e-05,
"loss": 0.5806,
"step": 2240
},
{
"epoch": 1.4654046997389034,
"grad_norm": 0.07421875,
"learning_rate": 4.019660331428387e-05,
"loss": 0.327,
"step": 2245
},
{
"epoch": 1.468668407310705,
"grad_norm": 0.0859375,
"learning_rate": 4.006591014855959e-05,
"loss": 0.3498,
"step": 2250
},
{
"epoch": 1.468668407310705,
"eval_loss": 0.4334380328655243,
"eval_runtime": 97.3635,
"eval_samples_per_second": 1.705,
"eval_steps_per_second": 0.216,
"step": 2250
},
{
"epoch": 1.4719321148825064,
"grad_norm": 0.09814453125,
"learning_rate": 3.993518566237389e-05,
"loss": 0.3929,
"step": 2255
},
{
"epoch": 1.475195822454308,
"grad_norm": 0.07470703125,
"learning_rate": 3.9804431451398866e-05,
"loss": 0.3313,
"step": 2260
},
{
"epoch": 1.4784595300261096,
"grad_norm": 0.0732421875,
"learning_rate": 3.967364911166944e-05,
"loss": 0.383,
"step": 2265
},
{
"epoch": 1.4817232375979112,
"grad_norm": 0.0869140625,
"learning_rate": 3.954284023956389e-05,
"loss": 0.4548,
"step": 2270
},
{
"epoch": 1.4849869451697129,
"grad_norm": 0.0771484375,
"learning_rate": 3.941200643178436e-05,
"loss": 0.3716,
"step": 2275
},
{
"epoch": 1.4882506527415145,
"grad_norm": 0.0634765625,
"learning_rate": 3.928114928533737e-05,
"loss": 0.335,
"step": 2280
},
{
"epoch": 1.4915143603133159,
"grad_norm": 0.09375,
"learning_rate": 3.9150270397514325e-05,
"loss": 0.3662,
"step": 2285
},
{
"epoch": 1.4947780678851175,
"grad_norm": 0.0712890625,
"learning_rate": 3.9019371365871994e-05,
"loss": 0.4185,
"step": 2290
},
{
"epoch": 1.498041775456919,
"grad_norm": 0.05517578125,
"learning_rate": 3.8888453788213045e-05,
"loss": 0.3562,
"step": 2295
},
{
"epoch": 1.5013054830287205,
"grad_norm": 0.07763671875,
"learning_rate": 3.875751926256653e-05,
"loss": 0.3962,
"step": 2300
},
{
"epoch": 1.504569190600522,
"grad_norm": 0.15625,
"learning_rate": 3.862656938716834e-05,
"loss": 0.4031,
"step": 2305
},
{
"epoch": 1.5078328981723237,
"grad_norm": 0.08544921875,
"learning_rate": 3.8495605760441784e-05,
"loss": 0.3921,
"step": 2310
},
{
"epoch": 1.5110966057441253,
"grad_norm": 0.1123046875,
"learning_rate": 3.8364629980977995e-05,
"loss": 0.3896,
"step": 2315
},
{
"epoch": 1.514360313315927,
"grad_norm": 0.06494140625,
"learning_rate": 3.823364364751644e-05,
"loss": 0.3412,
"step": 2320
},
{
"epoch": 1.5176240208877285,
"grad_norm": 0.12890625,
"learning_rate": 3.8102648358925415e-05,
"loss": 0.4458,
"step": 2325
},
{
"epoch": 1.5208877284595301,
"grad_norm": 0.0908203125,
"learning_rate": 3.7971645714182555e-05,
"loss": 0.3774,
"step": 2330
},
{
"epoch": 1.5241514360313317,
"grad_norm": 0.2197265625,
"learning_rate": 3.784063731235525e-05,
"loss": 0.4596,
"step": 2335
},
{
"epoch": 1.5274151436031331,
"grad_norm": 0.060791015625,
"learning_rate": 3.770962475258118e-05,
"loss": 0.2522,
"step": 2340
},
{
"epoch": 1.5306788511749347,
"grad_norm": 0.1484375,
"learning_rate": 3.7578609634048774e-05,
"loss": 0.4601,
"step": 2345
},
{
"epoch": 1.5339425587467361,
"grad_norm": 0.07177734375,
"learning_rate": 3.7447593555977706e-05,
"loss": 0.329,
"step": 2350
},
{
"epoch": 1.5372062663185377,
"grad_norm": 0.0908203125,
"learning_rate": 3.731657811759933e-05,
"loss": 0.396,
"step": 2355
},
{
"epoch": 1.5404699738903394,
"grad_norm": 0.109375,
"learning_rate": 3.718556491813724e-05,
"loss": 0.5214,
"step": 2360
},
{
"epoch": 1.543733681462141,
"grad_norm": 0.076171875,
"learning_rate": 3.705455555678766e-05,
"loss": 0.4289,
"step": 2365
},
{
"epoch": 1.5469973890339426,
"grad_norm": 0.146484375,
"learning_rate": 3.6923551632699994e-05,
"loss": 0.3971,
"step": 2370
},
{
"epoch": 1.5502610966057442,
"grad_norm": 0.08251953125,
"learning_rate": 3.6792554744957235e-05,
"loss": 0.4115,
"step": 2375
},
{
"epoch": 1.5535248041775458,
"grad_norm": 0.08935546875,
"learning_rate": 3.666156649255656e-05,
"loss": 0.3961,
"step": 2380
},
{
"epoch": 1.5567885117493474,
"grad_norm": 0.115234375,
"learning_rate": 3.653058847438964e-05,
"loss": 0.3608,
"step": 2385
},
{
"epoch": 1.5600522193211488,
"grad_norm": 0.11865234375,
"learning_rate": 3.639962228922332e-05,
"loss": 0.4572,
"step": 2390
},
{
"epoch": 1.5633159268929504,
"grad_norm": 0.0849609375,
"learning_rate": 3.626866953567995e-05,
"loss": 0.3655,
"step": 2395
},
{
"epoch": 1.566579634464752,
"grad_norm": 0.06884765625,
"learning_rate": 3.613773181221794e-05,
"loss": 0.3551,
"step": 2400
},
{
"epoch": 1.5698433420365534,
"grad_norm": 0.1259765625,
"learning_rate": 3.600681071711225e-05,
"loss": 0.3538,
"step": 2405
},
{
"epoch": 1.573107049608355,
"grad_norm": 0.1396484375,
"learning_rate": 3.587590784843486e-05,
"loss": 0.416,
"step": 2410
},
{
"epoch": 1.5763707571801566,
"grad_norm": 0.07373046875,
"learning_rate": 3.574502480403527e-05,
"loss": 0.4449,
"step": 2415
},
{
"epoch": 1.5796344647519582,
"grad_norm": 0.10986328125,
"learning_rate": 3.561416318152098e-05,
"loss": 0.5846,
"step": 2420
},
{
"epoch": 1.5828981723237598,
"grad_norm": 0.087890625,
"learning_rate": 3.548332457823805e-05,
"loss": 0.3468,
"step": 2425
},
{
"epoch": 1.5861618798955615,
"grad_norm": 0.09814453125,
"learning_rate": 3.5352510591251506e-05,
"loss": 0.3941,
"step": 2430
},
{
"epoch": 1.589425587467363,
"grad_norm": 0.0986328125,
"learning_rate": 3.522172281732596e-05,
"loss": 0.3585,
"step": 2435
},
{
"epoch": 1.5926892950391645,
"grad_norm": 0.05908203125,
"learning_rate": 3.509096285290598e-05,
"loss": 0.3889,
"step": 2440
},
{
"epoch": 1.595953002610966,
"grad_norm": 0.07275390625,
"learning_rate": 3.496023229409676e-05,
"loss": 0.3794,
"step": 2445
},
{
"epoch": 1.5992167101827677,
"grad_norm": 0.0634765625,
"learning_rate": 3.482953273664449e-05,
"loss": 0.3316,
"step": 2450
},
{
"epoch": 1.602480417754569,
"grad_norm": 0.07861328125,
"learning_rate": 3.469886577591702e-05,
"loss": 0.4095,
"step": 2455
},
{
"epoch": 1.6057441253263707,
"grad_norm": 0.115234375,
"learning_rate": 3.4568233006884225e-05,
"loss": 0.3882,
"step": 2460
},
{
"epoch": 1.6090078328981723,
"grad_norm": 0.051513671875,
"learning_rate": 3.44376360240987e-05,
"loss": 0.4681,
"step": 2465
},
{
"epoch": 1.612271540469974,
"grad_norm": 0.06884765625,
"learning_rate": 3.4307076421676166e-05,
"loss": 0.327,
"step": 2470
},
{
"epoch": 1.6155352480417755,
"grad_norm": 0.06640625,
"learning_rate": 3.417655579327611e-05,
"loss": 0.3109,
"step": 2475
},
{
"epoch": 1.6187989556135771,
"grad_norm": 0.15234375,
"learning_rate": 3.4046075732082225e-05,
"loss": 0.4118,
"step": 2480
},
{
"epoch": 1.6220626631853787,
"grad_norm": 0.06494140625,
"learning_rate": 3.391563783078311e-05,
"loss": 0.3506,
"step": 2485
},
{
"epoch": 1.6253263707571801,
"grad_norm": 0.1484375,
"learning_rate": 3.3785243681552654e-05,
"loss": 0.3268,
"step": 2490
},
{
"epoch": 1.6285900783289817,
"grad_norm": 0.0810546875,
"learning_rate": 3.365489487603078e-05,
"loss": 0.4438,
"step": 2495
},
{
"epoch": 1.6318537859007833,
"grad_norm": 0.1494140625,
"learning_rate": 3.352459300530387e-05,
"loss": 0.55,
"step": 2500
},
{
"epoch": 1.6318537859007833,
"eval_loss": 0.428629994392395,
"eval_runtime": 96.937,
"eval_samples_per_second": 1.712,
"eval_steps_per_second": 0.217,
"step": 2500
},
{
"epoch": 1.6351174934725847,
"grad_norm": 0.064453125,
"learning_rate": 3.33943396598854e-05,
"loss": 0.4103,
"step": 2505
},
{
"epoch": 1.6383812010443863,
"grad_norm": 0.10400390625,
"learning_rate": 3.326413642969657e-05,
"loss": 0.373,
"step": 2510
},
{
"epoch": 1.641644908616188,
"grad_norm": 0.1142578125,
"learning_rate": 3.313398490404682e-05,
"loss": 0.4306,
"step": 2515
},
{
"epoch": 1.6449086161879896,
"grad_norm": 0.10986328125,
"learning_rate": 3.300388667161448e-05,
"loss": 0.3307,
"step": 2520
},
{
"epoch": 1.6481723237597912,
"grad_norm": 0.0791015625,
"learning_rate": 3.287384332042734e-05,
"loss": 0.3758,
"step": 2525
},
{
"epoch": 1.6514360313315928,
"grad_norm": 0.0712890625,
"learning_rate": 3.274385643784335e-05,
"loss": 0.345,
"step": 2530
},
{
"epoch": 1.6546997389033944,
"grad_norm": 0.16796875,
"learning_rate": 3.2613927610531086e-05,
"loss": 0.3448,
"step": 2535
},
{
"epoch": 1.6579634464751958,
"grad_norm": 0.1416015625,
"learning_rate": 3.248405842445059e-05,
"loss": 0.4311,
"step": 2540
},
{
"epoch": 1.6612271540469974,
"grad_norm": 0.061767578125,
"learning_rate": 3.235425046483379e-05,
"loss": 0.3656,
"step": 2545
},
{
"epoch": 1.664490861618799,
"grad_norm": NaN,
"learning_rate": 3.2224505316165364e-05,
"loss": 0.3653,
"step": 2550
},
{
"epoch": 1.6677545691906004,
"grad_norm": 0.111328125,
"learning_rate": 3.209482456216321e-05,
"loss": 0.3706,
"step": 2555
},
{
"epoch": 1.671018276762402,
"grad_norm": 0.111328125,
"learning_rate": 3.196520978575927e-05,
"loss": 0.338,
"step": 2560
},
{
"epoch": 1.6742819843342036,
"grad_norm": 0.0693359375,
"learning_rate": 3.183566256908008e-05,
"loss": 0.3287,
"step": 2565
},
{
"epoch": 1.6775456919060052,
"grad_norm": 0.16015625,
"learning_rate": 3.170618449342757e-05,
"loss": 0.4767,
"step": 2570
},
{
"epoch": 1.6808093994778068,
"grad_norm": 0.0888671875,
"learning_rate": 3.157677713925965e-05,
"loss": 0.5161,
"step": 2575
},
{
"epoch": 1.6840731070496084,
"grad_norm": 0.1259765625,
"learning_rate": 3.144744208617105e-05,
"loss": 0.4437,
"step": 2580
},
{
"epoch": 1.68733681462141,
"grad_norm": 0.0810546875,
"learning_rate": 3.131818091287389e-05,
"loss": 0.3929,
"step": 2585
},
{
"epoch": 1.6906005221932114,
"grad_norm": 0.08935546875,
"learning_rate": 3.1188995197178525e-05,
"loss": 0.4241,
"step": 2590
},
{
"epoch": 1.693864229765013,
"grad_norm": 0.1015625,
"learning_rate": 3.1059886515974266e-05,
"loss": 0.3692,
"step": 2595
},
{
"epoch": 1.6971279373368147,
"grad_norm": NaN,
"learning_rate": 3.093085644521006e-05,
"loss": 0.4276,
"step": 2600
},
{
"epoch": 1.700391644908616,
"grad_norm": 0.16015625,
"learning_rate": 3.080190655987537e-05,
"loss": 0.3514,
"step": 2605
},
{
"epoch": 1.7036553524804177,
"grad_norm": 0.0810546875,
"learning_rate": 3.06730384339808e-05,
"loss": 0.4211,
"step": 2610
},
{
"epoch": 1.7069190600522193,
"grad_norm": 0.14453125,
"learning_rate": 3.0544253640539064e-05,
"loss": 0.3813,
"step": 2615
},
{
"epoch": 1.7101827676240209,
"grad_norm": NaN,
"learning_rate": 3.0415553751545607e-05,
"loss": 0.3778,
"step": 2620
},
{
"epoch": 1.7134464751958225,
"grad_norm": 0.10107421875,
"learning_rate": 3.0286940337959565e-05,
"loss": 0.656,
"step": 2625
},
{
"epoch": 1.716710182767624,
"grad_norm": 0.07568359375,
"learning_rate": 3.0158414969684457e-05,
"loss": 0.3079,
"step": 2630
},
{
"epoch": 1.7199738903394257,
"grad_norm": 0.1767578125,
"learning_rate": 3.002997921554915e-05,
"loss": 0.4795,
"step": 2635
},
{
"epoch": 1.723237597911227,
"grad_norm": 0.0888671875,
"learning_rate": 2.9901634643288596e-05,
"loss": 0.3406,
"step": 2640
},
{
"epoch": 1.7265013054830287,
"grad_norm": 0.08251953125,
"learning_rate": 2.97733828195248e-05,
"loss": 0.5483,
"step": 2645
},
{
"epoch": 1.7297650130548303,
"grad_norm": 0.1474609375,
"learning_rate": 2.9645225309747576e-05,
"loss": 0.5863,
"step": 2650
},
{
"epoch": 1.7330287206266317,
"grad_norm": 0.057373046875,
"learning_rate": 2.9517163678295583e-05,
"loss": 0.5168,
"step": 2655
},
{
"epoch": 1.7362924281984333,
"grad_norm": NaN,
"learning_rate": 2.9389199488337076e-05,
"loss": 0.4032,
"step": 2660
},
{
"epoch": 1.739556135770235,
"grad_norm": 0.1103515625,
"learning_rate": 2.9261334301850975e-05,
"loss": 0.4681,
"step": 2665
},
{
"epoch": 1.7428198433420365,
"grad_norm": 0.1435546875,
"learning_rate": 2.9133569679607677e-05,
"loss": 0.3807,
"step": 2670
},
{
"epoch": 1.7460835509138382,
"grad_norm": 0.171875,
"learning_rate": 2.9005907181150066e-05,
"loss": 0.4555,
"step": 2675
},
{
"epoch": 1.7493472584856398,
"grad_norm": 0.07470703125,
"learning_rate": 2.8878348364774473e-05,
"loss": 0.4289,
"step": 2680
},
{
"epoch": 1.7526109660574414,
"grad_norm": 0.1396484375,
"learning_rate": 2.8750894787511633e-05,
"loss": 0.392,
"step": 2685
},
{
"epoch": 1.7558746736292428,
"grad_norm": 0.11767578125,
"learning_rate": 2.862354800510773e-05,
"loss": 0.4388,
"step": 2690
},
{
"epoch": 1.7591383812010444,
"grad_norm": 0.076171875,
"learning_rate": 2.849630957200531e-05,
"loss": 0.3528,
"step": 2695
},
{
"epoch": 1.762402088772846,
"grad_norm": 0.10888671875,
"learning_rate": 2.836918104132442e-05,
"loss": 0.4571,
"step": 2700
},
{
"epoch": 1.7656657963446474,
"grad_norm": 0.11572265625,
"learning_rate": 2.8242163964843556e-05,
"loss": 0.4158,
"step": 2705
},
{
"epoch": 1.768929503916449,
"grad_norm": 0.1123046875,
"learning_rate": 2.811525989298081e-05,
"loss": 0.3783,
"step": 2710
},
{
"epoch": 1.7721932114882506,
"grad_norm": 0.1298828125,
"learning_rate": 2.798847037477483e-05,
"loss": 0.4677,
"step": 2715
},
{
"epoch": 1.7754569190600522,
"grad_norm": 0.08349609375,
"learning_rate": 2.786179695786606e-05,
"loss": 0.3359,
"step": 2720
},
{
"epoch": 1.7787206266318538,
"grad_norm": 0.1279296875,
"learning_rate": 2.773524118847769e-05,
"loss": 0.5303,
"step": 2725
},
{
"epoch": 1.7819843342036554,
"grad_norm": 0.1396484375,
"learning_rate": 2.7608804611396926e-05,
"loss": 0.5395,
"step": 2730
},
{
"epoch": 1.785248041775457,
"grad_norm": 0.1865234375,
"learning_rate": 2.7482488769956006e-05,
"loss": 0.431,
"step": 2735
},
{
"epoch": 1.7885117493472587,
"grad_norm": 0.06298828125,
"learning_rate": 2.7356295206013497e-05,
"loss": 0.5289,
"step": 2740
},
{
"epoch": 1.79177545691906,
"grad_norm": 0.10498046875,
"learning_rate": 2.7230225459935333e-05,
"loss": 0.3535,
"step": 2745
},
{
"epoch": 1.7950391644908616,
"grad_norm": 0.08984375,
"learning_rate": 2.7104281070576102e-05,
"loss": 0.3265,
"step": 2750
},
{
"epoch": 1.7950391644908616,
"eval_loss": 0.42748335003852844,
"eval_runtime": 97.001,
"eval_samples_per_second": 1.711,
"eval_steps_per_second": 0.216,
"step": 2750
},
{
"epoch": 1.798302872062663,
"grad_norm": NaN,
"learning_rate": 2.6978463575260266e-05,
"loss": 0.3462,
"step": 2755
},
{
"epoch": 1.8015665796344646,
"grad_norm": 0.1259765625,
"learning_rate": 2.6852774509763334e-05,
"loss": 0.4593,
"step": 2760
},
{
"epoch": 1.8048302872062663,
"grad_norm": 0.06689453125,
"learning_rate": 2.672721540829318e-05,
"loss": 0.529,
"step": 2765
},
{
"epoch": 1.8080939947780679,
"grad_norm": 0.1455078125,
"learning_rate": 2.6601787803471277e-05,
"loss": 0.4503,
"step": 2770
},
{
"epoch": 1.8113577023498695,
"grad_norm": 0.205078125,
"learning_rate": 2.6476493226313997e-05,
"loss": 0.3689,
"step": 2775
},
{
"epoch": 1.814621409921671,
"grad_norm": 0.11767578125,
"learning_rate": 2.635133320621393e-05,
"loss": 0.4557,
"step": 2780
},
{
"epoch": 1.8178851174934727,
"grad_norm": 0.2109375,
"learning_rate": 2.6226309270921233e-05,
"loss": 0.474,
"step": 2785
},
{
"epoch": 1.8211488250652743,
"grad_norm": 0.08349609375,
"learning_rate": 2.610142294652491e-05,
"loss": 0.3612,
"step": 2790
},
{
"epoch": 1.8244125326370757,
"grad_norm": 0.091796875,
"learning_rate": 2.5976675757434293e-05,
"loss": 0.4681,
"step": 2795
},
{
"epoch": 1.8276762402088773,
"grad_norm": 0.158203125,
"learning_rate": 2.5852069226360327e-05,
"loss": 0.7216,
"step": 2800
},
{
"epoch": 1.8309399477806787,
"grad_norm": 0.11083984375,
"learning_rate": 2.572760487429708e-05,
"loss": 0.4456,
"step": 2805
},
{
"epoch": 1.8342036553524803,
"grad_norm": 0.1455078125,
"learning_rate": 2.560328422050307e-05,
"loss": 0.4557,
"step": 2810
},
{
"epoch": 1.837467362924282,
"grad_norm": 0.17578125,
"learning_rate": 2.5479108782482853e-05,
"loss": 0.4459,
"step": 2815
},
{
"epoch": 1.8407310704960835,
"grad_norm": 0.111328125,
"learning_rate": 2.535508007596835e-05,
"loss": 0.4037,
"step": 2820
},
{
"epoch": 1.8439947780678851,
"grad_norm": 0.10546875,
"learning_rate": 2.52311996149005e-05,
"loss": 0.3679,
"step": 2825
},
{
"epoch": 1.8472584856396868,
"grad_norm": 0.049560546875,
"learning_rate": 2.5107468911410642e-05,
"loss": 0.4333,
"step": 2830
},
{
"epoch": 1.8505221932114884,
"grad_norm": 0.07177734375,
"learning_rate": 2.498388947580212e-05,
"loss": 0.3374,
"step": 2835
},
{
"epoch": 1.85378590078329,
"grad_norm": 0.126953125,
"learning_rate": 2.4860462816531897e-05,
"loss": 0.381,
"step": 2840
},
{
"epoch": 1.8570496083550914,
"grad_norm": 0.1123046875,
"learning_rate": 2.473719044019203e-05,
"loss": 0.3058,
"step": 2845
},
{
"epoch": 1.860313315926893,
"grad_norm": 0.0712890625,
"learning_rate": 2.461407385149139e-05,
"loss": 0.3647,
"step": 2850
},
{
"epoch": 1.8635770234986944,
"grad_norm": 0.061279296875,
"learning_rate": 2.4491114553237216e-05,
"loss": 0.3203,
"step": 2855
},
{
"epoch": 1.866840731070496,
"grad_norm": 0.111328125,
"learning_rate": 2.4368314046316812e-05,
"loss": 0.3773,
"step": 2860
},
{
"epoch": 1.8701044386422976,
"grad_norm": 0.0556640625,
"learning_rate": 2.4245673829679214e-05,
"loss": 0.3324,
"step": 2865
},
{
"epoch": 1.8733681462140992,
"grad_norm": 0.07373046875,
"learning_rate": 2.4123195400316884e-05,
"loss": 0.3101,
"step": 2870
},
{
"epoch": 1.8766318537859008,
"grad_norm": 0.09619140625,
"learning_rate": 2.4000880253247453e-05,
"loss": 0.3916,
"step": 2875
},
{
"epoch": 1.8798955613577024,
"grad_norm": 0.1279296875,
"learning_rate": 2.3878729881495504e-05,
"loss": 0.3611,
"step": 2880
},
{
"epoch": 1.883159268929504,
"grad_norm": 0.109375,
"learning_rate": 2.3756745776074235e-05,
"loss": 0.3401,
"step": 2885
},
{
"epoch": 1.8864229765013056,
"grad_norm": 0.20703125,
"learning_rate": 2.3634929425967417e-05,
"loss": 0.4169,
"step": 2890
},
{
"epoch": 1.889686684073107,
"grad_norm": 0.138671875,
"learning_rate": 2.351328231811106e-05,
"loss": 0.4605,
"step": 2895
},
{
"epoch": 1.8929503916449086,
"grad_norm": 0.1328125,
"learning_rate": 2.3391805937375414e-05,
"loss": 0.3902,
"step": 2900
},
{
"epoch": 1.89621409921671,
"grad_norm": 0.06689453125,
"learning_rate": 2.3270501766546694e-05,
"loss": 0.4328,
"step": 2905
},
{
"epoch": 1.8994778067885116,
"grad_norm": 0.0732421875,
"learning_rate": 2.3149371286309133e-05,
"loss": 0.4144,
"step": 2910
},
{
"epoch": 1.9027415143603132,
"grad_norm": 0.09033203125,
"learning_rate": 2.302841597522678e-05,
"loss": 0.3111,
"step": 2915
},
{
"epoch": 1.9060052219321149,
"grad_norm": 0.111328125,
"learning_rate": 2.2907637309725493e-05,
"loss": 0.5008,
"step": 2920
},
{
"epoch": 1.9092689295039165,
"grad_norm": 0.07763671875,
"learning_rate": 2.2787036764074987e-05,
"loss": 0.4183,
"step": 2925
},
{
"epoch": 1.912532637075718,
"grad_norm": 0.08203125,
"learning_rate": 2.266661581037072e-05,
"loss": 0.3335,
"step": 2930
},
{
"epoch": 1.9157963446475197,
"grad_norm": 0.10595703125,
"learning_rate": 2.2546375918516026e-05,
"loss": 0.356,
"step": 2935
},
{
"epoch": 1.9190600522193213,
"grad_norm": 0.09375,
"learning_rate": 2.2426318556204095e-05,
"loss": 0.4071,
"step": 2940
},
{
"epoch": 1.9223237597911227,
"grad_norm": 0.0556640625,
"learning_rate": 2.230644518890013e-05,
"loss": 0.2689,
"step": 2945
},
{
"epoch": 1.9255874673629243,
"grad_norm": 0.107421875,
"learning_rate": 2.2186757279823377e-05,
"loss": 0.3768,
"step": 2950
},
{
"epoch": 1.9288511749347257,
"grad_norm": 0.08642578125,
"learning_rate": 2.206725628992935e-05,
"loss": 0.422,
"step": 2955
},
{
"epoch": 1.9321148825065273,
"grad_norm": 0.09326171875,
"learning_rate": 2.1947943677891948e-05,
"loss": 0.4165,
"step": 2960
},
{
"epoch": 1.935378590078329,
"grad_norm": 0.12158203125,
"learning_rate": 2.1828820900085644e-05,
"loss": 0.4499,
"step": 2965
},
{
"epoch": 1.9386422976501305,
"grad_norm": 0.10888671875,
"learning_rate": 2.170988941056772e-05,
"loss": 0.4426,
"step": 2970
},
{
"epoch": 1.9419060052219321,
"grad_norm": 0.08447265625,
"learning_rate": 2.1591150661060552e-05,
"loss": 0.3248,
"step": 2975
},
{
"epoch": 1.9451697127937337,
"grad_norm": 0.07177734375,
"learning_rate": 2.147260610093383e-05,
"loss": 0.3365,
"step": 2980
},
{
"epoch": 1.9484334203655354,
"grad_norm": 0.10986328125,
"learning_rate": 2.1354257177186918e-05,
"loss": 0.4688,
"step": 2985
},
{
"epoch": 1.951697127937337,
"grad_norm": 0.1015625,
"learning_rate": 2.1236105334431134e-05,
"loss": 0.3508,
"step": 2990
},
{
"epoch": 1.9549608355091384,
"grad_norm": 0.0947265625,
"learning_rate": 2.11181520148722e-05,
"loss": 0.3929,
"step": 2995
},
{
"epoch": 1.95822454308094,
"grad_norm": 0.0693359375,
"learning_rate": 2.1000398658292544e-05,
"loss": 0.351,
"step": 3000
},
{
"epoch": 1.95822454308094,
"eval_loss": 0.42418473958969116,
"eval_runtime": 97.1482,
"eval_samples_per_second": 1.709,
"eval_steps_per_second": 0.216,
"step": 3000
},
{
"epoch": 1.9614882506527413,
"grad_norm": 0.048095703125,
"learning_rate": 2.0882846702033817e-05,
"loss": 0.374,
"step": 3005
},
{
"epoch": 1.964751958224543,
"grad_norm": 0.06591796875,
"learning_rate": 2.0765497580979243e-05,
"loss": 0.3603,
"step": 3010
},
{
"epoch": 1.9680156657963446,
"grad_norm": 0.061767578125,
"learning_rate": 2.0648352727536215e-05,
"loss": 0.3853,
"step": 3015
},
{
"epoch": 1.9712793733681462,
"grad_norm": 0.134765625,
"learning_rate": 2.0531413571618748e-05,
"loss": 0.4701,
"step": 3020
},
{
"epoch": 1.9745430809399478,
"grad_norm": 0.10400390625,
"learning_rate": 2.0414681540630008e-05,
"loss": 0.4403,
"step": 3025
},
{
"epoch": 1.9778067885117494,
"grad_norm": 0.08544921875,
"learning_rate": 2.029815805944496e-05,
"loss": 0.4048,
"step": 3030
},
{
"epoch": 1.981070496083551,
"grad_norm": 0.06494140625,
"learning_rate": 2.0181844550392882e-05,
"loss": 0.3307,
"step": 3035
},
{
"epoch": 1.9843342036553526,
"grad_norm": 0.1240234375,
"learning_rate": 2.0065742433240112e-05,
"loss": 0.4144,
"step": 3040
},
{
"epoch": 1.987597911227154,
"grad_norm": 0.072265625,
"learning_rate": 1.994985312517259e-05,
"loss": 0.3239,
"step": 3045
},
{
"epoch": 1.9908616187989556,
"grad_norm": 0.14453125,
"learning_rate": 1.9834178040778708e-05,
"loss": 0.7305,
"step": 3050
},
{
"epoch": 1.9941253263707572,
"grad_norm": 0.072265625,
"learning_rate": 1.9718718592031877e-05,
"loss": 0.3846,
"step": 3055
},
{
"epoch": 1.9973890339425586,
"grad_norm": 0.09423828125,
"learning_rate": 1.9603476188273463e-05,
"loss": 0.3161,
"step": 3060
},
{
"epoch": 2.0006527415143602,
"grad_norm": 0.0390625,
"learning_rate": 1.948845223619543e-05,
"loss": 0.4225,
"step": 3065
},
{
"epoch": 2.003916449086162,
"grad_norm": 0.07421875,
"learning_rate": 1.9373648139823288e-05,
"loss": 0.3153,
"step": 3070
},
{
"epoch": 2.0071801566579635,
"grad_norm": 0.08154296875,
"learning_rate": 1.9259065300498868e-05,
"loss": 0.3552,
"step": 3075
},
{
"epoch": 2.010443864229765,
"grad_norm": 0.048095703125,
"learning_rate": 1.9144705116863293e-05,
"loss": 0.3486,
"step": 3080
},
{
"epoch": 2.0137075718015667,
"grad_norm": 0.0869140625,
"learning_rate": 1.9030568984839837e-05,
"loss": 0.3223,
"step": 3085
},
{
"epoch": 2.0169712793733683,
"grad_norm": 0.1240234375,
"learning_rate": 1.8916658297616905e-05,
"loss": 0.3672,
"step": 3090
},
{
"epoch": 2.02023498694517,
"grad_norm": 0.04931640625,
"learning_rate": 1.88029744456311e-05,
"loss": 0.3068,
"step": 3095
},
{
"epoch": 2.023498694516971,
"grad_norm": 0.126953125,
"learning_rate": 1.8689518816550106e-05,
"loss": 0.3569,
"step": 3100
},
{
"epoch": 2.0267624020887727,
"grad_norm": 0.10595703125,
"learning_rate": 1.8576292795255912e-05,
"loss": 0.3565,
"step": 3105
},
{
"epoch": 2.0300261096605743,
"grad_norm": 0.07958984375,
"learning_rate": 1.8463297763827757e-05,
"loss": 0.3031,
"step": 3110
},
{
"epoch": 2.033289817232376,
"grad_norm": 0.08984375,
"learning_rate": 1.8350535101525395e-05,
"loss": 0.438,
"step": 3115
},
{
"epoch": 2.0365535248041775,
"grad_norm": 0.0791015625,
"learning_rate": 1.823800618477214e-05,
"loss": 0.2767,
"step": 3120
},
{
"epoch": 2.039817232375979,
"grad_norm": 0.130859375,
"learning_rate": 1.8125712387138168e-05,
"loss": 0.2778,
"step": 3125
},
{
"epoch": 2.0430809399477807,
"grad_norm": 0.10009765625,
"learning_rate": 1.8013655079323657e-05,
"loss": 0.4171,
"step": 3130
},
{
"epoch": 2.0463446475195823,
"grad_norm": 0.1494140625,
"learning_rate": 1.7901835629142152e-05,
"loss": 0.3327,
"step": 3135
},
{
"epoch": 2.049608355091384,
"grad_norm": 0.07763671875,
"learning_rate": 1.779025540150376e-05,
"loss": 0.3382,
"step": 3140
},
{
"epoch": 2.0528720626631856,
"grad_norm": 0.10009765625,
"learning_rate": 1.7678915758398612e-05,
"loss": 0.3052,
"step": 3145
},
{
"epoch": 2.056135770234987,
"grad_norm": 0.181640625,
"learning_rate": 1.7567818058880107e-05,
"loss": 0.4037,
"step": 3150
},
{
"epoch": 2.0593994778067883,
"grad_norm": 0.130859375,
"learning_rate": 1.745696365904846e-05,
"loss": 0.3383,
"step": 3155
},
{
"epoch": 2.06266318537859,
"grad_norm": 0.18359375,
"learning_rate": 1.7346353912034024e-05,
"loss": 0.3054,
"step": 3160
},
{
"epoch": 2.0659268929503916,
"grad_norm": 0.09326171875,
"learning_rate": 1.723599016798083e-05,
"loss": 0.3456,
"step": 3165
},
{
"epoch": 2.069190600522193,
"grad_norm": 0.10693359375,
"learning_rate": 1.7125873774030163e-05,
"loss": 0.3405,
"step": 3170
},
{
"epoch": 2.072454308093995,
"grad_norm": 0.1474609375,
"learning_rate": 1.7016006074303974e-05,
"loss": 0.3015,
"step": 3175
},
{
"epoch": 2.0757180156657964,
"grad_norm": 0.1142578125,
"learning_rate": 1.6906388409888627e-05,
"loss": 0.302,
"step": 3180
},
{
"epoch": 2.078981723237598,
"grad_norm": 0.076171875,
"learning_rate": 1.6797022118818447e-05,
"loss": 0.4294,
"step": 3185
},
{
"epoch": 2.0822454308093996,
"grad_norm": 0.07958984375,
"learning_rate": 1.6687908536059365e-05,
"loss": 0.3167,
"step": 3190
},
{
"epoch": 2.085509138381201,
"grad_norm": 0.17578125,
"learning_rate": 1.6579048993492723e-05,
"loss": 0.3518,
"step": 3195
},
{
"epoch": 2.0887728459530024,
"grad_norm": 0.0634765625,
"learning_rate": 1.6470444819898878e-05,
"loss": 0.2712,
"step": 3200
},
{
"epoch": 2.092036553524804,
"grad_norm": 0.1103515625,
"learning_rate": 1.636209734094112e-05,
"loss": 0.3356,
"step": 3205
},
{
"epoch": 2.0953002610966056,
"grad_norm": 0.07373046875,
"learning_rate": 1.6254007879149407e-05,
"loss": 0.3221,
"step": 3210
},
{
"epoch": 2.098563968668407,
"grad_norm": 0.08837890625,
"learning_rate": 1.6146177753904224e-05,
"loss": 0.289,
"step": 3215
},
{
"epoch": 2.101827676240209,
"grad_norm": 0.058837890625,
"learning_rate": 1.6038608281420538e-05,
"loss": 0.2737,
"step": 3220
},
{
"epoch": 2.1050913838120104,
"grad_norm": 0.1943359375,
"learning_rate": 1.5931300774731636e-05,
"loss": 0.3421,
"step": 3225
},
{
"epoch": 2.108355091383812,
"grad_norm": 0.08349609375,
"learning_rate": 1.5824256543673217e-05,
"loss": 0.3091,
"step": 3230
},
{
"epoch": 2.1116187989556137,
"grad_norm": 0.1708984375,
"learning_rate": 1.5717476894867264e-05,
"loss": 0.325,
"step": 3235
},
{
"epoch": 2.1148825065274153,
"grad_norm": 0.12451171875,
"learning_rate": 1.5610963131706243e-05,
"loss": 0.4947,
"step": 3240
},
{
"epoch": 2.118146214099217,
"grad_norm": 0.0947265625,
"learning_rate": 1.5504716554337078e-05,
"loss": 0.3466,
"step": 3245
},
{
"epoch": 2.1214099216710185,
"grad_norm": 0.115234375,
"learning_rate": 1.539873845964531e-05,
"loss": 0.3074,
"step": 3250
},
{
"epoch": 2.1214099216710185,
"eval_loss": 0.43338489532470703,
"eval_runtime": 97.0849,
"eval_samples_per_second": 1.71,
"eval_steps_per_second": 0.216,
"step": 3250
},
{
"epoch": 2.1246736292428197,
"grad_norm": 0.1396484375,
"learning_rate": 1.5293030141239342e-05,
"loss": 0.3912,
"step": 3255
},
{
"epoch": 2.1279373368146213,
"grad_norm": 0.1005859375,
"learning_rate": 1.5187592889434507e-05,
"loss": 0.3207,
"step": 3260
},
{
"epoch": 2.131201044386423,
"grad_norm": 0.12158203125,
"learning_rate": 1.5082427991237471e-05,
"loss": 0.3349,
"step": 3265
},
{
"epoch": 2.1344647519582245,
"grad_norm": 0.224609375,
"learning_rate": 1.4977536730330383e-05,
"loss": 0.3584,
"step": 3270
},
{
"epoch": 2.137728459530026,
"grad_norm": 0.1025390625,
"learning_rate": 1.4872920387055338e-05,
"loss": 0.4087,
"step": 3275
},
{
"epoch": 2.1409921671018277,
"grad_norm": 0.1884765625,
"learning_rate": 1.4768580238398619e-05,
"loss": 0.322,
"step": 3280
},
{
"epoch": 2.1442558746736293,
"grad_norm": 0.18359375,
"learning_rate": 1.4664517557975236e-05,
"loss": 0.3243,
"step": 3285
},
{
"epoch": 2.147519582245431,
"grad_norm": 0.09033203125,
"learning_rate": 1.456073361601326e-05,
"loss": 0.3427,
"step": 3290
},
{
"epoch": 2.1507832898172325,
"grad_norm": 0.142578125,
"learning_rate": 1.4457229679338425e-05,
"loss": 0.3055,
"step": 3295
},
{
"epoch": 2.1540469973890337,
"grad_norm": 0.103515625,
"learning_rate": 1.435400701135857e-05,
"loss": 0.3251,
"step": 3300
},
{
"epoch": 2.1573107049608353,
"grad_norm": 0.1201171875,
"learning_rate": 1.4251066872048312e-05,
"loss": 0.3683,
"step": 3305
},
{
"epoch": 2.160574412532637,
"grad_norm": 0.10400390625,
"learning_rate": 1.4148410517933568e-05,
"loss": 0.3124,
"step": 3310
},
{
"epoch": 2.1638381201044385,
"grad_norm": 0.1337890625,
"learning_rate": 1.4046039202076314e-05,
"loss": 0.3981,
"step": 3315
},
{
"epoch": 2.16710182767624,
"grad_norm": 0.0634765625,
"learning_rate": 1.3943954174059192e-05,
"loss": 0.3228,
"step": 3320
},
{
"epoch": 2.1703655352480418,
"grad_norm": 0.11865234375,
"learning_rate": 1.384215667997036e-05,
"loss": 0.2762,
"step": 3325
},
{
"epoch": 2.1736292428198434,
"grad_norm": 0.1728515625,
"learning_rate": 1.3740647962388183e-05,
"loss": 0.3592,
"step": 3330
},
{
"epoch": 2.176892950391645,
"grad_norm": 0.1376953125,
"learning_rate": 1.3639429260366128e-05,
"loss": 0.399,
"step": 3335
},
{
"epoch": 2.1801566579634466,
"grad_norm": 0.07958984375,
"learning_rate": 1.3538501809417646e-05,
"loss": 0.2658,
"step": 3340
},
{
"epoch": 2.183420365535248,
"grad_norm": 0.09033203125,
"learning_rate": 1.3437866841501027e-05,
"loss": 0.3383,
"step": 3345
},
{
"epoch": 2.18668407310705,
"grad_norm": 0.09375,
"learning_rate": 1.3337525585004444e-05,
"loss": 0.3072,
"step": 3350
},
{
"epoch": 2.189947780678851,
"grad_norm": 0.11279296875,
"learning_rate": 1.323747926473088e-05,
"loss": 0.3372,
"step": 3355
},
{
"epoch": 2.1932114882506526,
"grad_norm": 0.1025390625,
"learning_rate": 1.3137729101883261e-05,
"loss": 0.3112,
"step": 3360
},
{
"epoch": 2.196475195822454,
"grad_norm": 0.10205078125,
"learning_rate": 1.3038276314049449e-05,
"loss": 0.3035,
"step": 3365
},
{
"epoch": 2.199738903394256,
"grad_norm": 0.07763671875,
"learning_rate": 1.2939122115187478e-05,
"loss": 0.2873,
"step": 3370
},
{
"epoch": 2.2030026109660574,
"grad_norm": 0.0654296875,
"learning_rate": 1.2840267715610693e-05,
"loss": 0.3703,
"step": 3375
},
{
"epoch": 2.206266318537859,
"grad_norm": 0.1962890625,
"learning_rate": 1.2741714321972936e-05,
"loss": 0.3806,
"step": 3380
},
{
"epoch": 2.2095300261096606,
"grad_norm": 0.0966796875,
"learning_rate": 1.2643463137253914e-05,
"loss": 0.2866,
"step": 3385
},
{
"epoch": 2.2127937336814623,
"grad_norm": 0.0869140625,
"learning_rate": 1.2545515360744405e-05,
"loss": 0.3115,
"step": 3390
},
{
"epoch": 2.216057441253264,
"grad_norm": 0.1376953125,
"learning_rate": 1.2447872188031712e-05,
"loss": 0.3881,
"step": 3395
},
{
"epoch": 2.2193211488250655,
"grad_norm": 0.09716796875,
"learning_rate": 1.2350534810985007e-05,
"loss": 0.2636,
"step": 3400
},
{
"epoch": 2.2225848563968666,
"grad_norm": 0.107421875,
"learning_rate": 1.2253504417740808e-05,
"loss": 0.2787,
"step": 3405
},
{
"epoch": 2.2258485639686683,
"grad_norm": 0.125,
"learning_rate": 1.2156782192688502e-05,
"loss": 0.6392,
"step": 3410
},
{
"epoch": 2.22911227154047,
"grad_norm": 0.12158203125,
"learning_rate": 1.206036931645582e-05,
"loss": 0.2901,
"step": 3415
},
{
"epoch": 2.2323759791122715,
"grad_norm": 0.18359375,
"learning_rate": 1.1964266965894472e-05,
"loss": 0.3881,
"step": 3420
},
{
"epoch": 2.235639686684073,
"grad_norm": 0.13671875,
"learning_rate": 1.1868476314065808e-05,
"loss": 0.2948,
"step": 3425
},
{
"epoch": 2.2389033942558747,
"grad_norm": 0.1396484375,
"learning_rate": 1.1772998530226423e-05,
"loss": 0.3676,
"step": 3430
},
{
"epoch": 2.2421671018276763,
"grad_norm": 0.0830078125,
"learning_rate": 1.1677834779813963e-05,
"loss": 0.4046,
"step": 3435
},
{
"epoch": 2.245430809399478,
"grad_norm": 0.1396484375,
"learning_rate": 1.158298622443283e-05,
"loss": 0.3048,
"step": 3440
},
{
"epoch": 2.2486945169712795,
"grad_norm": 0.13671875,
"learning_rate": 1.1488454021840085e-05,
"loss": 0.3067,
"step": 3445
},
{
"epoch": 2.251958224543081,
"grad_norm": 0.1552734375,
"learning_rate": 1.1394239325931214e-05,
"loss": 0.3266,
"step": 3450
},
{
"epoch": 2.2552219321148828,
"grad_norm": 0.1318359375,
"learning_rate": 1.1300343286726145e-05,
"loss": 0.3957,
"step": 3455
},
{
"epoch": 2.258485639686684,
"grad_norm": 0.06689453125,
"learning_rate": 1.120676705035512e-05,
"loss": 0.3257,
"step": 3460
},
{
"epoch": 2.2617493472584855,
"grad_norm": 0.07568359375,
"learning_rate": 1.1113511759044789e-05,
"loss": 0.373,
"step": 3465
},
{
"epoch": 2.265013054830287,
"grad_norm": 0.0986328125,
"learning_rate": 1.1020578551104183e-05,
"loss": 0.3352,
"step": 3470
},
{
"epoch": 2.2682767624020888,
"grad_norm": 0.1455078125,
"learning_rate": 1.0927968560910915e-05,
"loss": 0.3619,
"step": 3475
},
{
"epoch": 2.2715404699738904,
"grad_norm": 0.2138671875,
"learning_rate": 1.0835682918897221e-05,
"loss": 0.4713,
"step": 3480
},
{
"epoch": 2.274804177545692,
"grad_norm": 0.12451171875,
"learning_rate": 1.074372275153628e-05,
"loss": 0.4476,
"step": 3485
},
{
"epoch": 2.2780678851174936,
"grad_norm": 0.0673828125,
"learning_rate": 1.0652089181328364e-05,
"loss": 0.3008,
"step": 3490
},
{
"epoch": 2.281331592689295,
"grad_norm": 0.10595703125,
"learning_rate": 1.0560783326787182e-05,
"loss": 0.3803,
"step": 3495
},
{
"epoch": 2.2845953002610964,
"grad_norm": 0.09228515625,
"learning_rate": 1.0469806302426252e-05,
"loss": 0.3342,
"step": 3500
},
{
"epoch": 2.2845953002610964,
"eval_loss": 0.4299331605434418,
"eval_runtime": 97.0974,
"eval_samples_per_second": 1.71,
"eval_steps_per_second": 0.216,
"step": 3500
},
{
"epoch": 2.287859007832898,
"grad_norm": 0.09375,
"learning_rate": 1.0379159218745229e-05,
"loss": 0.3328,
"step": 3505
},
{
"epoch": 2.2911227154046996,
"grad_norm": 0.154296875,
"learning_rate": 1.0288843182216424e-05,
"loss": 0.3187,
"step": 3510
},
{
"epoch": 2.294386422976501,
"grad_norm": 0.10546875,
"learning_rate": 1.0198859295271224e-05,
"loss": 0.3737,
"step": 3515
},
{
"epoch": 2.297650130548303,
"grad_norm": 0.169921875,
"learning_rate": 1.0109208656286718e-05,
"loss": 0.4332,
"step": 3520
},
{
"epoch": 2.3009138381201044,
"grad_norm": 0.0986328125,
"learning_rate": 1.0019892359572203e-05,
"loss": 0.2869,
"step": 3525
},
{
"epoch": 2.304177545691906,
"grad_norm": 0.11181640625,
"learning_rate": 9.930911495355906e-06,
"loss": 0.3341,
"step": 3530
},
{
"epoch": 2.3074412532637076,
"grad_norm": 0.08447265625,
"learning_rate": 9.842267149771599e-06,
"loss": 0.3378,
"step": 3535
},
{
"epoch": 2.3107049608355092,
"grad_norm": 0.1416015625,
"learning_rate": 9.753960404845429e-06,
"loss": 0.3384,
"step": 3540
},
{
"epoch": 2.313968668407311,
"grad_norm": 0.181640625,
"learning_rate": 9.66599233848261e-06,
"loss": 0.4086,
"step": 3545
},
{
"epoch": 2.3172323759791125,
"grad_norm": 0.07373046875,
"learning_rate": 9.578364024454378e-06,
"loss": 0.2587,
"step": 3550
},
{
"epoch": 2.320496083550914,
"grad_norm": 0.138671875,
"learning_rate": 9.491076532384757e-06,
"loss": 0.3813,
"step": 3555
},
{
"epoch": 2.3237597911227152,
"grad_norm": 0.1318359375,
"learning_rate": 9.404130927737637e-06,
"loss": 0.3561,
"step": 3560
},
{
"epoch": 2.327023498694517,
"grad_norm": 0.158203125,
"learning_rate": 9.317528271803645e-06,
"loss": 0.3228,
"step": 3565
},
{
"epoch": 2.3302872062663185,
"grad_norm": 0.12890625,
"learning_rate": 9.231269621687287e-06,
"loss": 0.3716,
"step": 3570
},
{
"epoch": 2.33355091383812,
"grad_norm": 0.0712890625,
"learning_rate": 9.145356030294e-06,
"loss": 0.3529,
"step": 3575
},
{
"epoch": 2.3368146214099217,
"grad_norm": 0.09521484375,
"learning_rate": 9.059788546317274e-06,
"loss": 0.2741,
"step": 3580
},
{
"epoch": 2.3400783289817233,
"grad_norm": 0.09130859375,
"learning_rate": 8.974568214225928e-06,
"loss": 0.3426,
"step": 3585
},
{
"epoch": 2.343342036553525,
"grad_norm": 0.1201171875,
"learning_rate": 8.889696074251269e-06,
"loss": 0.4396,
"step": 3590
},
{
"epoch": 2.3466057441253265,
"grad_norm": 0.158203125,
"learning_rate": 8.805173162374476e-06,
"loss": 0.3047,
"step": 3595
},
{
"epoch": 2.349869451697128,
"grad_norm": 0.0947265625,
"learning_rate": 8.721000510313898e-06,
"loss": 0.274,
"step": 3600
},
{
"epoch": 2.3531331592689293,
"grad_norm": 0.15625,
"learning_rate": 8.637179145512498e-06,
"loss": 0.3896,
"step": 3605
},
{
"epoch": 2.356396866840731,
"grad_norm": 0.1201171875,
"learning_rate": 8.553710091125273e-06,
"loss": 0.4092,
"step": 3610
},
{
"epoch": 2.3596605744125325,
"grad_norm": 0.11669921875,
"learning_rate": 8.470594366006814e-06,
"loss": 0.2948,
"step": 3615
},
{
"epoch": 2.362924281984334,
"grad_norm": 0.0966796875,
"learning_rate": 8.38783298469881e-06,
"loss": 0.3066,
"step": 3620
},
{
"epoch": 2.3661879895561357,
"grad_norm": 0.1630859375,
"learning_rate": 8.305426957417738e-06,
"loss": 0.3687,
"step": 3625
},
{
"epoch": 2.3694516971279374,
"grad_norm": 0.111328125,
"learning_rate": 8.223377290042442e-06,
"loss": 0.3082,
"step": 3630
},
{
"epoch": 2.372715404699739,
"grad_norm": 0.130859375,
"learning_rate": 8.141684984101953e-06,
"loss": 0.3597,
"step": 3635
},
{
"epoch": 2.3759791122715406,
"grad_norm": 0.1552734375,
"learning_rate": 8.060351036763164e-06,
"loss": 0.3336,
"step": 3640
},
{
"epoch": 2.379242819843342,
"grad_norm": 0.1328125,
"learning_rate": 7.97937644081876e-06,
"loss": 0.2912,
"step": 3645
},
{
"epoch": 2.382506527415144,
"grad_norm": 0.09521484375,
"learning_rate": 7.898762184675001e-06,
"loss": 0.309,
"step": 3650
},
{
"epoch": 2.3857702349869454,
"grad_norm": 0.1279296875,
"learning_rate": 7.818509252339747e-06,
"loss": 0.3665,
"step": 3655
},
{
"epoch": 2.3890339425587466,
"grad_norm": 0.076171875,
"learning_rate": 7.738618623410369e-06,
"loss": 0.3755,
"step": 3660
},
{
"epoch": 2.392297650130548,
"grad_norm": 0.1806640625,
"learning_rate": 7.659091273061833e-06,
"loss": 0.3564,
"step": 3665
},
{
"epoch": 2.39556135770235,
"grad_norm": 0.11865234375,
"learning_rate": 7.5799281720348175e-06,
"loss": 0.4174,
"step": 3670
},
{
"epoch": 2.3988250652741514,
"grad_norm": 0.1044921875,
"learning_rate": 7.5011302866238025e-06,
"loss": 0.3259,
"step": 3675
},
{
"epoch": 2.402088772845953,
"grad_norm": 0.10693359375,
"learning_rate": 7.422698578665338e-06,
"loss": 0.265,
"step": 3680
},
{
"epoch": 2.4053524804177546,
"grad_norm": NaN,
"learning_rate": 7.344634005526242e-06,
"loss": 0.3676,
"step": 3685
},
{
"epoch": 2.4086161879895562,
"grad_norm": 0.130859375,
"learning_rate": 7.266937520091989e-06,
"loss": 0.3388,
"step": 3690
},
{
"epoch": 2.411879895561358,
"grad_norm": 0.138671875,
"learning_rate": 7.189610070754998e-06,
"loss": 0.29,
"step": 3695
},
{
"epoch": 2.4151436031331595,
"grad_norm": 0.08251953125,
"learning_rate": 7.112652601403125e-06,
"loss": 0.484,
"step": 3700
},
{
"epoch": 2.4184073107049606,
"grad_norm": 0.064453125,
"learning_rate": 7.036066051408078e-06,
"loss": 0.3144,
"step": 3705
},
{
"epoch": 2.4216710182767622,
"grad_norm": 0.07177734375,
"learning_rate": 6.9598513556140285e-06,
"loss": 0.3376,
"step": 3710
},
{
"epoch": 2.424934725848564,
"grad_norm": 0.08935546875,
"learning_rate": 6.884009444326109e-06,
"loss": 0.3488,
"step": 3715
},
{
"epoch": 2.4281984334203655,
"grad_norm": 0.1767578125,
"learning_rate": 6.808541243299148e-06,
"loss": 0.3758,
"step": 3720
},
{
"epoch": 2.431462140992167,
"grad_norm": 0.0869140625,
"learning_rate": 6.733447673726288e-06,
"loss": 0.3327,
"step": 3725
},
{
"epoch": 2.4347258485639687,
"grad_norm": 0.09375,
"learning_rate": 6.658729652227815e-06,
"loss": 0.277,
"step": 3730
},
{
"epoch": 2.4379895561357703,
"grad_norm": 0.08935546875,
"learning_rate": 6.584388090839894e-06,
"loss": 0.2496,
"step": 3735
},
{
"epoch": 2.441253263707572,
"grad_norm": 0.1279296875,
"learning_rate": 6.510423897003526e-06,
"loss": 0.43,
"step": 3740
},
{
"epoch": 2.4445169712793735,
"grad_norm": 0.1396484375,
"learning_rate": 6.436837973553377e-06,
"loss": 0.3731,
"step": 3745
},
{
"epoch": 2.447780678851175,
"grad_norm": 0.11962890625,
"learning_rate": 6.36363121870683e-06,
"loss": 0.343,
"step": 3750
},
{
"epoch": 2.447780678851175,
"eval_loss": 0.43050137162208557,
"eval_runtime": 97.0194,
"eval_samples_per_second": 1.711,
"eval_steps_per_second": 0.216,
"step": 3750
},
{
"epoch": 2.4510443864229767,
"grad_norm": 0.0732421875,
"learning_rate": 6.29080452605298e-06,
"loss": 0.2704,
"step": 3755
},
{
"epoch": 2.454308093994778,
"grad_norm": 0.087890625,
"learning_rate": 6.218358784541744e-06,
"loss": 0.3681,
"step": 3760
},
{
"epoch": 2.4575718015665795,
"grad_norm": 0.1337890625,
"learning_rate": 6.146294878473022e-06,
"loss": 0.4178,
"step": 3765
},
{
"epoch": 2.460835509138381,
"grad_norm": 0.10791015625,
"learning_rate": 6.074613687485852e-06,
"loss": 0.4002,
"step": 3770
},
{
"epoch": 2.4640992167101827,
"grad_norm": 0.0888671875,
"learning_rate": 6.003316086547756e-06,
"loss": 0.2876,
"step": 3775
},
{
"epoch": 2.4673629242819843,
"grad_norm": 0.0732421875,
"learning_rate": 5.9324029459439625e-06,
"loss": 0.2893,
"step": 3780
},
{
"epoch": 2.470626631853786,
"grad_norm": 0.13671875,
"learning_rate": 5.8618751312668715e-06,
"loss": 0.3622,
"step": 3785
},
{
"epoch": 2.4738903394255876,
"grad_norm": 0.0712890625,
"learning_rate": 5.791733503405424e-06,
"loss": 0.2832,
"step": 3790
},
{
"epoch": 2.477154046997389,
"grad_norm": NaN,
"learning_rate": 5.721978918534642e-06,
"loss": 0.352,
"step": 3795
},
{
"epoch": 2.480417754569191,
"grad_norm": 0.1484375,
"learning_rate": 5.652612228105132e-06,
"loss": 0.4816,
"step": 3800
},
{
"epoch": 2.483681462140992,
"grad_norm": 0.09326171875,
"learning_rate": 5.583634278832744e-06,
"loss": 0.4329,
"step": 3805
},
{
"epoch": 2.4869451697127936,
"grad_norm": 0.1767578125,
"learning_rate": 5.515045912688172e-06,
"loss": 0.3659,
"step": 3810
},
{
"epoch": 2.490208877284595,
"grad_norm": 0.10693359375,
"learning_rate": 5.446847966886755e-06,
"loss": 0.3918,
"step": 3815
},
{
"epoch": 2.493472584856397,
"grad_norm": 0.07568359375,
"learning_rate": 5.379041273878177e-06,
"loss": 0.2514,
"step": 3820
},
{
"epoch": 2.4967362924281984,
"grad_norm": 0.0810546875,
"learning_rate": 5.311626661336356e-06,
"loss": 0.3803,
"step": 3825
},
{
"epoch": 2.5,
"grad_norm": 0.138671875,
"learning_rate": 5.244604952149344e-06,
"loss": 0.4072,
"step": 3830
},
{
"epoch": 2.5032637075718016,
"grad_norm": 0.150390625,
"learning_rate": 5.177976964409239e-06,
"loss": 0.2915,
"step": 3835
},
{
"epoch": 2.506527415143603,
"grad_norm": 0.1025390625,
"learning_rate": 5.11174351140226e-06,
"loss": 0.2754,
"step": 3840
},
{
"epoch": 2.509791122715405,
"grad_norm": 0.1943359375,
"learning_rate": 5.045905401598755e-06,
"loss": 0.3264,
"step": 3845
},
{
"epoch": 2.5130548302872064,
"grad_norm": 0.076171875,
"learning_rate": 4.980463438643395e-06,
"loss": 0.4017,
"step": 3850
},
{
"epoch": 2.516318537859008,
"grad_norm": 0.0908203125,
"learning_rate": 4.915418421345301e-06,
"loss": 0.3507,
"step": 3855
},
{
"epoch": 2.5195822454308097,
"grad_norm": 0.1259765625,
"learning_rate": 4.850771143668365e-06,
"loss": 0.3118,
"step": 3860
},
{
"epoch": 2.522845953002611,
"grad_norm": 0.140625,
"learning_rate": 4.78652239472148e-06,
"loss": 0.3528,
"step": 3865
},
{
"epoch": 2.5261096605744124,
"grad_norm": 0.0634765625,
"learning_rate": 4.7226729587489856e-06,
"loss": 0.244,
"step": 3870
},
{
"epoch": 2.529373368146214,
"grad_norm": 0.09423828125,
"learning_rate": 4.65922361512102e-06,
"loss": 0.2996,
"step": 3875
},
{
"epoch": 2.5326370757180157,
"grad_norm": 0.0634765625,
"learning_rate": 4.5961751383240845e-06,
"loss": 0.3682,
"step": 3880
},
{
"epoch": 2.5359007832898173,
"grad_norm": 0.134765625,
"learning_rate": 4.53352829795151e-06,
"loss": 0.3229,
"step": 3885
},
{
"epoch": 2.539164490861619,
"grad_norm": 0.12353515625,
"learning_rate": 4.47128385869413e-06,
"loss": 0.3573,
"step": 3890
},
{
"epoch": 2.5424281984334205,
"grad_norm": 0.1845703125,
"learning_rate": 4.409442580330907e-06,
"loss": 0.3391,
"step": 3895
},
{
"epoch": 2.5456919060052217,
"grad_norm": 0.1572265625,
"learning_rate": 4.34800521771968e-06,
"loss": 0.3845,
"step": 3900
},
{
"epoch": 2.5489556135770233,
"grad_norm": 0.1474609375,
"learning_rate": 4.2869725207879304e-06,
"loss": 0.3741,
"step": 3905
},
{
"epoch": 2.552219321148825,
"grad_norm": 0.08544921875,
"learning_rate": 4.226345234523634e-06,
"loss": 0.3384,
"step": 3910
},
{
"epoch": 2.5554830287206265,
"grad_norm": 0.0673828125,
"learning_rate": 4.166124098966196e-06,
"loss": 0.3473,
"step": 3915
},
{
"epoch": 2.558746736292428,
"grad_norm": 0.07666015625,
"learning_rate": 4.106309849197362e-06,
"loss": 0.2661,
"step": 3920
},
{
"epoch": 2.5620104438642297,
"grad_norm": 0.16796875,
"learning_rate": 4.046903215332312e-06,
"loss": 0.3476,
"step": 3925
},
{
"epoch": 2.5652741514360313,
"grad_norm": 0.1396484375,
"learning_rate": 3.987904922510677e-06,
"loss": 0.4288,
"step": 3930
},
{
"epoch": 2.568537859007833,
"grad_norm": 0.0654296875,
"learning_rate": 3.929315690887754e-06,
"loss": 0.2945,
"step": 3935
},
{
"epoch": 2.5718015665796345,
"grad_norm": 0.2373046875,
"learning_rate": 3.871136235625673e-06,
"loss": 0.5041,
"step": 3940
},
{
"epoch": 2.575065274151436,
"grad_norm": 0.1865234375,
"learning_rate": 3.8133672668846745e-06,
"loss": 0.3282,
"step": 3945
},
{
"epoch": 2.5783289817232378,
"grad_norm": 0.107421875,
"learning_rate": 3.75600948981446e-06,
"loss": 0.4907,
"step": 3950
},
{
"epoch": 2.5815926892950394,
"grad_norm": 0.10546875,
"learning_rate": 3.6990636045455574e-06,
"loss": 0.3415,
"step": 3955
},
{
"epoch": 2.584856396866841,
"grad_norm": 0.10107421875,
"learning_rate": 3.6425303061807965e-06,
"loss": 0.3957,
"step": 3960
},
{
"epoch": 2.588120104438642,
"grad_norm": 0.1337890625,
"learning_rate": 3.5864102847868226e-06,
"loss": 0.3348,
"step": 3965
},
{
"epoch": 2.5913838120104438,
"grad_norm": 0.09326171875,
"learning_rate": 3.530704225385654e-06,
"loss": 0.2835,
"step": 3970
},
{
"epoch": 2.5946475195822454,
"grad_norm": 0.2197265625,
"learning_rate": 3.4754128079463427e-06,
"loss": 0.3671,
"step": 3975
},
{
"epoch": 2.597911227154047,
"grad_norm": 0.1298828125,
"learning_rate": 3.4205367073766603e-06,
"loss": 0.452,
"step": 3980
},
{
"epoch": 2.6011749347258486,
"grad_norm": 0.08740234375,
"learning_rate": 3.3660765935148744e-06,
"loss": 0.3029,
"step": 3985
},
{
"epoch": 2.60443864229765,
"grad_norm": 0.12451171875,
"learning_rate": 3.312033131121548e-06,
"loss": 0.2667,
"step": 3990
},
{
"epoch": 2.607702349869452,
"grad_norm": 0.1162109375,
"learning_rate": 3.2584069798714474e-06,
"loss": 0.3078,
"step": 3995
},
{
"epoch": 2.6109660574412534,
"grad_norm": 0.0791015625,
"learning_rate": 3.205198794345494e-06,
"loss": 0.3406,
"step": 4000
},
{
"epoch": 2.6109660574412534,
"eval_loss": 0.4306102395057678,
"eval_runtime": 96.9205,
"eval_samples_per_second": 1.713,
"eval_steps_per_second": 0.217,
"step": 4000
},
{
"epoch": 2.6142297650130546,
"grad_norm": 0.09716796875,
"learning_rate": 3.152409224022734e-06,
"loss": 0.3848,
"step": 4005
},
{
"epoch": 2.617493472584856,
"grad_norm": 0.1279296875,
"learning_rate": 3.100038913272479e-06,
"loss": 0.49,
"step": 4010
},
{
"epoch": 2.620757180156658,
"grad_norm": 0.04833984375,
"learning_rate": 3.04808850134636e-06,
"loss": 0.2684,
"step": 4015
},
{
"epoch": 2.6240208877284594,
"grad_norm": 0.1689453125,
"learning_rate": 2.9965586223705996e-06,
"loss": 0.4062,
"step": 4020
},
{
"epoch": 2.627284595300261,
"grad_norm": 0.142578125,
"learning_rate": 2.9454499053382095e-06,
"loss": 0.3041,
"step": 4025
},
{
"epoch": 2.6305483028720626,
"grad_norm": 0.2294921875,
"learning_rate": 2.8947629741013724e-06,
"loss": 0.8468,
"step": 4030
},
{
"epoch": 2.6338120104438643,
"grad_norm": 0.11767578125,
"learning_rate": 2.844498447363759e-06,
"loss": 0.4113,
"step": 4035
},
{
"epoch": 2.637075718015666,
"grad_norm": 0.10107421875,
"learning_rate": 2.7946569386730413e-06,
"loss": 0.2856,
"step": 4040
},
{
"epoch": 2.6403394255874675,
"grad_norm": 0.140625,
"learning_rate": 2.745239056413358e-06,
"loss": 0.3137,
"step": 4045
},
{
"epoch": 2.643603133159269,
"grad_norm": 0.09228515625,
"learning_rate": 2.6962454037979213e-06,
"loss": 0.325,
"step": 4050
},
{
"epoch": 2.6468668407310707,
"grad_norm": 0.138671875,
"learning_rate": 2.647676578861607e-06,
"loss": 0.5683,
"step": 4055
},
{
"epoch": 2.6501305483028723,
"grad_norm": 0.06787109375,
"learning_rate": 2.5995331744537273e-06,
"loss": 0.2632,
"step": 4060
},
{
"epoch": 2.6533942558746735,
"grad_norm": 0.158203125,
"learning_rate": 2.551815778230709e-06,
"loss": 0.3466,
"step": 4065
},
{
"epoch": 2.656657963446475,
"grad_norm": 0.0693359375,
"learning_rate": 2.504524972648998e-06,
"loss": 0.2886,
"step": 4070
},
{
"epoch": 2.6599216710182767,
"grad_norm": 0.1591796875,
"learning_rate": 2.457661334957893e-06,
"loss": 0.5142,
"step": 4075
},
{
"epoch": 2.6631853785900783,
"grad_norm": 0.076171875,
"learning_rate": 2.411225437192521e-06,
"loss": 0.4388,
"step": 4080
},
{
"epoch": 2.66644908616188,
"grad_norm": 0.13671875,
"learning_rate": 2.3652178461668824e-06,
"loss": 0.3042,
"step": 4085
},
{
"epoch": 2.6697127937336815,
"grad_norm": 0.07763671875,
"learning_rate": 2.3196391234668625e-06,
"loss": 0.2691,
"step": 4090
},
{
"epoch": 2.672976501305483,
"grad_norm": 0.09033203125,
"learning_rate": 2.2744898254434567e-06,
"loss": 0.3605,
"step": 4095
},
{
"epoch": 2.6762402088772848,
"grad_norm": 0.1474609375,
"learning_rate": 2.229770503205919e-06,
"loss": 0.4555,
"step": 4100
},
{
"epoch": 2.679503916449086,
"grad_norm": 0.109375,
"learning_rate": 2.185481702615076e-06,
"loss": 0.332,
"step": 4105
},
{
"epoch": 2.6827676240208875,
"grad_norm": 0.0732421875,
"learning_rate": 2.1416239642766177e-06,
"loss": 0.3176,
"step": 4110
},
{
"epoch": 2.686031331592689,
"grad_norm": 0.09228515625,
"learning_rate": 2.0981978235345586e-06,
"loss": 0.2961,
"step": 4115
},
{
"epoch": 2.6892950391644908,
"grad_norm": 0.1298828125,
"learning_rate": 2.0552038104646453e-06,
"loss": 0.2982,
"step": 4120
},
{
"epoch": 2.6925587467362924,
"grad_norm": 0.07861328125,
"learning_rate": 2.0126424498679294e-06,
"loss": 0.2402,
"step": 4125
},
{
"epoch": 2.695822454308094,
"grad_norm": 0.115234375,
"learning_rate": 1.970514261264343e-06,
"loss": 0.3185,
"step": 4130
},
{
"epoch": 2.6990861618798956,
"grad_norm": 0.06591796875,
"learning_rate": 1.928819758886342e-06,
"loss": 0.2964,
"step": 4135
},
{
"epoch": 2.702349869451697,
"grad_norm": 0.09423828125,
"learning_rate": 1.8875594516726641e-06,
"loss": 0.2859,
"step": 4140
},
{
"epoch": 2.705613577023499,
"grad_norm": 0.1875,
"learning_rate": 1.8467338432620854e-06,
"loss": 0.2926,
"step": 4145
},
{
"epoch": 2.7088772845953004,
"grad_norm": NaN,
"learning_rate": 1.8063434319872977e-06,
"loss": 0.45,
"step": 4150
},
{
"epoch": 2.712140992167102,
"grad_norm": 0.193359375,
"learning_rate": 1.7663887108688005e-06,
"loss": 0.3227,
"step": 4155
},
{
"epoch": 2.7154046997389036,
"grad_norm": 0.12890625,
"learning_rate": 1.7268701676089055e-06,
"loss": 0.3024,
"step": 4160
},
{
"epoch": 2.718668407310705,
"grad_norm": 0.08935546875,
"learning_rate": 1.6877882845857671e-06,
"loss": 0.4541,
"step": 4165
},
{
"epoch": 2.7219321148825064,
"grad_norm": 0.09716796875,
"learning_rate": 1.6491435388475068e-06,
"loss": 0.3162,
"step": 4170
},
{
"epoch": 2.725195822454308,
"grad_norm": 0.134765625,
"learning_rate": 1.6109364021063776e-06,
"loss": 0.3428,
"step": 4175
},
{
"epoch": 2.7284595300261096,
"grad_norm": 0.07958984375,
"learning_rate": 1.5731673407330252e-06,
"loss": 0.3112,
"step": 4180
},
{
"epoch": 2.7317232375979112,
"grad_norm": 0.06005859375,
"learning_rate": 1.5358368157507655e-06,
"loss": 0.2769,
"step": 4185
},
{
"epoch": 2.734986945169713,
"grad_norm": 0.06298828125,
"learning_rate": 1.4989452828299956e-06,
"loss": 0.3968,
"step": 4190
},
{
"epoch": 2.7382506527415145,
"grad_norm": 0.08837890625,
"learning_rate": 1.4624931922825863e-06,
"loss": 0.2949,
"step": 4195
},
{
"epoch": 2.741514360313316,
"grad_norm": 0.14453125,
"learning_rate": 1.4264809890564374e-06,
"loss": 0.3438,
"step": 4200
},
{
"epoch": 2.7447780678851172,
"grad_norm": 0.083984375,
"learning_rate": 1.3909091127299932e-06,
"loss": 0.2876,
"step": 4205
},
{
"epoch": 2.748041775456919,
"grad_norm": 0.0673828125,
"learning_rate": 1.3557779975069267e-06,
"loss": 0.2769,
"step": 4210
},
{
"epoch": 2.7513054830287205,
"grad_norm": 0.08935546875,
"learning_rate": 1.3210880722107895e-06,
"loss": 0.2985,
"step": 4215
},
{
"epoch": 2.754569190600522,
"grad_norm": 0.10400390625,
"learning_rate": 1.2868397602798287e-06,
"loss": 0.4005,
"step": 4220
},
{
"epoch": 2.7578328981723237,
"grad_norm": 0.134765625,
"learning_rate": 1.253033479761774e-06,
"loss": 0.2723,
"step": 4225
},
{
"epoch": 2.7610966057441253,
"grad_norm": 0.1845703125,
"learning_rate": 1.219669643308771e-06,
"loss": 0.4161,
"step": 4230
},
{
"epoch": 2.764360313315927,
"grad_norm": 0.1220703125,
"learning_rate": 1.186748658172311e-06,
"loss": 0.266,
"step": 4235
},
{
"epoch": 2.7676240208877285,
"grad_norm": 0.09814453125,
"learning_rate": 1.1542709261982835e-06,
"loss": 0.3112,
"step": 4240
},
{
"epoch": 2.77088772845953,
"grad_norm": 0.0908203125,
"learning_rate": 1.1222368438220691e-06,
"loss": 0.3189,
"step": 4245
},
{
"epoch": 2.7741514360313317,
"grad_norm": 0.076171875,
"learning_rate": 1.0906468020636799e-06,
"loss": 0.3175,
"step": 4250
},
{
"epoch": 2.7741514360313317,
"eval_loss": 0.4307728111743927,
"eval_runtime": 97.0566,
"eval_samples_per_second": 1.71,
"eval_steps_per_second": 0.216,
"step": 4250
},
{
"epoch": 2.7774151436031334,
"grad_norm": 0.0986328125,
"learning_rate": 1.0595011865230221e-06,
"loss": 0.3127,
"step": 4255
},
{
"epoch": 2.780678851174935,
"grad_norm": 0.0654296875,
"learning_rate": 1.0288003773751454e-06,
"loss": 0.3544,
"step": 4260
},
{
"epoch": 2.783942558746736,
"grad_norm": 0.1796875,
"learning_rate": 9.985447493656513e-07,
"loss": 0.38,
"step": 4265
},
{
"epoch": 2.7872062663185377,
"grad_norm": 0.1396484375,
"learning_rate": 9.687346718060707e-07,
"loss": 0.3401,
"step": 4270
},
{
"epoch": 2.7904699738903394,
"grad_norm": 0.125,
"learning_rate": 9.39370508569394e-07,
"loss": 0.345,
"step": 4275
},
{
"epoch": 2.793733681462141,
"grad_norm": 0.0712890625,
"learning_rate": 9.104526180856025e-07,
"loss": 0.3241,
"step": 4280
},
{
"epoch": 2.7969973890339426,
"grad_norm": 0.1630859375,
"learning_rate": 8.819813533373147e-07,
"loss": 0.3457,
"step": 4285
},
{
"epoch": 2.800261096605744,
"grad_norm": 0.08740234375,
"learning_rate": 8.539570618554676e-07,
"loss": 0.3233,
"step": 4290
},
{
"epoch": 2.803524804177546,
"grad_norm": 0.1728515625,
"learning_rate": 8.263800857150671e-07,
"loss": 0.3968,
"step": 4295
},
{
"epoch": 2.8067885117493474,
"grad_norm": 0.076171875,
"learning_rate": 7.992507615310201e-07,
"loss": 0.3133,
"step": 4300
},
{
"epoch": 2.8100522193211486,
"grad_norm": 0.1357421875,
"learning_rate": 7.725694204540417e-07,
"loss": 0.2727,
"step": 4305
},
{
"epoch": 2.81331592689295,
"grad_norm": 0.2041015625,
"learning_rate": 7.463363881665754e-07,
"loss": 0.3509,
"step": 4310
},
{
"epoch": 2.816579634464752,
"grad_norm": 0.1669921875,
"learning_rate": 7.205519848788588e-07,
"loss": 0.3307,
"step": 4315
},
{
"epoch": 2.8198433420365534,
"grad_norm": 0.115234375,
"learning_rate": 6.952165253249805e-07,
"loss": 0.401,
"step": 4320
},
{
"epoch": 2.823107049608355,
"grad_norm": 0.07763671875,
"learning_rate": 6.703303187590631e-07,
"loss": 0.2843,
"step": 4325
},
{
"epoch": 2.8263707571801566,
"grad_norm": 0.10400390625,
"learning_rate": 6.458936689514735e-07,
"loss": 0.3478,
"step": 4330
},
{
"epoch": 2.8296344647519582,
"grad_norm": 0.087890625,
"learning_rate": 6.219068741851227e-07,
"loss": 0.3569,
"step": 4335
},
{
"epoch": 2.83289817232376,
"grad_norm": 0.158203125,
"learning_rate": 5.983702272518264e-07,
"loss": 0.4081,
"step": 4340
},
{
"epoch": 2.8361618798955615,
"grad_norm": 0.1064453125,
"learning_rate": 5.752840154487168e-07,
"loss": 0.4128,
"step": 4345
},
{
"epoch": 2.839425587467363,
"grad_norm": 0.1220703125,
"learning_rate": 5.526485205747569e-07,
"loss": 0.2948,
"step": 4350
},
{
"epoch": 2.8426892950391647,
"grad_norm": 0.12451171875,
"learning_rate": 5.304640189272777e-07,
"loss": 0.3386,
"step": 4355
},
{
"epoch": 2.8459530026109663,
"grad_norm": 0.1103515625,
"learning_rate": 5.087307812986343e-07,
"loss": 0.3151,
"step": 4360
},
{
"epoch": 2.849216710182768,
"grad_norm": 0.1376953125,
"learning_rate": 4.874490729728675e-07,
"loss": 0.3458,
"step": 4365
},
{
"epoch": 2.852480417754569,
"grad_norm": 0.119140625,
"learning_rate": 4.666191537224931e-07,
"loss": 0.3181,
"step": 4370
},
{
"epoch": 2.8557441253263707,
"grad_norm": 0.076171875,
"learning_rate": 4.4624127780530925e-07,
"loss": 0.3734,
"step": 4375
},
{
"epoch": 2.8590078328981723,
"grad_norm": 0.2333984375,
"learning_rate": 4.263156939613238e-07,
"loss": 0.4662,
"step": 4380
},
{
"epoch": 2.862271540469974,
"grad_norm": 0.09228515625,
"learning_rate": 4.068426454096732e-07,
"loss": 0.3408,
"step": 4385
},
{
"epoch": 2.8655352480417755,
"grad_norm": 0.10791015625,
"learning_rate": 3.8782236984569173e-07,
"loss": 0.3939,
"step": 4390
},
{
"epoch": 2.868798955613577,
"grad_norm": 0.12890625,
"learning_rate": 3.6925509943800113e-07,
"loss": 0.3397,
"step": 4395
},
{
"epoch": 2.8720626631853787,
"grad_norm": 0.09619140625,
"learning_rate": 3.51141060825659e-07,
"loss": 0.3527,
"step": 4400
}
],
"logging_steps": 5,
"max_steps": 4596,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 200,
"total_flos": 8.296824683102208e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}