|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.49603174603174605, |
|
"eval_steps": 500, |
|
"global_step": 12500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 23.495065689086914, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 8.6232, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 24.87381935119629, |
|
"learning_rate": 9e-07, |
|
"loss": 8.4514, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 22.63846778869629, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 8.3152, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 20.65507698059082, |
|
"learning_rate": 1.9e-06, |
|
"loss": 7.9732, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 16.147356033325195, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 7.6879, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 14.252687454223633, |
|
"learning_rate": 2.9e-06, |
|
"loss": 7.4152, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 12.011382102966309, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 7.2472, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 10.081536293029785, |
|
"learning_rate": 3.9e-06, |
|
"loss": 7.2433, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 9.65556526184082, |
|
"learning_rate": 4.4e-06, |
|
"loss": 7.2298, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 8.830799102783203, |
|
"learning_rate": 4.9000000000000005e-06, |
|
"loss": 7.19, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 9.019427299499512, |
|
"learning_rate": 5.4e-06, |
|
"loss": 7.2362, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 9.13768482208252, |
|
"learning_rate": 5.9e-06, |
|
"loss": 7.2804, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 9.02840805053711, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 7.2345, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.178731918334961, |
|
"learning_rate": 6.900000000000001e-06, |
|
"loss": 7.3014, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.83882999420166, |
|
"learning_rate": 7.4e-06, |
|
"loss": 7.3875, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.3687162399292, |
|
"learning_rate": 7.9e-06, |
|
"loss": 7.4546, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.09121036529541, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 7.407, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 12.731450080871582, |
|
"learning_rate": 8.9e-06, |
|
"loss": 7.4321, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.601162910461426, |
|
"learning_rate": 9.4e-06, |
|
"loss": 7.4155, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.006213188171387, |
|
"learning_rate": 9.900000000000002e-06, |
|
"loss": 7.3705, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.79794454574585, |
|
"learning_rate": 1.04e-05, |
|
"loss": 7.3759, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.281346321105957, |
|
"learning_rate": 1.09e-05, |
|
"loss": 7.3798, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.605266094207764, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 7.3878, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.446412563323975, |
|
"learning_rate": 1.19e-05, |
|
"loss": 7.3098, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.6324334144592285, |
|
"learning_rate": 1.24e-05, |
|
"loss": 7.2995, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.330737113952637, |
|
"learning_rate": 1.29e-05, |
|
"loss": 7.1887, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.283708572387695, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 7.1184, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.950069904327393, |
|
"learning_rate": 1.3900000000000002e-05, |
|
"loss": 6.9876, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.844958782196045, |
|
"learning_rate": 1.44e-05, |
|
"loss": 6.9319, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.499362468719482, |
|
"learning_rate": 1.49e-05, |
|
"loss": 6.9253, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.45572280883789, |
|
"learning_rate": 1.54e-05, |
|
"loss": 6.7999, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.108625411987305, |
|
"learning_rate": 1.59e-05, |
|
"loss": 6.7667, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.131596565246582, |
|
"learning_rate": 1.6400000000000002e-05, |
|
"loss": 6.7599, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.2159247398376465, |
|
"learning_rate": 1.69e-05, |
|
"loss": 6.7379, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.335048675537109, |
|
"learning_rate": 1.74e-05, |
|
"loss": 6.6553, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.361081600189209, |
|
"learning_rate": 1.79e-05, |
|
"loss": 6.6905, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.59442138671875, |
|
"learning_rate": 1.84e-05, |
|
"loss": 6.6356, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.31695032119751, |
|
"learning_rate": 1.8900000000000002e-05, |
|
"loss": 6.6538, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.541380882263184, |
|
"learning_rate": 1.94e-05, |
|
"loss": 6.6873, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 8.056180000305176, |
|
"learning_rate": 1.9900000000000003e-05, |
|
"loss": 6.6332, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.74859094619751, |
|
"learning_rate": 2.04e-05, |
|
"loss": 6.5488, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.700525760650635, |
|
"learning_rate": 2.09e-05, |
|
"loss": 6.611, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.347009181976318, |
|
"learning_rate": 2.1400000000000002e-05, |
|
"loss": 6.6621, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.607337951660156, |
|
"learning_rate": 2.19e-05, |
|
"loss": 6.568, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.816825866699219, |
|
"learning_rate": 2.2400000000000002e-05, |
|
"loss": 6.6293, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.984443664550781, |
|
"learning_rate": 2.29e-05, |
|
"loss": 6.5667, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.995021820068359, |
|
"learning_rate": 2.3400000000000003e-05, |
|
"loss": 6.5062, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.876286029815674, |
|
"learning_rate": 2.39e-05, |
|
"loss": 6.5034, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.953365802764893, |
|
"learning_rate": 2.44e-05, |
|
"loss": 6.459, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.246520519256592, |
|
"learning_rate": 2.4900000000000002e-05, |
|
"loss": 6.4359, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"eval_loss": 6.133457660675049, |
|
"eval_runtime": 422.8798, |
|
"eval_samples_per_second": 50.182, |
|
"eval_steps_per_second": 0.393, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.1657819747924805, |
|
"learning_rate": 2.54e-05, |
|
"loss": 6.4372, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.203813552856445, |
|
"learning_rate": 2.5900000000000003e-05, |
|
"loss": 6.3684, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.829228401184082, |
|
"learning_rate": 2.64e-05, |
|
"loss": 6.3275, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.641738414764404, |
|
"learning_rate": 2.6900000000000003e-05, |
|
"loss": 6.2923, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.824960708618164, |
|
"learning_rate": 2.7400000000000002e-05, |
|
"loss": 6.3685, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.7754364013671875, |
|
"learning_rate": 2.7900000000000004e-05, |
|
"loss": 6.3069, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 7.3608903884887695, |
|
"learning_rate": 2.84e-05, |
|
"loss": 6.2915, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.551064968109131, |
|
"learning_rate": 2.8899999999999998e-05, |
|
"loss": 6.1676, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.588657379150391, |
|
"learning_rate": 2.94e-05, |
|
"loss": 6.2311, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.631275177001953, |
|
"learning_rate": 2.9900000000000002e-05, |
|
"loss": 6.1824, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.2990288734436035, |
|
"learning_rate": 3.04e-05, |
|
"loss": 6.1641, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.6439714431762695, |
|
"learning_rate": 3.09e-05, |
|
"loss": 6.0125, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.402431964874268, |
|
"learning_rate": 3.1400000000000004e-05, |
|
"loss": 6.1314, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.883937358856201, |
|
"learning_rate": 3.19e-05, |
|
"loss": 5.9858, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.988973140716553, |
|
"learning_rate": 3.24e-05, |
|
"loss": 5.9886, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.186301231384277, |
|
"learning_rate": 3.29e-05, |
|
"loss": 5.9732, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.2982354164123535, |
|
"learning_rate": 3.3400000000000005e-05, |
|
"loss": 6.0391, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.037730693817139, |
|
"learning_rate": 3.3900000000000004e-05, |
|
"loss": 6.0547, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.280949592590332, |
|
"learning_rate": 3.4399999999999996e-05, |
|
"loss": 5.8734, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.861902236938477, |
|
"learning_rate": 3.49e-05, |
|
"loss": 5.8997, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.4033589363098145, |
|
"learning_rate": 3.54e-05, |
|
"loss": 5.8111, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.376819133758545, |
|
"learning_rate": 3.59e-05, |
|
"loss": 5.9027, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.777069568634033, |
|
"learning_rate": 3.6400000000000004e-05, |
|
"loss": 5.727, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.183162689208984, |
|
"learning_rate": 3.69e-05, |
|
"loss": 5.733, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.658604621887207, |
|
"learning_rate": 3.74e-05, |
|
"loss": 5.7018, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.263491153717041, |
|
"learning_rate": 3.79e-05, |
|
"loss": 5.7315, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.86756706237793, |
|
"learning_rate": 3.8400000000000005e-05, |
|
"loss": 5.6637, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.122657299041748, |
|
"learning_rate": 3.8900000000000004e-05, |
|
"loss": 5.6849, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.6024580001831055, |
|
"learning_rate": 3.94e-05, |
|
"loss": 5.5835, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.619604587554932, |
|
"learning_rate": 3.99e-05, |
|
"loss": 5.7, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.7342352867126465, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 5.4825, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.796237468719482, |
|
"learning_rate": 4.09e-05, |
|
"loss": 5.6496, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.601135730743408, |
|
"learning_rate": 4.14e-05, |
|
"loss": 5.3526, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.576679229736328, |
|
"learning_rate": 4.19e-05, |
|
"loss": 5.3757, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.774380683898926, |
|
"learning_rate": 4.24e-05, |
|
"loss": 5.5164, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.140038967132568, |
|
"learning_rate": 4.29e-05, |
|
"loss": 5.3124, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.971868515014648, |
|
"learning_rate": 4.3400000000000005e-05, |
|
"loss": 5.3387, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 6.431824207305908, |
|
"learning_rate": 4.39e-05, |
|
"loss": 5.4363, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.174604892730713, |
|
"learning_rate": 4.44e-05, |
|
"loss": 5.282, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.313474178314209, |
|
"learning_rate": 4.49e-05, |
|
"loss": 5.3754, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.066281318664551, |
|
"learning_rate": 4.5400000000000006e-05, |
|
"loss": 5.3197, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 7.4433817863464355, |
|
"learning_rate": 4.5900000000000004e-05, |
|
"loss": 5.3277, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.884599208831787, |
|
"learning_rate": 4.64e-05, |
|
"loss": 5.0681, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.17655611038208, |
|
"learning_rate": 4.69e-05, |
|
"loss": 5.1159, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 7.090456485748291, |
|
"learning_rate": 4.74e-05, |
|
"loss": 5.2264, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.508240699768066, |
|
"learning_rate": 4.79e-05, |
|
"loss": 5.2113, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.5788445472717285, |
|
"learning_rate": 4.8400000000000004e-05, |
|
"loss": 4.9996, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.722221374511719, |
|
"learning_rate": 4.89e-05, |
|
"loss": 4.9688, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.813933849334717, |
|
"learning_rate": 4.94e-05, |
|
"loss": 4.8691, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.425748348236084, |
|
"learning_rate": 4.99e-05, |
|
"loss": 4.9883, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"eval_loss": 4.425854682922363, |
|
"eval_runtime": 422.6592, |
|
"eval_samples_per_second": 50.208, |
|
"eval_steps_per_second": 0.393, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.170848369598389, |
|
"learning_rate": 4.9999996764541565e-05, |
|
"loss": 4.8794, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.247808933258057, |
|
"learning_rate": 4.99999836204931e-05, |
|
"loss": 4.9664, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.27077054977417, |
|
"learning_rate": 4.9999960365643774e-05, |
|
"loss": 4.7753, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.284353733062744, |
|
"learning_rate": 4.999992700000298e-05, |
|
"loss": 4.8909, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.716318607330322, |
|
"learning_rate": 4.999988352358421e-05, |
|
"loss": 4.861, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.448729991912842, |
|
"learning_rate": 4.999982993640506e-05, |
|
"loss": 4.8776, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.401464939117432, |
|
"learning_rate": 4.9999766238487204e-05, |
|
"loss": 4.8507, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.219631195068359, |
|
"learning_rate": 4.999969242985639e-05, |
|
"loss": 4.8234, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.225848197937012, |
|
"learning_rate": 4.9999608510542485e-05, |
|
"loss": 4.6093, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.117152690887451, |
|
"learning_rate": 4.999951448057941e-05, |
|
"loss": 4.786, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.788968086242676, |
|
"learning_rate": 4.999941034000521e-05, |
|
"loss": 4.8389, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.037156581878662, |
|
"learning_rate": 4.999929608886198e-05, |
|
"loss": 4.7674, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.567563056945801, |
|
"learning_rate": 4.999917172719596e-05, |
|
"loss": 4.8247, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.557987213134766, |
|
"learning_rate": 4.999903725505742e-05, |
|
"loss": 4.7517, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.490599632263184, |
|
"learning_rate": 4.999889267250075e-05, |
|
"loss": 4.6037, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.000855922698975, |
|
"learning_rate": 4.999873797958443e-05, |
|
"loss": 4.6077, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.378462314605713, |
|
"learning_rate": 4.999857317637102e-05, |
|
"loss": 4.3662, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 7.4290924072265625, |
|
"learning_rate": 4.999839826292718e-05, |
|
"loss": 4.5339, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.727224826812744, |
|
"learning_rate": 4.999821323932363e-05, |
|
"loss": 4.5744, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.893082618713379, |
|
"learning_rate": 4.9998018105635214e-05, |
|
"loss": 4.5247, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.759618759155273, |
|
"learning_rate": 4.999781286194085e-05, |
|
"loss": 4.5744, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.082376003265381, |
|
"learning_rate": 4.999759750832355e-05, |
|
"loss": 4.6011, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.305749416351318, |
|
"learning_rate": 4.999737204487039e-05, |
|
"loss": 4.4587, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.710574626922607, |
|
"learning_rate": 4.999713647167258e-05, |
|
"loss": 4.46, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.753530502319336, |
|
"learning_rate": 4.999689078882537e-05, |
|
"loss": 4.5674, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.949398040771484, |
|
"learning_rate": 4.999663499642814e-05, |
|
"loss": 4.3999, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.530541896820068, |
|
"learning_rate": 4.999636909458434e-05, |
|
"loss": 4.533, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.9646453857421875, |
|
"learning_rate": 4.9996093083401495e-05, |
|
"loss": 4.374, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.346386432647705, |
|
"learning_rate": 4.9995806962991246e-05, |
|
"loss": 4.4758, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.797152519226074, |
|
"learning_rate": 4.9995510733469295e-05, |
|
"loss": 4.6275, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.426751136779785, |
|
"learning_rate": 4.999520439495547e-05, |
|
"loss": 4.3561, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.3839030265808105, |
|
"learning_rate": 4.9994887947573646e-05, |
|
"loss": 4.3134, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.01127290725708, |
|
"learning_rate": 4.9994594501953555e-05, |
|
"loss": 4.4336, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.228607654571533, |
|
"learning_rate": 4.999425884807851e-05, |
|
"loss": 4.1543, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.958486080169678, |
|
"learning_rate": 4.999391308571788e-05, |
|
"loss": 4.3806, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.816386699676514, |
|
"learning_rate": 4.99935572150115e-05, |
|
"loss": 4.3759, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 6.345638751983643, |
|
"learning_rate": 4.9993191236103295e-05, |
|
"loss": 4.5759, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.738880634307861, |
|
"learning_rate": 4.9992815149141294e-05, |
|
"loss": 4.3274, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.694160461425781, |
|
"learning_rate": 4.9992428954277573e-05, |
|
"loss": 4.376, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.050902843475342, |
|
"learning_rate": 4.999203265166834e-05, |
|
"loss": 4.1629, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 7.09380578994751, |
|
"learning_rate": 4.9991626241473876e-05, |
|
"loss": 4.3638, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.538166046142578, |
|
"learning_rate": 4.999120972385853e-05, |
|
"loss": 4.1318, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.052664279937744, |
|
"learning_rate": 4.9990783098990765e-05, |
|
"loss": 4.1856, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.707901477813721, |
|
"learning_rate": 4.9990346367043114e-05, |
|
"loss": 4.3361, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.3822150230407715, |
|
"learning_rate": 4.998989952819223e-05, |
|
"loss": 4.301, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.586170196533203, |
|
"learning_rate": 4.998944258261879e-05, |
|
"loss": 4.3131, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.62904691696167, |
|
"learning_rate": 4.998897553050763e-05, |
|
"loss": 4.3357, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.669075965881348, |
|
"learning_rate": 4.9988498372047633e-05, |
|
"loss": 4.2434, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.776549339294434, |
|
"learning_rate": 4.998801110743177e-05, |
|
"loss": 4.0441, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.442574977874756, |
|
"learning_rate": 4.9987513736857106e-05, |
|
"loss": 4.0931, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"eval_loss": 3.676598310470581, |
|
"eval_runtime": 423.4536, |
|
"eval_samples_per_second": 50.114, |
|
"eval_steps_per_second": 0.392, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.529382705688477, |
|
"learning_rate": 4.998700626052481e-05, |
|
"loss": 4.1739, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.5605387687683105, |
|
"learning_rate": 4.99864886786401e-05, |
|
"loss": 4.1861, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.168206214904785, |
|
"learning_rate": 4.998596099141231e-05, |
|
"loss": 4.0245, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.634309768676758, |
|
"learning_rate": 4.998542319905486e-05, |
|
"loss": 4.2131, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.191194534301758, |
|
"learning_rate": 4.998487530178525e-05, |
|
"loss": 4.163, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.52882719039917, |
|
"learning_rate": 4.998431729982506e-05, |
|
"loss": 4.0668, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.614863395690918, |
|
"learning_rate": 4.998374919339998e-05, |
|
"loss": 4.2501, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.7852373123168945, |
|
"learning_rate": 4.9983170982739745e-05, |
|
"loss": 4.207, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 7.439071178436279, |
|
"learning_rate": 4.998258266807822e-05, |
|
"loss": 4.0462, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.510477542877197, |
|
"learning_rate": 4.998198424965333e-05, |
|
"loss": 4.277, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.421988487243652, |
|
"learning_rate": 4.9981375727707114e-05, |
|
"loss": 4.0289, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.632848739624023, |
|
"learning_rate": 4.998075710248565e-05, |
|
"loss": 4.2922, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.031602382659912, |
|
"learning_rate": 4.9980128374239156e-05, |
|
"loss": 3.9845, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.3867669105529785, |
|
"learning_rate": 4.997948954322189e-05, |
|
"loss": 4.1095, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.150663375854492, |
|
"learning_rate": 4.997884060969222e-05, |
|
"loss": 3.9247, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.717851638793945, |
|
"learning_rate": 4.99781815739126e-05, |
|
"loss": 4.1911, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 6.766101837158203, |
|
"learning_rate": 4.9977512436149563e-05, |
|
"loss": 3.9194, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 6.316621780395508, |
|
"learning_rate": 4.9976833196673735e-05, |
|
"loss": 4.101, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.107331275939941, |
|
"learning_rate": 4.9976143855759814e-05, |
|
"loss": 4.0706, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.533346652984619, |
|
"learning_rate": 4.99754444136866e-05, |
|
"loss": 4.0164, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.967229843139648, |
|
"learning_rate": 4.9974734870736975e-05, |
|
"loss": 4.1229, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.502392768859863, |
|
"learning_rate": 4.997401522719788e-05, |
|
"loss": 4.0456, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.6358962059021, |
|
"learning_rate": 4.997328548336038e-05, |
|
"loss": 3.9432, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 6.525423049926758, |
|
"learning_rate": 4.99725456395196e-05, |
|
"loss": 4.05, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.675175666809082, |
|
"learning_rate": 4.997179569597475e-05, |
|
"loss": 3.8145, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.409067630767822, |
|
"learning_rate": 4.9971035653029153e-05, |
|
"loss": 3.914, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.812373161315918, |
|
"learning_rate": 4.997026551099017e-05, |
|
"loss": 3.9817, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.165591239929199, |
|
"learning_rate": 4.996948527016929e-05, |
|
"loss": 3.9414, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.0605034828186035, |
|
"learning_rate": 4.996869493088205e-05, |
|
"loss": 3.8139, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 6.348527908325195, |
|
"learning_rate": 4.996789449344812e-05, |
|
"loss": 3.7905, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.135634422302246, |
|
"learning_rate": 4.9967083958191187e-05, |
|
"loss": 3.7834, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 8.48004150390625, |
|
"learning_rate": 4.996626332543907e-05, |
|
"loss": 3.7551, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.518482208251953, |
|
"learning_rate": 4.996543259552367e-05, |
|
"loss": 3.8619, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.7253737449646, |
|
"learning_rate": 4.996459176878095e-05, |
|
"loss": 3.7978, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 6.030267715454102, |
|
"learning_rate": 4.9963740845550965e-05, |
|
"loss": 3.9309, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.561445713043213, |
|
"learning_rate": 4.996287982617787e-05, |
|
"loss": 3.9761, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.555076599121094, |
|
"learning_rate": 4.996200871100988e-05, |
|
"loss": 3.8587, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.502940654754639, |
|
"learning_rate": 4.9961127500399295e-05, |
|
"loss": 3.847, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.557964324951172, |
|
"learning_rate": 4.996023619470252e-05, |
|
"loss": 3.6117, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.37682580947876, |
|
"learning_rate": 4.9959334794280014e-05, |
|
"loss": 3.7581, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.152722358703613, |
|
"learning_rate": 4.9958423299496344e-05, |
|
"loss": 3.8454, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.4067301750183105, |
|
"learning_rate": 4.995750171072014e-05, |
|
"loss": 3.9353, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.564638614654541, |
|
"learning_rate": 4.9956570028324124e-05, |
|
"loss": 3.738, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.320713520050049, |
|
"learning_rate": 4.99556282526851e-05, |
|
"loss": 3.5365, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.384575843811035, |
|
"learning_rate": 4.995467638418395e-05, |
|
"loss": 3.8153, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.424959659576416, |
|
"learning_rate": 4.995371442320565e-05, |
|
"loss": 3.779, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.928106784820557, |
|
"learning_rate": 4.9952742370139224e-05, |
|
"loss": 3.7395, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.09404182434082, |
|
"learning_rate": 4.995176022537784e-05, |
|
"loss": 3.757, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 8.332275390625, |
|
"learning_rate": 4.995076798931867e-05, |
|
"loss": 3.8866, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.44631814956665, |
|
"learning_rate": 4.994976566236303e-05, |
|
"loss": 3.8395, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"eval_loss": 3.2655727863311768, |
|
"eval_runtime": 423.1622, |
|
"eval_samples_per_second": 50.149, |
|
"eval_steps_per_second": 0.392, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.428518295288086, |
|
"learning_rate": 4.994875324491629e-05, |
|
"loss": 3.7103, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.11751651763916, |
|
"learning_rate": 4.9947730737387885e-05, |
|
"loss": 3.8224, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.404431343078613, |
|
"learning_rate": 4.994669814019138e-05, |
|
"loss": 3.9208, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.487029075622559, |
|
"learning_rate": 4.994565545374438e-05, |
|
"loss": 3.7412, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 4.979548454284668, |
|
"learning_rate": 4.9944602678468575e-05, |
|
"loss": 3.6008, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.379942417144775, |
|
"learning_rate": 4.994353981478974e-05, |
|
"loss": 3.6077, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.666346549987793, |
|
"learning_rate": 4.994246686313774e-05, |
|
"loss": 3.8157, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.252011299133301, |
|
"learning_rate": 4.99413838239465e-05, |
|
"loss": 3.6929, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.579212188720703, |
|
"learning_rate": 4.9940290697654045e-05, |
|
"loss": 3.9075, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.747458457946777, |
|
"learning_rate": 4.993918748470246e-05, |
|
"loss": 3.5528, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.356452465057373, |
|
"learning_rate": 4.993818596932118e-05, |
|
"loss": 3.4987, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.761812686920166, |
|
"learning_rate": 4.993706359294986e-05, |
|
"loss": 3.7627, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 5.732122898101807, |
|
"learning_rate": 4.993593113122456e-05, |
|
"loss": 3.6106, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 11.450267791748047, |
|
"learning_rate": 4.993478858460329e-05, |
|
"loss": 3.4716, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.774083614349365, |
|
"learning_rate": 4.993363595354813e-05, |
|
"loss": 3.5741, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.652318000793457, |
|
"learning_rate": 4.9932473238525247e-05, |
|
"loss": 3.676, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.903106689453125, |
|
"learning_rate": 4.993130044000487e-05, |
|
"loss": 3.6334, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.358557224273682, |
|
"learning_rate": 4.993011755846132e-05, |
|
"loss": 3.5592, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.509594440460205, |
|
"learning_rate": 4.9928924594372995e-05, |
|
"loss": 3.6512, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.318192958831787, |
|
"learning_rate": 4.9927721548222374e-05, |
|
"loss": 3.3691, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.848221778869629, |
|
"learning_rate": 4.9926508420496e-05, |
|
"loss": 3.3721, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.9782490730285645, |
|
"learning_rate": 4.992528521168449e-05, |
|
"loss": 3.5859, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.406031608581543, |
|
"learning_rate": 4.992405192228258e-05, |
|
"loss": 3.6206, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.102032661437988, |
|
"learning_rate": 4.992280855278903e-05, |
|
"loss": 3.4615, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.5005035400390625, |
|
"learning_rate": 4.992155510370669e-05, |
|
"loss": 3.507, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 8.336762428283691, |
|
"learning_rate": 4.9920291575542515e-05, |
|
"loss": 3.3897, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 7.851747989654541, |
|
"learning_rate": 4.9919017968807514e-05, |
|
"loss": 3.5232, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.816250324249268, |
|
"learning_rate": 4.991773428401676e-05, |
|
"loss": 3.5638, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.746279716491699, |
|
"learning_rate": 4.9916440521689433e-05, |
|
"loss": 3.5446, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.8899922370910645, |
|
"learning_rate": 4.9915136682348774e-05, |
|
"loss": 3.3533, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.160922050476074, |
|
"learning_rate": 4.9913822766522076e-05, |
|
"loss": 3.4222, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.936460494995117, |
|
"learning_rate": 4.991249877474075e-05, |
|
"loss": 3.5763, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 16.517559051513672, |
|
"learning_rate": 4.991116470754025e-05, |
|
"loss": 3.5324, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.840503692626953, |
|
"learning_rate": 4.9909820565460116e-05, |
|
"loss": 3.5764, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.952908515930176, |
|
"learning_rate": 4.990846634904398e-05, |
|
"loss": 3.4369, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.108431339263916, |
|
"learning_rate": 4.9907102058839504e-05, |
|
"loss": 3.4518, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.068904399871826, |
|
"learning_rate": 4.990572769539848e-05, |
|
"loss": 3.5594, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.512117385864258, |
|
"learning_rate": 4.990434325927671e-05, |
|
"loss": 3.6366, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.916763782501221, |
|
"learning_rate": 4.990294875103413e-05, |
|
"loss": 3.4344, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.244757175445557, |
|
"learning_rate": 4.990154417123472e-05, |
|
"loss": 3.5858, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.071867942810059, |
|
"learning_rate": 4.990012952044654e-05, |
|
"loss": 3.5888, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.984373092651367, |
|
"learning_rate": 4.98987047992417e-05, |
|
"loss": 3.5084, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 7.062913417816162, |
|
"learning_rate": 4.989727000819644e-05, |
|
"loss": 3.4169, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.671230792999268, |
|
"learning_rate": 4.9895825147891e-05, |
|
"loss": 3.3354, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.235222339630127, |
|
"learning_rate": 4.989437021890975e-05, |
|
"loss": 3.4756, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.888786792755127, |
|
"learning_rate": 4.98929052218411e-05, |
|
"loss": 3.3409, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.014183044433594, |
|
"learning_rate": 4.989143015727754e-05, |
|
"loss": 3.4246, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 8.75373649597168, |
|
"learning_rate": 4.988994502581565e-05, |
|
"loss": 3.2431, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.800814628601074, |
|
"learning_rate": 4.988844982805605e-05, |
|
"loss": 3.4702, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.22312068939209, |
|
"learning_rate": 4.9886944564603444e-05, |
|
"loss": 3.2994, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 2.9097418785095215, |
|
"eval_runtime": 423.0547, |
|
"eval_samples_per_second": 50.161, |
|
"eval_steps_per_second": 0.392, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.41413688659668, |
|
"learning_rate": 4.988542923606663e-05, |
|
"loss": 3.5138, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.5836968421936035, |
|
"learning_rate": 4.988390384305843e-05, |
|
"loss": 3.4161, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.957060813903809, |
|
"learning_rate": 4.9882368386195776e-05, |
|
"loss": 3.4227, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.718446731567383, |
|
"learning_rate": 4.988082286609965e-05, |
|
"loss": 3.3632, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.273780822753906, |
|
"learning_rate": 4.9879267283395125e-05, |
|
"loss": 3.4018, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.000144004821777, |
|
"learning_rate": 4.9877701638711314e-05, |
|
"loss": 3.2865, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.369386672973633, |
|
"learning_rate": 4.987612593268142e-05, |
|
"loss": 3.2916, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.275691032409668, |
|
"learning_rate": 4.98745401659427e-05, |
|
"loss": 3.3678, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.77783727645874, |
|
"learning_rate": 4.9872944339136503e-05, |
|
"loss": 3.2567, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.3765435218811035, |
|
"learning_rate": 4.987133845290822e-05, |
|
"loss": 3.3649, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.956243991851807, |
|
"learning_rate": 4.9869722507907343e-05, |
|
"loss": 3.428, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.933529376983643, |
|
"learning_rate": 4.9868096504787395e-05, |
|
"loss": 3.0448, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.319052696228027, |
|
"learning_rate": 4.9866460444205983e-05, |
|
"loss": 3.3518, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.592201232910156, |
|
"learning_rate": 4.986481432682479e-05, |
|
"loss": 3.1236, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.243892192840576, |
|
"learning_rate": 4.986315815330956e-05, |
|
"loss": 3.2447, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.197099208831787, |
|
"learning_rate": 4.98614919243301e-05, |
|
"loss": 3.3031, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.438327312469482, |
|
"learning_rate": 4.98598156405603e-05, |
|
"loss": 3.5433, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.134957313537598, |
|
"learning_rate": 4.985812930267808e-05, |
|
"loss": 3.3124, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.727309703826904, |
|
"learning_rate": 4.985643291136546e-05, |
|
"loss": 3.2169, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.680230617523193, |
|
"learning_rate": 4.985472646730852e-05, |
|
"loss": 3.0445, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.9644598960876465, |
|
"learning_rate": 4.985300997119739e-05, |
|
"loss": 3.1588, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.430914402008057, |
|
"learning_rate": 4.985128342372629e-05, |
|
"loss": 3.2385, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.565753936767578, |
|
"learning_rate": 4.9849546825593476e-05, |
|
"loss": 3.2654, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.126991271972656, |
|
"learning_rate": 4.9847800177501305e-05, |
|
"loss": 3.3024, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.406001567840576, |
|
"learning_rate": 4.984604348015616e-05, |
|
"loss": 3.219, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.55892276763916, |
|
"learning_rate": 4.984427673426851e-05, |
|
"loss": 3.3882, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.547699451446533, |
|
"learning_rate": 4.9842499940552886e-05, |
|
"loss": 3.2334, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.514155864715576, |
|
"learning_rate": 4.9840713099727885e-05, |
|
"loss": 3.2041, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.4210991859436035, |
|
"learning_rate": 4.983891621251616e-05, |
|
"loss": 3.1746, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.854736328125, |
|
"learning_rate": 4.983710927964442e-05, |
|
"loss": 3.1961, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.277487754821777, |
|
"learning_rate": 4.9835292301843476e-05, |
|
"loss": 2.9688, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.925424098968506, |
|
"learning_rate": 4.983346527984813e-05, |
|
"loss": 3.2315, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 7.818734169006348, |
|
"learning_rate": 4.983162821439733e-05, |
|
"loss": 3.325, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.312294006347656, |
|
"learning_rate": 4.982978110623402e-05, |
|
"loss": 3.2141, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.804452419281006, |
|
"learning_rate": 4.982792395610524e-05, |
|
"loss": 3.1214, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.256209850311279, |
|
"learning_rate": 4.9826056764762074e-05, |
|
"loss": 3.2357, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.713292121887207, |
|
"learning_rate": 4.982417953295968e-05, |
|
"loss": 3.0448, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.762032985687256, |
|
"learning_rate": 4.982229226145727e-05, |
|
"loss": 3.1855, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.245543956756592, |
|
"learning_rate": 4.982039495101812e-05, |
|
"loss": 3.1024, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.456753730773926, |
|
"learning_rate": 4.9818487602409556e-05, |
|
"loss": 3.2703, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.831927299499512, |
|
"learning_rate": 4.981657021640298e-05, |
|
"loss": 3.2172, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.4401350021362305, |
|
"learning_rate": 4.981464279377384e-05, |
|
"loss": 3.2972, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.1732892990112305, |
|
"learning_rate": 4.981270533530164e-05, |
|
"loss": 3.1305, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.929070472717285, |
|
"learning_rate": 4.9810757841769976e-05, |
|
"loss": 3.0932, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.50985860824585, |
|
"learning_rate": 4.980880031396645e-05, |
|
"loss": 3.1417, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.071355819702148, |
|
"learning_rate": 4.980683275268277e-05, |
|
"loss": 3.1796, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.182791709899902, |
|
"learning_rate": 4.980485515871467e-05, |
|
"loss": 3.268, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.4410786628723145, |
|
"learning_rate": 4.980286753286195e-05, |
|
"loss": 3.2922, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 22.151193618774414, |
|
"learning_rate": 4.9800869875928475e-05, |
|
"loss": 3.0482, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.808634281158447, |
|
"learning_rate": 4.9798862188722164e-05, |
|
"loss": 3.1515, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"eval_loss": 2.6660187244415283, |
|
"eval_runtime": 423.2518, |
|
"eval_samples_per_second": 50.138, |
|
"eval_steps_per_second": 0.392, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 9.806035995483398, |
|
"learning_rate": 4.979684447205499e-05, |
|
"loss": 3.0872, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.456057071685791, |
|
"learning_rate": 4.9794816726742976e-05, |
|
"loss": 2.882, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.348060607910156, |
|
"learning_rate": 4.979277895360622e-05, |
|
"loss": 3.1984, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.867720603942871, |
|
"learning_rate": 4.979073115346885e-05, |
|
"loss": 3.0328, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.720703601837158, |
|
"learning_rate": 4.9788673327159076e-05, |
|
"loss": 3.1852, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.027235984802246, |
|
"learning_rate": 4.978660547550914e-05, |
|
"loss": 3.1082, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.131529808044434, |
|
"learning_rate": 4.978452759935535e-05, |
|
"loss": 3.0318, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.773056983947754, |
|
"learning_rate": 4.978243969953806e-05, |
|
"loss": 3.3087, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.788400650024414, |
|
"learning_rate": 4.9780341776901696e-05, |
|
"loss": 2.9589, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.815254211425781, |
|
"learning_rate": 4.9778233832294724e-05, |
|
"loss": 3.1083, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.483798027038574, |
|
"learning_rate": 4.9776115866569654e-05, |
|
"loss": 3.1023, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.394237995147705, |
|
"learning_rate": 4.9773987880583075e-05, |
|
"loss": 3.0322, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.961243152618408, |
|
"learning_rate": 4.97718498751956e-05, |
|
"loss": 3.0204, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.827451229095459, |
|
"learning_rate": 4.976970185127191e-05, |
|
"loss": 3.1435, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.503066539764404, |
|
"learning_rate": 4.9767543809680744e-05, |
|
"loss": 3.0807, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.9387288093566895, |
|
"learning_rate": 4.9765375751294874e-05, |
|
"loss": 2.7987, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.066097259521484, |
|
"learning_rate": 4.9763197676991134e-05, |
|
"loss": 3.0099, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 7.227752685546875, |
|
"learning_rate": 4.976100958765042e-05, |
|
"loss": 2.9368, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.03649377822876, |
|
"learning_rate": 4.975881148415765e-05, |
|
"loss": 2.9237, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.1522979736328125, |
|
"learning_rate": 4.9756603367401814e-05, |
|
"loss": 2.96, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.068181037902832, |
|
"learning_rate": 4.975438523827594e-05, |
|
"loss": 2.9398, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.104649066925049, |
|
"learning_rate": 4.975215709767712e-05, |
|
"loss": 2.781, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 7.2377471923828125, |
|
"learning_rate": 4.974991894650649e-05, |
|
"loss": 3.2773, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.730634689331055, |
|
"learning_rate": 4.974767078566922e-05, |
|
"loss": 3.0184, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.900280952453613, |
|
"learning_rate": 4.974541261607454e-05, |
|
"loss": 2.9432, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.755638122558594, |
|
"learning_rate": 4.974314443863573e-05, |
|
"loss": 2.9988, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.039461612701416, |
|
"learning_rate": 4.974086625427011e-05, |
|
"loss": 3.0655, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.955098628997803, |
|
"learning_rate": 4.9738578063899066e-05, |
|
"loss": 2.822, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.892226696014404, |
|
"learning_rate": 4.9736279868447996e-05, |
|
"loss": 2.9688, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.9840497970581055, |
|
"learning_rate": 4.9733971668846376e-05, |
|
"loss": 2.9008, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.418862819671631, |
|
"learning_rate": 4.973165346602771e-05, |
|
"loss": 2.9117, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.666804790496826, |
|
"learning_rate": 4.972932526092956e-05, |
|
"loss": 3.0392, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.902047634124756, |
|
"learning_rate": 4.972698705449353e-05, |
|
"loss": 3.0601, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.236441612243652, |
|
"learning_rate": 4.972463884766525e-05, |
|
"loss": 2.961, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.548509120941162, |
|
"learning_rate": 4.972228064139444e-05, |
|
"loss": 2.8966, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.943577766418457, |
|
"learning_rate": 4.9719912436634796e-05, |
|
"loss": 3.0408, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.531796455383301, |
|
"learning_rate": 4.971753423434413e-05, |
|
"loss": 2.8598, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.425514221191406, |
|
"learning_rate": 4.971514603548425e-05, |
|
"loss": 3.0003, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.9626593589782715, |
|
"learning_rate": 4.9712747841021024e-05, |
|
"loss": 3.0137, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.935604572296143, |
|
"learning_rate": 4.971033965192435e-05, |
|
"loss": 2.9509, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.610740661621094, |
|
"learning_rate": 4.9708163737130644e-05, |
|
"loss": 3.0231, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.248142242431641, |
|
"learning_rate": 4.970573656091701e-05, |
|
"loss": 2.7345, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.92423677444458, |
|
"learning_rate": 4.9703299392905516e-05, |
|
"loss": 2.9733, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.833972454071045, |
|
"learning_rate": 4.9700852234081843e-05, |
|
"loss": 2.8882, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.712262153625488, |
|
"learning_rate": 4.96983950854357e-05, |
|
"loss": 2.9849, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.70693826675415, |
|
"learning_rate": 4.969592794796083e-05, |
|
"loss": 2.7034, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.603397369384766, |
|
"learning_rate": 4.969345082265503e-05, |
|
"loss": 2.9441, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.455355644226074, |
|
"learning_rate": 4.9690963710520125e-05, |
|
"loss": 3.0818, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.410728454589844, |
|
"learning_rate": 4.968846661256199e-05, |
|
"loss": 2.7837, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.591252326965332, |
|
"learning_rate": 4.9685959529790526e-05, |
|
"loss": 2.8103, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 2.493670701980591, |
|
"eval_runtime": 423.4603, |
|
"eval_samples_per_second": 50.113, |
|
"eval_steps_per_second": 0.392, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.1827216148376465, |
|
"learning_rate": 4.9683442463219674e-05, |
|
"loss": 2.9208, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.817008018493652, |
|
"learning_rate": 4.968091541386742e-05, |
|
"loss": 2.9904, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.470141887664795, |
|
"learning_rate": 4.96783783827558e-05, |
|
"loss": 2.7532, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.129313945770264, |
|
"learning_rate": 4.967583137091085e-05, |
|
"loss": 3.1164, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.951664924621582, |
|
"learning_rate": 4.967327437936268e-05, |
|
"loss": 3.0038, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.184429168701172, |
|
"learning_rate": 4.96707074091454e-05, |
|
"loss": 2.8789, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.133409023284912, |
|
"learning_rate": 4.9668130461297194e-05, |
|
"loss": 2.8137, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.30552339553833, |
|
"learning_rate": 4.9665543536860255e-05, |
|
"loss": 2.764, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.916990280151367, |
|
"learning_rate": 4.966294663688083e-05, |
|
"loss": 2.8359, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.573614597320557, |
|
"learning_rate": 4.966033976240916e-05, |
|
"loss": 2.9216, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.318258285522461, |
|
"learning_rate": 4.965772291449958e-05, |
|
"loss": 2.8818, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.087459564208984, |
|
"learning_rate": 4.965509609421042e-05, |
|
"loss": 2.6802, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.408172607421875, |
|
"learning_rate": 4.9652459302604046e-05, |
|
"loss": 2.7713, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.441077709197998, |
|
"learning_rate": 4.964981254074686e-05, |
|
"loss": 2.8727, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 6.799238681793213, |
|
"learning_rate": 4.9647155809709306e-05, |
|
"loss": 2.9911, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.362485885620117, |
|
"learning_rate": 4.9644489110565845e-05, |
|
"loss": 2.8088, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.419576168060303, |
|
"learning_rate": 4.964181244439498e-05, |
|
"loss": 2.8133, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.19252347946167, |
|
"learning_rate": 4.963912581227924e-05, |
|
"loss": 2.8429, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.425354480743408, |
|
"learning_rate": 4.963642921530518e-05, |
|
"loss": 2.8272, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.866875171661377, |
|
"learning_rate": 4.9633722654563405e-05, |
|
"loss": 2.816, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.425843238830566, |
|
"learning_rate": 4.963100613114852e-05, |
|
"loss": 2.8552, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.268444538116455, |
|
"learning_rate": 4.962827964615919e-05, |
|
"loss": 2.8642, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.511022090911865, |
|
"learning_rate": 4.962554320069808e-05, |
|
"loss": 2.8388, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 8.639039993286133, |
|
"learning_rate": 4.9622796795871904e-05, |
|
"loss": 2.809, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.79989767074585, |
|
"learning_rate": 4.9620040432791395e-05, |
|
"loss": 3.0316, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.568778991699219, |
|
"learning_rate": 4.961727411257131e-05, |
|
"loss": 2.7867, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.364297389984131, |
|
"learning_rate": 4.961449783633045e-05, |
|
"loss": 3.0213, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.523791790008545, |
|
"learning_rate": 4.9611711605191625e-05, |
|
"loss": 2.7875, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.39138650894165, |
|
"learning_rate": 4.9608915420281675e-05, |
|
"loss": 2.7144, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.800009250640869, |
|
"learning_rate": 4.9606109282731463e-05, |
|
"loss": 2.8091, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.470329284667969, |
|
"learning_rate": 4.96032931936759e-05, |
|
"loss": 2.8055, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.461717128753662, |
|
"learning_rate": 4.9600467154253885e-05, |
|
"loss": 2.8633, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.095701217651367, |
|
"learning_rate": 4.959763116560836e-05, |
|
"loss": 2.9004, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.429464340209961, |
|
"learning_rate": 4.95947852288863e-05, |
|
"loss": 2.6428, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.361551761627197, |
|
"learning_rate": 4.95919293452387e-05, |
|
"loss": 2.867, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.917137622833252, |
|
"learning_rate": 4.958906351582054e-05, |
|
"loss": 2.7266, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.074246406555176, |
|
"learning_rate": 4.958618774179089e-05, |
|
"loss": 2.8333, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 6.167778968811035, |
|
"learning_rate": 4.9583302024312796e-05, |
|
"loss": 2.7009, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.73693323135376, |
|
"learning_rate": 4.9580406364553325e-05, |
|
"loss": 2.5967, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.558803558349609, |
|
"learning_rate": 4.957750076368357e-05, |
|
"loss": 2.6763, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.1173481941223145, |
|
"learning_rate": 4.957458522287868e-05, |
|
"loss": 2.8593, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.378798484802246, |
|
"learning_rate": 4.9571659743317754e-05, |
|
"loss": 2.7876, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.375428199768066, |
|
"learning_rate": 4.956872432618399e-05, |
|
"loss": 2.7443, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.509357452392578, |
|
"learning_rate": 4.9565778972664534e-05, |
|
"loss": 2.8787, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.430800437927246, |
|
"learning_rate": 4.95628236839506e-05, |
|
"loss": 2.609, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.8563618659973145, |
|
"learning_rate": 4.95598584612374e-05, |
|
"loss": 2.7912, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.738502502441406, |
|
"learning_rate": 4.9556883305724155e-05, |
|
"loss": 2.8222, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.658450603485107, |
|
"learning_rate": 4.955389821861412e-05, |
|
"loss": 2.6731, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.250921726226807, |
|
"learning_rate": 4.955090320111457e-05, |
|
"loss": 2.6865, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.725744247436523, |
|
"learning_rate": 4.954789825443678e-05, |
|
"loss": 2.6249, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"eval_loss": 2.342557668685913, |
|
"eval_runtime": 423.1443, |
|
"eval_samples_per_second": 50.151, |
|
"eval_steps_per_second": 0.392, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.008103847503662, |
|
"learning_rate": 4.954488337979604e-05, |
|
"loss": 2.6645, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.027274131774902, |
|
"learning_rate": 4.9541858578411674e-05, |
|
"loss": 2.8588, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.368979454040527, |
|
"learning_rate": 4.9538823851507006e-05, |
|
"loss": 2.7103, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.687747001647949, |
|
"learning_rate": 4.953577920030937e-05, |
|
"loss": 2.5958, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.669676780700684, |
|
"learning_rate": 4.953272462605013e-05, |
|
"loss": 2.7199, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.0332112312316895, |
|
"learning_rate": 4.952966012996466e-05, |
|
"loss": 2.62, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.005265235900879, |
|
"learning_rate": 4.952658571329233e-05, |
|
"loss": 2.5974, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.806674003601074, |
|
"learning_rate": 4.952350137727653e-05, |
|
"loss": 2.9317, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.117702484130859, |
|
"learning_rate": 4.9520407123164676e-05, |
|
"loss": 2.6551, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.5031023025512695, |
|
"learning_rate": 4.9517302952208174e-05, |
|
"loss": 2.7362, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.63228702545166, |
|
"learning_rate": 4.951418886566247e-05, |
|
"loss": 2.6743, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.421300888061523, |
|
"learning_rate": 4.951106486478698e-05, |
|
"loss": 2.6827, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.456374168395996, |
|
"learning_rate": 4.950793095084516e-05, |
|
"loss": 2.7266, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.39730978012085, |
|
"learning_rate": 4.950478712510446e-05, |
|
"loss": 2.8165, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.817816734313965, |
|
"learning_rate": 4.9501633388836354e-05, |
|
"loss": 2.6275, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 7.556663990020752, |
|
"learning_rate": 4.949846974331631e-05, |
|
"loss": 2.6368, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.171590805053711, |
|
"learning_rate": 4.949529618982381e-05, |
|
"loss": 2.8336, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.013977527618408, |
|
"learning_rate": 4.9492112729642346e-05, |
|
"loss": 2.6988, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.861149311065674, |
|
"learning_rate": 4.948891936405941e-05, |
|
"loss": 2.8883, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.240298271179199, |
|
"learning_rate": 4.948571609436649e-05, |
|
"loss": 2.8353, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.673245429992676, |
|
"learning_rate": 4.94825029218591e-05, |
|
"loss": 2.6835, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.680504322052002, |
|
"learning_rate": 4.947927984783674e-05, |
|
"loss": 2.594, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.967173099517822, |
|
"learning_rate": 4.947604687360295e-05, |
|
"loss": 2.5797, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.29856538772583, |
|
"learning_rate": 4.947280400046525e-05, |
|
"loss": 2.6197, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.518858432769775, |
|
"learning_rate": 4.9469551229735126e-05, |
|
"loss": 2.6727, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.493609428405762, |
|
"learning_rate": 4.946628856272814e-05, |
|
"loss": 2.521, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.581683158874512, |
|
"learning_rate": 4.9463016000763794e-05, |
|
"loss": 2.4878, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.769138336181641, |
|
"learning_rate": 4.945973354516564e-05, |
|
"loss": 2.6396, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.217662811279297, |
|
"learning_rate": 4.9456441197261195e-05, |
|
"loss": 2.3943, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.986959457397461, |
|
"learning_rate": 4.9453138958381994e-05, |
|
"loss": 2.9015, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.872792720794678, |
|
"learning_rate": 4.944982682986357e-05, |
|
"loss": 2.7326, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.723078727722168, |
|
"learning_rate": 4.944650481304545e-05, |
|
"loss": 2.7212, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.189296722412109, |
|
"learning_rate": 4.944317290927117e-05, |
|
"loss": 2.8079, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.358461856842041, |
|
"learning_rate": 4.943983111988827e-05, |
|
"loss": 2.6931, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.47238826751709, |
|
"learning_rate": 4.943647944624826e-05, |
|
"loss": 2.5772, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.701056957244873, |
|
"learning_rate": 4.943311788970667e-05, |
|
"loss": 2.5407, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.032464504241943, |
|
"learning_rate": 4.9429746451623024e-05, |
|
"loss": 2.6685, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.141815185546875, |
|
"learning_rate": 4.9426365133360834e-05, |
|
"loss": 2.7409, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.483497619628906, |
|
"learning_rate": 4.942297393628763e-05, |
|
"loss": 2.5305, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.223423957824707, |
|
"learning_rate": 4.941957286177491e-05, |
|
"loss": 2.8228, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.8881402015686035, |
|
"learning_rate": 4.9416161911198176e-05, |
|
"loss": 2.5842, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.5558929443359375, |
|
"learning_rate": 4.941274108593693e-05, |
|
"loss": 2.5049, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 7.3317694664001465, |
|
"learning_rate": 4.940931038737468e-05, |
|
"loss": 2.5361, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.913518905639648, |
|
"learning_rate": 4.940586981689888e-05, |
|
"loss": 2.7733, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 8.628571510314941, |
|
"learning_rate": 4.940241937590102e-05, |
|
"loss": 2.642, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.870291233062744, |
|
"learning_rate": 4.939895906577658e-05, |
|
"loss": 2.9244, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.8803253173828125, |
|
"learning_rate": 4.939548888792502e-05, |
|
"loss": 2.6324, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.114381790161133, |
|
"learning_rate": 4.939200884374979e-05, |
|
"loss": 2.674, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 7.145431041717529, |
|
"learning_rate": 4.938851893465832e-05, |
|
"loss": 2.6255, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.35526704788208, |
|
"learning_rate": 4.938501916206206e-05, |
|
"loss": 2.8467, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"eval_loss": 2.259162664413452, |
|
"eval_runtime": 421.9557, |
|
"eval_samples_per_second": 50.292, |
|
"eval_steps_per_second": 0.393, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.096010208129883, |
|
"learning_rate": 4.938150952737642e-05, |
|
"loss": 2.6732, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.902897357940674, |
|
"learning_rate": 4.937799003202081e-05, |
|
"loss": 2.5127, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.106553077697754, |
|
"learning_rate": 4.9374460677418635e-05, |
|
"loss": 2.5394, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.985318183898926, |
|
"learning_rate": 4.937092146499727e-05, |
|
"loss": 2.6342, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.647487163543701, |
|
"learning_rate": 4.9367372396188095e-05, |
|
"loss": 2.7253, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.041297435760498, |
|
"learning_rate": 4.9363813472426465e-05, |
|
"loss": 2.7264, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.295587062835693, |
|
"learning_rate": 4.9360244695151716e-05, |
|
"loss": 2.6379, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.751054286956787, |
|
"learning_rate": 4.935666606580719e-05, |
|
"loss": 2.6497, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.367273330688477, |
|
"learning_rate": 4.935307758584019e-05, |
|
"loss": 2.3892, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.491433143615723, |
|
"learning_rate": 4.9349479256702023e-05, |
|
"loss": 2.4803, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.676270484924316, |
|
"learning_rate": 4.934587107984796e-05, |
|
"loss": 2.4972, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.060196876525879, |
|
"learning_rate": 4.934225305673728e-05, |
|
"loss": 2.4878, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.352468967437744, |
|
"learning_rate": 4.93386251888332e-05, |
|
"loss": 2.4713, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 8.105636596679688, |
|
"learning_rate": 4.9334987477602975e-05, |
|
"loss": 2.7637, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.352108478546143, |
|
"learning_rate": 4.933133992451779e-05, |
|
"loss": 2.5642, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.43104362487793, |
|
"learning_rate": 4.932768253105286e-05, |
|
"loss": 2.6106, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.641717433929443, |
|
"learning_rate": 4.932401529868733e-05, |
|
"loss": 2.6049, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.041056156158447, |
|
"learning_rate": 4.932033822890436e-05, |
|
"loss": 2.512, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.870728015899658, |
|
"learning_rate": 4.9316651323191067e-05, |
|
"loss": 2.4649, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.347024917602539, |
|
"learning_rate": 4.931295458303856e-05, |
|
"loss": 2.5996, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.286810398101807, |
|
"learning_rate": 4.9309248009941914e-05, |
|
"loss": 2.5899, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.839949131011963, |
|
"learning_rate": 4.93055316054002e-05, |
|
"loss": 2.5646, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.071991920471191, |
|
"learning_rate": 4.930180537091644e-05, |
|
"loss": 2.7168, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.042398452758789, |
|
"learning_rate": 4.929806930799765e-05, |
|
"loss": 2.7169, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.924988269805908, |
|
"learning_rate": 4.9294323418154805e-05, |
|
"loss": 2.4587, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.021846294403076, |
|
"learning_rate": 4.929056770290288e-05, |
|
"loss": 2.5327, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.043169975280762, |
|
"learning_rate": 4.928680216376079e-05, |
|
"loss": 2.4268, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 7.106345176696777, |
|
"learning_rate": 4.9283026802251454e-05, |
|
"loss": 2.5116, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.8474650382995605, |
|
"learning_rate": 4.927924161990175e-05, |
|
"loss": 2.5163, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.736364841461182, |
|
"learning_rate": 4.9275446618242516e-05, |
|
"loss": 2.4918, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.512024879455566, |
|
"learning_rate": 4.927164179880858e-05, |
|
"loss": 2.5756, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.911081790924072, |
|
"learning_rate": 4.926782716313874e-05, |
|
"loss": 2.4661, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.932026386260986, |
|
"learning_rate": 4.9264002712775745e-05, |
|
"loss": 2.5059, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.116228103637695, |
|
"learning_rate": 4.9260168449266335e-05, |
|
"loss": 2.4183, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.023027420043945, |
|
"learning_rate": 4.925632437416121e-05, |
|
"loss": 2.4144, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.4858832359313965, |
|
"learning_rate": 4.925247048901504e-05, |
|
"loss": 2.4708, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.690423965454102, |
|
"learning_rate": 4.924860679538645e-05, |
|
"loss": 2.7061, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.818970203399658, |
|
"learning_rate": 4.924473329483806e-05, |
|
"loss": 2.6152, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.238198280334473, |
|
"learning_rate": 4.924084998893642e-05, |
|
"loss": 2.3184, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.587952613830566, |
|
"learning_rate": 4.923695687925207e-05, |
|
"loss": 2.6381, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 5.2837066650390625, |
|
"learning_rate": 4.923305396735952e-05, |
|
"loss": 2.52, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.908483028411865, |
|
"learning_rate": 4.9229141254837226e-05, |
|
"loss": 2.675, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.803688049316406, |
|
"learning_rate": 4.922521874326761e-05, |
|
"loss": 2.6054, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.124865531921387, |
|
"learning_rate": 4.922128643423707e-05, |
|
"loss": 2.6076, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.534984111785889, |
|
"learning_rate": 4.921734432933596e-05, |
|
"loss": 2.4958, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 7.013035297393799, |
|
"learning_rate": 4.921339243015858e-05, |
|
"loss": 2.6099, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.598790645599365, |
|
"learning_rate": 4.920943073830322e-05, |
|
"loss": 2.493, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.958088397979736, |
|
"learning_rate": 4.920545925537212e-05, |
|
"loss": 2.5689, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.180896282196045, |
|
"learning_rate": 4.9201477982971466e-05, |
|
"loss": 2.6138, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 4.9859209060668945, |
|
"learning_rate": 4.919748692271141e-05, |
|
"loss": 2.5003, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 2.1625306606292725, |
|
"eval_runtime": 422.5397, |
|
"eval_samples_per_second": 50.223, |
|
"eval_steps_per_second": 0.393, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.54530143737793, |
|
"learning_rate": 4.9193486076206075e-05, |
|
"loss": 2.446, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.814847469329834, |
|
"learning_rate": 4.918947544507353e-05, |
|
"loss": 2.3595, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.315622806549072, |
|
"learning_rate": 4.91854550309358e-05, |
|
"loss": 2.6138, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.739955425262451, |
|
"learning_rate": 4.9181424835418874e-05, |
|
"loss": 2.5347, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.838977336883545, |
|
"learning_rate": 4.91773848601527e-05, |
|
"loss": 2.3811, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.2752766609191895, |
|
"learning_rate": 4.9173335106771154e-05, |
|
"loss": 2.3222, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 7.31117057800293, |
|
"learning_rate": 4.916927557691211e-05, |
|
"loss": 2.3912, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.800614833831787, |
|
"learning_rate": 4.916520627221736e-05, |
|
"loss": 2.5183, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.574156284332275, |
|
"learning_rate": 4.9161127194332655e-05, |
|
"loss": 2.4722, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.964874744415283, |
|
"learning_rate": 4.915703834490773e-05, |
|
"loss": 2.6562, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.67961311340332, |
|
"learning_rate": 4.915293972559623e-05, |
|
"loss": 2.6234, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.129438877105713, |
|
"learning_rate": 4.9148831338055766e-05, |
|
"loss": 2.3889, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.661647796630859, |
|
"learning_rate": 4.9144713183947915e-05, |
|
"loss": 2.337, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.4586567878723145, |
|
"learning_rate": 4.914058526493819e-05, |
|
"loss": 2.525, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 6.5740532875061035, |
|
"learning_rate": 4.913644758269604e-05, |
|
"loss": 2.4817, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 5.7009124755859375, |
|
"learning_rate": 4.9132300138894895e-05, |
|
"loss": 2.3586, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.846322536468506, |
|
"learning_rate": 4.912814293521211e-05, |
|
"loss": 2.4756, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.497846603393555, |
|
"learning_rate": 4.9123975973328986e-05, |
|
"loss": 2.3421, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.7938971519470215, |
|
"learning_rate": 4.911979925493079e-05, |
|
"loss": 2.609, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.914577960968018, |
|
"learning_rate": 4.91156127817067e-05, |
|
"loss": 2.5344, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.262577533721924, |
|
"learning_rate": 4.911141655534989e-05, |
|
"loss": 2.4138, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 4.629760265350342, |
|
"learning_rate": 4.9107210577557415e-05, |
|
"loss": 2.4292, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 18.421449661254883, |
|
"learning_rate": 4.9102994850030344e-05, |
|
"loss": 2.594, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.440621376037598, |
|
"learning_rate": 4.909876937447363e-05, |
|
"loss": 2.338, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.139665603637695, |
|
"learning_rate": 4.90945341525962e-05, |
|
"loss": 2.4571, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.897266864776611, |
|
"learning_rate": 4.909028918611091e-05, |
|
"loss": 2.4774, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.971827507019043, |
|
"learning_rate": 4.908603447673457e-05, |
|
"loss": 2.5588, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.420186996459961, |
|
"learning_rate": 4.9081770026187914e-05, |
|
"loss": 2.4299, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.321455478668213, |
|
"learning_rate": 4.907749583619562e-05, |
|
"loss": 2.4522, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.8206400871276855, |
|
"learning_rate": 4.907321190848632e-05, |
|
"loss": 2.5081, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.017814636230469, |
|
"learning_rate": 4.9068918244792565e-05, |
|
"loss": 2.2797, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.493027210235596, |
|
"learning_rate": 4.906461484685085e-05, |
|
"loss": 2.4124, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.415233612060547, |
|
"learning_rate": 4.906030171640163e-05, |
|
"loss": 2.4818, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.612936496734619, |
|
"learning_rate": 4.905597885518923e-05, |
|
"loss": 2.5769, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.570766448974609, |
|
"learning_rate": 4.9051646264962005e-05, |
|
"loss": 2.3302, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.15294885635376, |
|
"learning_rate": 4.904730394747216e-05, |
|
"loss": 2.3181, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.568897247314453, |
|
"learning_rate": 4.904295190447587e-05, |
|
"loss": 2.4157, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 5.8973236083984375, |
|
"learning_rate": 4.903859013773325e-05, |
|
"loss": 2.5564, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.484323024749756, |
|
"learning_rate": 4.903421864900835e-05, |
|
"loss": 2.3377, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.30608606338501, |
|
"learning_rate": 4.902983744006913e-05, |
|
"loss": 2.3119, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 6.372623920440674, |
|
"learning_rate": 4.902544651268749e-05, |
|
"loss": 2.3764, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.923567771911621, |
|
"learning_rate": 4.902104586863927e-05, |
|
"loss": 2.452, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.1613664627075195, |
|
"learning_rate": 4.901663550970422e-05, |
|
"loss": 2.5677, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.322874069213867, |
|
"learning_rate": 4.901221543766604e-05, |
|
"loss": 2.3442, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.805524826049805, |
|
"learning_rate": 4.900778565431236e-05, |
|
"loss": 2.377, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.61544942855835, |
|
"learning_rate": 4.900334616143471e-05, |
|
"loss": 2.2881, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 9.518655776977539, |
|
"learning_rate": 4.8998896960828576e-05, |
|
"loss": 2.5395, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.4181108474731445, |
|
"learning_rate": 4.899443805429336e-05, |
|
"loss": 2.5815, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 7.65767765045166, |
|
"learning_rate": 4.8989969443632366e-05, |
|
"loss": 2.383, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.252597332000732, |
|
"learning_rate": 4.898549113065287e-05, |
|
"loss": 2.3579, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"eval_loss": 2.0875422954559326, |
|
"eval_runtime": 423.4806, |
|
"eval_samples_per_second": 50.111, |
|
"eval_steps_per_second": 0.392, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.8088860511779785, |
|
"learning_rate": 4.898100311716604e-05, |
|
"loss": 2.4613, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.02274227142334, |
|
"learning_rate": 4.897650540498697e-05, |
|
"loss": 2.2701, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.915895938873291, |
|
"learning_rate": 4.8971997995934675e-05, |
|
"loss": 2.3393, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.784215450286865, |
|
"learning_rate": 4.896748089183211e-05, |
|
"loss": 2.5954, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.802945613861084, |
|
"learning_rate": 4.896295409450612e-05, |
|
"loss": 2.2532, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.0984649658203125, |
|
"learning_rate": 4.895841760578751e-05, |
|
"loss": 2.3553, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.783207893371582, |
|
"learning_rate": 4.8953871427510965e-05, |
|
"loss": 2.445, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.820375919342041, |
|
"learning_rate": 4.894931556151512e-05, |
|
"loss": 2.4465, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 54.05234146118164, |
|
"learning_rate": 4.894475000964249e-05, |
|
"loss": 2.4557, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 4.997950077056885, |
|
"learning_rate": 4.8940174773739564e-05, |
|
"loss": 2.2733, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.4290642738342285, |
|
"learning_rate": 4.8935589855656694e-05, |
|
"loss": 2.3262, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.6273064613342285, |
|
"learning_rate": 4.893099525724818e-05, |
|
"loss": 2.4704, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.293818473815918, |
|
"learning_rate": 4.892639098037221e-05, |
|
"loss": 2.525, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 6.767632484436035, |
|
"learning_rate": 4.892177702689091e-05, |
|
"loss": 2.2745, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.506222248077393, |
|
"learning_rate": 4.891715339867033e-05, |
|
"loss": 2.4635, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 5.971251010894775, |
|
"learning_rate": 4.8912520097580395e-05, |
|
"loss": 2.5934, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.757556438446045, |
|
"learning_rate": 4.890787712549496e-05, |
|
"loss": 2.4497, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 8.545730590820312, |
|
"learning_rate": 4.890369018346882e-05, |
|
"loss": 2.3384, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.717365264892578, |
|
"learning_rate": 4.889902884166843e-05, |
|
"loss": 2.3127, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.706165790557861, |
|
"learning_rate": 4.889435783432884e-05, |
|
"loss": 2.1936, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.551875591278076, |
|
"learning_rate": 4.888967716333917e-05, |
|
"loss": 2.5815, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.260705471038818, |
|
"learning_rate": 4.888498683059243e-05, |
|
"loss": 2.3263, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.164261341094971, |
|
"learning_rate": 4.8880286837985526e-05, |
|
"loss": 2.276, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.247165679931641, |
|
"learning_rate": 4.887557718741931e-05, |
|
"loss": 2.4281, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.835640907287598, |
|
"learning_rate": 4.88708578807985e-05, |
|
"loss": 2.2763, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.9643354415893555, |
|
"learning_rate": 4.886612892003174e-05, |
|
"loss": 2.4745, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 7.18971061706543, |
|
"learning_rate": 4.886139030703159e-05, |
|
"loss": 2.4685, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.121529579162598, |
|
"learning_rate": 4.885664204371446e-05, |
|
"loss": 2.5183, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.581070423126221, |
|
"learning_rate": 4.885188413200075e-05, |
|
"loss": 2.5167, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.491427898406982, |
|
"learning_rate": 4.8847116573814676e-05, |
|
"loss": 2.383, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.595372676849365, |
|
"learning_rate": 4.8842339371084414e-05, |
|
"loss": 2.0989, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.8517165184021, |
|
"learning_rate": 4.8837552525742004e-05, |
|
"loss": 2.4912, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.275095462799072, |
|
"learning_rate": 4.88327560397234e-05, |
|
"loss": 2.3524, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 4.616918563842773, |
|
"learning_rate": 4.8827949914968474e-05, |
|
"loss": 1.9738, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 7.1793999671936035, |
|
"learning_rate": 4.882313415342097e-05, |
|
"loss": 2.2815, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.783013343811035, |
|
"learning_rate": 4.881830875702852e-05, |
|
"loss": 2.4371, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 18.31147003173828, |
|
"learning_rate": 4.88134737277427e-05, |
|
"loss": 2.5314, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.92976713180542, |
|
"learning_rate": 4.880862906751893e-05, |
|
"loss": 2.2746, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.150814056396484, |
|
"learning_rate": 4.8803774778316557e-05, |
|
"loss": 2.277, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.579446792602539, |
|
"learning_rate": 4.87989108620988e-05, |
|
"loss": 2.2136, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 6.303000450134277, |
|
"learning_rate": 4.8794037320832806e-05, |
|
"loss": 2.258, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 5.469798564910889, |
|
"learning_rate": 4.878915415648957e-05, |
|
"loss": 2.4185, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.8365960121154785, |
|
"learning_rate": 4.8784261371044016e-05, |
|
"loss": 2.3902, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.7992987632751465, |
|
"learning_rate": 4.877984963973593e-05, |
|
"loss": 2.3468, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.936422348022461, |
|
"learning_rate": 4.877493857965077e-05, |
|
"loss": 2.3446, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.544583797454834, |
|
"learning_rate": 4.8770017904212525e-05, |
|
"loss": 2.2751, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.835214138031006, |
|
"learning_rate": 4.876508761541129e-05, |
|
"loss": 2.3919, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.438685417175293, |
|
"learning_rate": 4.8760147715241e-05, |
|
"loss": 2.3636, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.63602876663208, |
|
"learning_rate": 4.875519820569954e-05, |
|
"loss": 2.4277, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.563483238220215, |
|
"learning_rate": 4.8750239088788636e-05, |
|
"loss": 2.2586, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"eval_loss": 2.014089584350586, |
|
"eval_runtime": 422.9188, |
|
"eval_samples_per_second": 50.177, |
|
"eval_steps_per_second": 0.393, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.133357524871826, |
|
"learning_rate": 4.8745270366513914e-05, |
|
"loss": 2.2622, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 7.025454521179199, |
|
"learning_rate": 4.87402920408849e-05, |
|
"loss": 2.2786, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.6243181228637695, |
|
"learning_rate": 4.873530411391498e-05, |
|
"loss": 2.3901, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.4480109214782715, |
|
"learning_rate": 4.873030658762143e-05, |
|
"loss": 2.1692, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.633693695068359, |
|
"learning_rate": 4.8725299464025414e-05, |
|
"loss": 2.198, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.928077220916748, |
|
"learning_rate": 4.872028274515198e-05, |
|
"loss": 2.0226, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.940571308135986, |
|
"learning_rate": 4.871525643303004e-05, |
|
"loss": 2.5249, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.45697021484375, |
|
"learning_rate": 4.871022052969241e-05, |
|
"loss": 2.184, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.87355899810791, |
|
"learning_rate": 4.870517503717576e-05, |
|
"loss": 2.4252, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.142642021179199, |
|
"learning_rate": 4.8700119957520653e-05, |
|
"loss": 2.2134, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.03863000869751, |
|
"learning_rate": 4.869505529277154e-05, |
|
"loss": 2.4, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 4.4546003341674805, |
|
"learning_rate": 4.8689981044976715e-05, |
|
"loss": 2.2927, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 10.309746742248535, |
|
"learning_rate": 4.868489721618838e-05, |
|
"loss": 2.3127, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 5.640440464019775, |
|
"learning_rate": 4.867980380846259e-05, |
|
"loss": 2.1445, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 7.428249835968018, |
|
"learning_rate": 4.86747008238593e-05, |
|
"loss": 2.4585, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.020784854888916, |
|
"learning_rate": 4.86695882644423e-05, |
|
"loss": 2.2498, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 6.098503112792969, |
|
"learning_rate": 4.8664466132279294e-05, |
|
"loss": 2.1231, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.907447814941406, |
|
"learning_rate": 4.8659334429441825e-05, |
|
"loss": 2.2518, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 38.992706298828125, |
|
"learning_rate": 4.8654193158005336e-05, |
|
"loss": 2.4063, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.094198226928711, |
|
"learning_rate": 4.86490423200491e-05, |
|
"loss": 2.1258, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.0221686363220215, |
|
"learning_rate": 4.864388191765629e-05, |
|
"loss": 2.5631, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.385717391967773, |
|
"learning_rate": 4.863871195291395e-05, |
|
"loss": 2.4827, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.672618389129639, |
|
"learning_rate": 4.863353242791297e-05, |
|
"loss": 2.2295, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.568421840667725, |
|
"learning_rate": 4.8628343344748116e-05, |
|
"loss": 2.2235, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.92976713180542, |
|
"learning_rate": 4.8623144705518034e-05, |
|
"loss": 2.2561, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 7.64150857925415, |
|
"learning_rate": 4.8617936512325214e-05, |
|
"loss": 2.5747, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.804096221923828, |
|
"learning_rate": 4.861271876727601e-05, |
|
"loss": 2.1542, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.928376197814941, |
|
"learning_rate": 4.860749147248066e-05, |
|
"loss": 2.2101, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.408993244171143, |
|
"learning_rate": 4.8602254630053235e-05, |
|
"loss": 2.3248, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.5954813957214355, |
|
"learning_rate": 4.8597008242111694e-05, |
|
"loss": 2.3062, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.591384410858154, |
|
"learning_rate": 4.859175231077785e-05, |
|
"loss": 2.2668, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.448415279388428, |
|
"learning_rate": 4.858648683817736e-05, |
|
"loss": 2.2404, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.797817707061768, |
|
"learning_rate": 4.8581211826439765e-05, |
|
"loss": 2.3924, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.555346965789795, |
|
"learning_rate": 4.8575927277698435e-05, |
|
"loss": 2.2588, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 6.0208821296691895, |
|
"learning_rate": 4.857063319409062e-05, |
|
"loss": 2.2423, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.1246466636657715, |
|
"learning_rate": 4.8565329577757426e-05, |
|
"loss": 2.1666, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 12.975321769714355, |
|
"learning_rate": 4.856001643084378e-05, |
|
"loss": 2.253, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.758956432342529, |
|
"learning_rate": 4.855469375549853e-05, |
|
"loss": 2.061, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 5.913463115692139, |
|
"learning_rate": 4.8549361553874295e-05, |
|
"loss": 2.2273, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 8.23082160949707, |
|
"learning_rate": 4.854401982812762e-05, |
|
"loss": 2.599, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 8.17214298248291, |
|
"learning_rate": 4.853866858041887e-05, |
|
"loss": 2.5361, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 9.37897777557373, |
|
"learning_rate": 4.853330781291224e-05, |
|
"loss": 2.3414, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 5.9388427734375, |
|
"learning_rate": 4.852793752777582e-05, |
|
"loss": 2.3078, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.1688055992126465, |
|
"learning_rate": 4.8522557727181517e-05, |
|
"loss": 2.2146, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 13.751556396484375, |
|
"learning_rate": 4.85171684133051e-05, |
|
"loss": 2.3457, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.383643627166748, |
|
"learning_rate": 4.851176958832618e-05, |
|
"loss": 2.4478, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.072232723236084, |
|
"learning_rate": 4.850636125442821e-05, |
|
"loss": 2.0503, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.105084419250488, |
|
"learning_rate": 4.850094341379851e-05, |
|
"loss": 2.4092, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 5.889256477355957, |
|
"learning_rate": 4.8495516068628216e-05, |
|
"loss": 2.1762, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 10.305624008178711, |
|
"learning_rate": 4.849007922111233e-05, |
|
"loss": 2.2757, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"eval_loss": 1.962647795677185, |
|
"eval_runtime": 422.8287, |
|
"eval_samples_per_second": 50.188, |
|
"eval_steps_per_second": 0.393, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 12.288636207580566, |
|
"learning_rate": 4.8484632873449676e-05, |
|
"loss": 2.3152, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 18.38760757446289, |
|
"learning_rate": 4.847917702784295e-05, |
|
"loss": 2.3602, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 7.56703519821167, |
|
"learning_rate": 4.8473711686498655e-05, |
|
"loss": 2.3064, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.525967121124268, |
|
"learning_rate": 4.8468236851627176e-05, |
|
"loss": 2.2774, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 5.542328357696533, |
|
"learning_rate": 4.846275252544269e-05, |
|
"loss": 2.2453, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 9.50683879852295, |
|
"learning_rate": 4.845725871016324e-05, |
|
"loss": 2.43, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 7.898688793182373, |
|
"learning_rate": 4.84517554080107e-05, |
|
"loss": 2.1673, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.77755069732666, |
|
"learning_rate": 4.8446242621210804e-05, |
|
"loss": 2.2338, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.6925225257873535, |
|
"learning_rate": 4.844072035199307e-05, |
|
"loss": 2.2819, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.21223258972168, |
|
"learning_rate": 4.843518860259091e-05, |
|
"loss": 2.3132, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.520556926727295, |
|
"learning_rate": 4.842964737524153e-05, |
|
"loss": 2.3449, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 21.28665542602539, |
|
"learning_rate": 4.842409667218598e-05, |
|
"loss": 2.315, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 6.1759033203125, |
|
"learning_rate": 4.841853649566914e-05, |
|
"loss": 2.3547, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 19.148984909057617, |
|
"learning_rate": 4.841296684793973e-05, |
|
"loss": 2.5135, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 9.910473823547363, |
|
"learning_rate": 4.84073877312503e-05, |
|
"loss": 2.204, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 27.07410430908203, |
|
"learning_rate": 4.8401799147857216e-05, |
|
"loss": 2.4857, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 62.342281341552734, |
|
"learning_rate": 4.839620110002069e-05, |
|
"loss": 2.3909, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 103.58372497558594, |
|
"learning_rate": 4.8391154766739814e-05, |
|
"loss": 2.5908, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 22.150035858154297, |
|
"learning_rate": 4.83855387427013e-05, |
|
"loss": 2.3127, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 14.04951000213623, |
|
"learning_rate": 4.8379913260795586e-05, |
|
"loss": 2.4046, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 22.72818946838379, |
|
"learning_rate": 4.837427832329779e-05, |
|
"loss": 2.8313, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 13.296052932739258, |
|
"learning_rate": 4.836863393248684e-05, |
|
"loss": 2.4321, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 5.5498833656311035, |
|
"learning_rate": 4.836298009064554e-05, |
|
"loss": 2.3137, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 13.326327323913574, |
|
"learning_rate": 4.835731680006047e-05, |
|
"loss": 2.291, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 32.48208999633789, |
|
"learning_rate": 4.835164406302205e-05, |
|
"loss": 2.6136, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 11.908808708190918, |
|
"learning_rate": 4.834596188182453e-05, |
|
"loss": 2.1887, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 10.075331687927246, |
|
"learning_rate": 4.834027025876595e-05, |
|
"loss": 2.3643, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 5.72551155090332, |
|
"learning_rate": 4.83345691961482e-05, |
|
"loss": 2.2839, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 5.926360130310059, |
|
"learning_rate": 4.8328858696276955e-05, |
|
"loss": 2.3022, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 9.422292709350586, |
|
"learning_rate": 4.832313876146175e-05, |
|
"loss": 2.4454, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 13.58336353302002, |
|
"learning_rate": 4.83174093940159e-05, |
|
"loss": 2.176, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 8.902631759643555, |
|
"learning_rate": 4.831167059625654e-05, |
|
"loss": 2.3952, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 16.641265869140625, |
|
"learning_rate": 4.830592237050464e-05, |
|
"loss": 2.2495, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 11.762859344482422, |
|
"learning_rate": 4.830016471908496e-05, |
|
"loss": 2.4345, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 6.909867763519287, |
|
"learning_rate": 4.829439764432607e-05, |
|
"loss": 2.207, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 13.684906959533691, |
|
"learning_rate": 4.828862114856038e-05, |
|
"loss": 2.3951, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 6.779490947723389, |
|
"learning_rate": 4.8282835234124074e-05, |
|
"loss": 2.3883, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 7.6955366134643555, |
|
"learning_rate": 4.827703990335718e-05, |
|
"loss": 2.3341, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 7.92070198059082, |
|
"learning_rate": 4.82712351586035e-05, |
|
"loss": 2.4001, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 12.462532997131348, |
|
"learning_rate": 4.826542100221067e-05, |
|
"loss": 2.2261, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 18.708791732788086, |
|
"learning_rate": 4.8259597436530125e-05, |
|
"loss": 2.1191, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 6.56512975692749, |
|
"learning_rate": 4.8253764463917096e-05, |
|
"loss": 2.2219, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 6.9286699295043945, |
|
"learning_rate": 4.8247922086730634e-05, |
|
"loss": 2.3153, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.228487491607666, |
|
"learning_rate": 4.8242070307333574e-05, |
|
"loss": 2.4929, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 11.886446952819824, |
|
"learning_rate": 4.8236209128092566e-05, |
|
"loss": 2.3487, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 6.152067184448242, |
|
"learning_rate": 4.823033855137807e-05, |
|
"loss": 2.2251, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 14.04540729522705, |
|
"learning_rate": 4.8224458579564334e-05, |
|
"loss": 2.3503, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 9.333481788635254, |
|
"learning_rate": 4.8218569215029405e-05, |
|
"loss": 2.3221, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 35.72920227050781, |
|
"learning_rate": 4.821267046015513e-05, |
|
"loss": 2.3805, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 18.05036163330078, |
|
"learning_rate": 4.820676231732716e-05, |
|
"loss": 2.2986, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"eval_loss": 1.956457495689392, |
|
"eval_runtime": 423.6428, |
|
"eval_samples_per_second": 50.092, |
|
"eval_steps_per_second": 0.392, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 18.98715591430664, |
|
"learning_rate": 4.820084478893494e-05, |
|
"loss": 2.3742, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.488858699798584, |
|
"learning_rate": 4.819491787737171e-05, |
|
"loss": 2.3052, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.39491605758667, |
|
"learning_rate": 4.81889815850345e-05, |
|
"loss": 2.54, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.2730865478515625, |
|
"learning_rate": 4.8183035914324136e-05, |
|
"loss": 2.2487, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.395474433898926, |
|
"learning_rate": 4.8177080867645245e-05, |
|
"loss": 2.3872, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 13.659703254699707, |
|
"learning_rate": 4.817111644740624e-05, |
|
"loss": 2.3713, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 21.210573196411133, |
|
"learning_rate": 4.8165142656019325e-05, |
|
"loss": 2.2776, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 35.836185455322266, |
|
"learning_rate": 4.81591594959005e-05, |
|
"loss": 2.4872, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 13.00560188293457, |
|
"learning_rate": 4.815316696946953e-05, |
|
"loss": 2.2952, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.423733234405518, |
|
"learning_rate": 4.814716507915e-05, |
|
"loss": 2.3123, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 29.948333740234375, |
|
"learning_rate": 4.8141153827369264e-05, |
|
"loss": 2.0931, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 7.296632289886475, |
|
"learning_rate": 4.813513321655848e-05, |
|
"loss": 2.2626, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 5.078789710998535, |
|
"learning_rate": 4.8129103249152554e-05, |
|
"loss": 2.193, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 6.776204586029053, |
|
"learning_rate": 4.812306392759022e-05, |
|
"loss": 2.2646, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 6.253006458282471, |
|
"learning_rate": 4.811701525431396e-05, |
|
"loss": 2.2343, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 41.79158401489258, |
|
"learning_rate": 4.811095723177006e-05, |
|
"loss": 2.232, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 8.95724105834961, |
|
"learning_rate": 4.810488986240858e-05, |
|
"loss": 2.2008, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 9.038932800292969, |
|
"learning_rate": 4.8098813148683356e-05, |
|
"loss": 2.1931, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 5.18593692779541, |
|
"learning_rate": 4.809272709305201e-05, |
|
"loss": 2.2143, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 7.01367712020874, |
|
"learning_rate": 4.808663169797594e-05, |
|
"loss": 2.1562, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.437080383300781, |
|
"learning_rate": 4.808052696592032e-05, |
|
"loss": 2.1143, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 7.393974304199219, |
|
"learning_rate": 4.807441289935411e-05, |
|
"loss": 2.4122, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 5.011038780212402, |
|
"learning_rate": 4.806828950075002e-05, |
|
"loss": 2.1467, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 9.512205123901367, |
|
"learning_rate": 4.806215677258456e-05, |
|
"loss": 2.2061, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 5.728775978088379, |
|
"learning_rate": 4.805601471733801e-05, |
|
"loss": 2.234, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 10.132803916931152, |
|
"learning_rate": 4.8049863337494383e-05, |
|
"loss": 2.249, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 11.986092567443848, |
|
"learning_rate": 4.8043702635541534e-05, |
|
"loss": 2.2069, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.403532981872559, |
|
"learning_rate": 4.8037532613971035e-05, |
|
"loss": 2.1335, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 28.95196533203125, |
|
"learning_rate": 4.803135327527825e-05, |
|
"loss": 2.3068, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 7.692062854766846, |
|
"learning_rate": 4.8025164621962284e-05, |
|
"loss": 2.4062, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 5.148292541503906, |
|
"learning_rate": 4.801896665652603e-05, |
|
"loss": 2.2943, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 7.389751434326172, |
|
"learning_rate": 4.801275938147617e-05, |
|
"loss": 2.2432, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 9.887344360351562, |
|
"learning_rate": 4.800654279932311e-05, |
|
"loss": 2.3278, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 5.3506927490234375, |
|
"learning_rate": 4.800031691258103e-05, |
|
"loss": 2.1808, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 7.447039604187012, |
|
"learning_rate": 4.79940817237679e-05, |
|
"loss": 2.3918, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 7.131879806518555, |
|
"learning_rate": 4.798783723540541e-05, |
|
"loss": 2.3571, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 9.371427536010742, |
|
"learning_rate": 4.7981583450019045e-05, |
|
"loss": 2.3176, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.0981903076171875, |
|
"learning_rate": 4.797532037013803e-05, |
|
"loss": 2.2672, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 5.475356578826904, |
|
"learning_rate": 4.796904799829537e-05, |
|
"loss": 2.1954, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.3766937255859375, |
|
"learning_rate": 4.796276633702781e-05, |
|
"loss": 2.0606, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.111029624938965, |
|
"learning_rate": 4.795647538887584e-05, |
|
"loss": 2.148, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.0170817375183105, |
|
"learning_rate": 4.795017515638375e-05, |
|
"loss": 1.9902, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 6.29054594039917, |
|
"learning_rate": 4.794386564209953e-05, |
|
"loss": 2.0952, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.003581523895264, |
|
"learning_rate": 4.793754684857497e-05, |
|
"loss": 1.987, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.239002227783203, |
|
"learning_rate": 4.7931218778365594e-05, |
|
"loss": 2.1665, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.7367377281188965, |
|
"learning_rate": 4.792488143403067e-05, |
|
"loss": 2.1673, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.604746341705322, |
|
"learning_rate": 4.791853481813322e-05, |
|
"loss": 2.1562, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.525786876678467, |
|
"learning_rate": 4.7912178933240035e-05, |
|
"loss": 2.1059, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.215798377990723, |
|
"learning_rate": 4.790581378192164e-05, |
|
"loss": 2.1964, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 21.55533790588379, |
|
"learning_rate": 4.7899439366752294e-05, |
|
"loss": 2.2484, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 1.8438102006912231, |
|
"eval_runtime": 423.8305, |
|
"eval_samples_per_second": 50.07, |
|
"eval_steps_per_second": 0.392, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.74520206451416, |
|
"learning_rate": 4.789305569031002e-05, |
|
"loss": 2.0724, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.096541404724121, |
|
"learning_rate": 4.7886662755176594e-05, |
|
"loss": 2.142, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.870784282684326, |
|
"learning_rate": 4.7880260563937515e-05, |
|
"loss": 2.0857, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.653884410858154, |
|
"learning_rate": 4.787384911918204e-05, |
|
"loss": 2.1568, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 4.95023775100708, |
|
"learning_rate": 4.786742842350316e-05, |
|
"loss": 2.1114, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.527095317840576, |
|
"learning_rate": 4.786099847949761e-05, |
|
"loss": 2.1395, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.1902899742126465, |
|
"learning_rate": 4.785455928976588e-05, |
|
"loss": 1.9845, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.819202899932861, |
|
"learning_rate": 4.784811085691218e-05, |
|
"loss": 2.0774, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.495123386383057, |
|
"learning_rate": 4.7841653183544455e-05, |
|
"loss": 2.2617, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.861876487731934, |
|
"learning_rate": 4.783518627227441e-05, |
|
"loss": 2.313, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.845401763916016, |
|
"learning_rate": 4.782871012571747e-05, |
|
"loss": 2.1452, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.459070205688477, |
|
"learning_rate": 4.782222474649279e-05, |
|
"loss": 1.91, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 6.476640224456787, |
|
"learning_rate": 4.7815730137223273e-05, |
|
"loss": 2.1809, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 8.694378852844238, |
|
"learning_rate": 4.780922630053555e-05, |
|
"loss": 2.0834, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 7.8879923820495605, |
|
"learning_rate": 4.780271323905998e-05, |
|
"loss": 2.2303, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 7.245183944702148, |
|
"learning_rate": 4.7796190955430655e-05, |
|
"loss": 1.9748, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 5.349466323852539, |
|
"learning_rate": 4.7789659452285394e-05, |
|
"loss": 2.1068, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 11.410516738891602, |
|
"learning_rate": 4.778311873226575e-05, |
|
"loss": 2.2332, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.053547382354736, |
|
"learning_rate": 4.777656879801701e-05, |
|
"loss": 2.0781, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.239130973815918, |
|
"learning_rate": 4.777000965218817e-05, |
|
"loss": 2.1827, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.953700542449951, |
|
"learning_rate": 4.776344129743196e-05, |
|
"loss": 2.2209, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 7.110509872436523, |
|
"learning_rate": 4.775686373640484e-05, |
|
"loss": 2.3088, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.350749969482422, |
|
"learning_rate": 4.7750276971766996e-05, |
|
"loss": 1.9362, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.501804828643799, |
|
"learning_rate": 4.774368100618231e-05, |
|
"loss": 1.9958, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.133676052093506, |
|
"learning_rate": 4.77370758423184e-05, |
|
"loss": 2.0108, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.481894016265869, |
|
"learning_rate": 4.773046148284663e-05, |
|
"loss": 2.0169, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 21.402868270874023, |
|
"learning_rate": 4.772383793044205e-05, |
|
"loss": 1.9482, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.794774532318115, |
|
"learning_rate": 4.771720518778344e-05, |
|
"loss": 2.1169, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.211555480957031, |
|
"learning_rate": 4.7710563257553286e-05, |
|
"loss": 2.1805, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.304569244384766, |
|
"learning_rate": 4.7703912142437815e-05, |
|
"loss": 2.0067, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.812686920166016, |
|
"learning_rate": 4.769725184512694e-05, |
|
"loss": 2.0295, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 7.2158308029174805, |
|
"learning_rate": 4.7690582368314304e-05, |
|
"loss": 2.1737, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.527587890625, |
|
"learning_rate": 4.7683903714697264e-05, |
|
"loss": 2.099, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.601354122161865, |
|
"learning_rate": 4.767721588697688e-05, |
|
"loss": 2.308, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 7.4081711769104, |
|
"learning_rate": 4.7670518887857916e-05, |
|
"loss": 2.1246, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.526096820831299, |
|
"learning_rate": 4.766381272004887e-05, |
|
"loss": 2.1721, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.237331867218018, |
|
"learning_rate": 4.765709738626192e-05, |
|
"loss": 2.0706, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 11.353988647460938, |
|
"learning_rate": 4.7650372889212976e-05, |
|
"loss": 1.929, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 9.323110580444336, |
|
"learning_rate": 4.7643639231621626e-05, |
|
"loss": 2.0205, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.578587055206299, |
|
"learning_rate": 4.76368964162112e-05, |
|
"loss": 1.9712, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 5.492863178253174, |
|
"learning_rate": 4.76301444457087e-05, |
|
"loss": 2.1388, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 8.908137321472168, |
|
"learning_rate": 4.7623383322844835e-05, |
|
"loss": 1.9039, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 6.375561714172363, |
|
"learning_rate": 4.7616613050354025e-05, |
|
"loss": 2.0121, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.633610248565674, |
|
"learning_rate": 4.760983363097439e-05, |
|
"loss": 2.0154, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.779325008392334, |
|
"learning_rate": 4.760304506744774e-05, |
|
"loss": 2.0507, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.787586688995361, |
|
"learning_rate": 4.75962473625196e-05, |
|
"loss": 2.078, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.715172290802002, |
|
"learning_rate": 4.758944051893917e-05, |
|
"loss": 2.063, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.478941440582275, |
|
"learning_rate": 4.7582624539459366e-05, |
|
"loss": 2.0065, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.507533073425293, |
|
"learning_rate": 4.7575799426836785e-05, |
|
"loss": 1.9767, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.369671821594238, |
|
"learning_rate": 4.7568965183831726e-05, |
|
"loss": 1.9402, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"eval_loss": 1.7692859172821045, |
|
"eval_runtime": 423.4176, |
|
"eval_samples_per_second": 50.118, |
|
"eval_steps_per_second": 0.392, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.809439659118652, |
|
"learning_rate": 4.756212181320817e-05, |
|
"loss": 1.9837, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 4.8267292976379395, |
|
"learning_rate": 4.7555269317733806e-05, |
|
"loss": 1.9497, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.9060163497924805, |
|
"learning_rate": 4.754840770018001e-05, |
|
"loss": 2.0145, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.141025066375732, |
|
"learning_rate": 4.7541536963321834e-05, |
|
"loss": 1.8596, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 7.8809123039245605, |
|
"learning_rate": 4.753465710993801e-05, |
|
"loss": 2.0378, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.924752235412598, |
|
"learning_rate": 4.7527768142811003e-05, |
|
"loss": 2.1096, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.651288986206055, |
|
"learning_rate": 4.7520870064726916e-05, |
|
"loss": 1.8447, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.8274922370910645, |
|
"learning_rate": 4.751396287847556e-05, |
|
"loss": 1.9829, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.125004291534424, |
|
"learning_rate": 4.750704658685041e-05, |
|
"loss": 2.1189, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 88.02458953857422, |
|
"learning_rate": 4.750012119264866e-05, |
|
"loss": 2.101, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 10.099868774414062, |
|
"learning_rate": 4.749318669867115e-05, |
|
"loss": 2.1791, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 7.033977508544922, |
|
"learning_rate": 4.748624310772242e-05, |
|
"loss": 2.0594, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.70294189453125, |
|
"learning_rate": 4.747929042261068e-05, |
|
"loss": 1.9056, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.636380672454834, |
|
"learning_rate": 4.7472328646147814e-05, |
|
"loss": 2.0761, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.38163948059082, |
|
"learning_rate": 4.74653577811494e-05, |
|
"loss": 1.9754, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.055973529815674, |
|
"learning_rate": 4.7458377830434676e-05, |
|
"loss": 1.8625, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 5.357255458831787, |
|
"learning_rate": 4.7451388796826545e-05, |
|
"loss": 1.8988, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 6.224727153778076, |
|
"learning_rate": 4.744439068315163e-05, |
|
"loss": 1.9152, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.7499165534973145, |
|
"learning_rate": 4.743738349224016e-05, |
|
"loss": 2.3241, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 6.902726650238037, |
|
"learning_rate": 4.74303672269261e-05, |
|
"loss": 2.0803, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 9.815139770507812, |
|
"learning_rate": 4.742334189004704e-05, |
|
"loss": 2.0549, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.603668689727783, |
|
"learning_rate": 4.741630748444424e-05, |
|
"loss": 2.1398, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.725936412811279, |
|
"learning_rate": 4.740926401296266e-05, |
|
"loss": 1.97, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 6.910879611968994, |
|
"learning_rate": 4.74022114784509e-05, |
|
"loss": 1.8817, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.222038745880127, |
|
"learning_rate": 4.7395149883761235e-05, |
|
"loss": 1.7971, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.179903507232666, |
|
"learning_rate": 4.73880792317496e-05, |
|
"loss": 1.8004, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.983460903167725, |
|
"learning_rate": 4.738099952527559e-05, |
|
"loss": 2.1015, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.896063804626465, |
|
"learning_rate": 4.737391076720249e-05, |
|
"loss": 2.1134, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.984912872314453, |
|
"learning_rate": 4.736681296039719e-05, |
|
"loss": 2.1292, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.538473606109619, |
|
"learning_rate": 4.7359706107730295e-05, |
|
"loss": 1.9702, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.9707231521606445, |
|
"learning_rate": 4.7352590212076034e-05, |
|
"loss": 1.771, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.303897380828857, |
|
"learning_rate": 4.734546527631232e-05, |
|
"loss": 2.0974, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.653863906860352, |
|
"learning_rate": 4.733833130332068e-05, |
|
"loss": 1.8675, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 7.186466217041016, |
|
"learning_rate": 4.733118829598635e-05, |
|
"loss": 2.1016, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.940579414367676, |
|
"learning_rate": 4.732403625719818e-05, |
|
"loss": 2.1889, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.717761516571045, |
|
"learning_rate": 4.73168751898487e-05, |
|
"loss": 1.7199, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 6.8094892501831055, |
|
"learning_rate": 4.730970509683406e-05, |
|
"loss": 1.958, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 6.522322177886963, |
|
"learning_rate": 4.730252598105407e-05, |
|
"loss": 1.9033, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.876448631286621, |
|
"learning_rate": 4.7295337845412216e-05, |
|
"loss": 1.9084, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.507818222045898, |
|
"learning_rate": 4.7288140692815605e-05, |
|
"loss": 1.8546, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 6.461522102355957, |
|
"learning_rate": 4.7280934526175e-05, |
|
"loss": 1.9473, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 5.854064464569092, |
|
"learning_rate": 4.727371934840481e-05, |
|
"loss": 1.8768, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 6.8991923332214355, |
|
"learning_rate": 4.726649516242307e-05, |
|
"loss": 2.0273, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 4.412707805633545, |
|
"learning_rate": 4.7259261971151494e-05, |
|
"loss": 2.057, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.540420055389404, |
|
"learning_rate": 4.725201977751541e-05, |
|
"loss": 1.7884, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.3580241203308105, |
|
"learning_rate": 4.724476858444379e-05, |
|
"loss": 1.9676, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.679504871368408, |
|
"learning_rate": 4.723750839486926e-05, |
|
"loss": 2.0706, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.0920538902282715, |
|
"learning_rate": 4.723023921172807e-05, |
|
"loss": 1.9423, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.097662448883057, |
|
"learning_rate": 4.722296103796011e-05, |
|
"loss": 1.8774, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 8.337467193603516, |
|
"learning_rate": 4.721567387650892e-05, |
|
"loss": 2.0885, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"eval_loss": 1.678551435470581, |
|
"eval_runtime": 423.6293, |
|
"eval_samples_per_second": 50.093, |
|
"eval_steps_per_second": 0.392, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.902133941650391, |
|
"learning_rate": 4.7208377730321643e-05, |
|
"loss": 2.2287, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 7.351017951965332, |
|
"learning_rate": 4.72010726023491e-05, |
|
"loss": 2.1598, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.693913459777832, |
|
"learning_rate": 4.71937584955457e-05, |
|
"loss": 1.8859, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.01932954788208, |
|
"learning_rate": 4.7186435412869515e-05, |
|
"loss": 1.9043, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 24.060230255126953, |
|
"learning_rate": 4.7179103357282236e-05, |
|
"loss": 1.9134, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.253117561340332, |
|
"learning_rate": 4.717176233174918e-05, |
|
"loss": 2.0946, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.719861030578613, |
|
"learning_rate": 4.71644123392393e-05, |
|
"loss": 1.9406, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 11.881241798400879, |
|
"learning_rate": 4.7157053382725164e-05, |
|
"loss": 2.2027, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.93878173828125, |
|
"learning_rate": 4.714968546518297e-05, |
|
"loss": 1.914, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.93536376953125, |
|
"learning_rate": 4.7142308589592556e-05, |
|
"loss": 2.1651, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.014473915100098, |
|
"learning_rate": 4.7134922758937355e-05, |
|
"loss": 1.9013, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 25.88618278503418, |
|
"learning_rate": 4.712752797620444e-05, |
|
"loss": 2.01, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 6.001585483551025, |
|
"learning_rate": 4.71201242443845e-05, |
|
"loss": 1.7434, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 7.327158451080322, |
|
"learning_rate": 4.7112711566471834e-05, |
|
"loss": 2.107, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 4.623456954956055, |
|
"learning_rate": 4.7105289945464384e-05, |
|
"loss": 1.953, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.4558868408203125, |
|
"learning_rate": 4.7097859384363686e-05, |
|
"loss": 1.8178, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.775446891784668, |
|
"learning_rate": 4.70904198861749e-05, |
|
"loss": 1.9418, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 10.967681884765625, |
|
"learning_rate": 4.708297145390679e-05, |
|
"loss": 1.993, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 5.730310916900635, |
|
"learning_rate": 4.707551409057176e-05, |
|
"loss": 2.0054, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 9.371408462524414, |
|
"learning_rate": 4.70680477991858e-05, |
|
"loss": 2.0018, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.070098876953125, |
|
"learning_rate": 4.706057258276851e-05, |
|
"loss": 2.0201, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 9.502942085266113, |
|
"learning_rate": 4.705308844434313e-05, |
|
"loss": 2.1858, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 10.906458854675293, |
|
"learning_rate": 4.704559538693647e-05, |
|
"loss": 1.9434, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.904813289642334, |
|
"learning_rate": 4.7038093413578975e-05, |
|
"loss": 1.8493, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 5.065293312072754, |
|
"learning_rate": 4.7030582527304675e-05, |
|
"loss": 1.7872, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 11.148314476013184, |
|
"learning_rate": 4.702306273115122e-05, |
|
"loss": 1.8762, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.202971458435059, |
|
"learning_rate": 4.701553402815986e-05, |
|
"loss": 2.0413, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 10.41578197479248, |
|
"learning_rate": 4.700799642137544e-05, |
|
"loss": 1.9312, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 4.611337184906006, |
|
"learning_rate": 4.7000449913846424e-05, |
|
"loss": 1.9724, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 49.853824615478516, |
|
"learning_rate": 4.699289450862485e-05, |
|
"loss": 2.0025, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.44148063659668, |
|
"learning_rate": 4.6985330208766375e-05, |
|
"loss": 1.8576, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.226993083953857, |
|
"learning_rate": 4.6977757017330245e-05, |
|
"loss": 1.8268, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 7.233063220977783, |
|
"learning_rate": 4.69701749373793e-05, |
|
"loss": 1.8525, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 7.900531768798828, |
|
"learning_rate": 4.696258397197998e-05, |
|
"loss": 2.1743, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.996172904968262, |
|
"learning_rate": 4.695498412420232e-05, |
|
"loss": 1.7183, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 5.938333034515381, |
|
"learning_rate": 4.694737539711994e-05, |
|
"loss": 1.9403, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 5.985813140869141, |
|
"learning_rate": 4.6939757793810055e-05, |
|
"loss": 1.9518, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 7.172812461853027, |
|
"learning_rate": 4.693213131735348e-05, |
|
"loss": 2.0953, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 41.72245407104492, |
|
"learning_rate": 4.692449597083459e-05, |
|
"loss": 2.0219, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 5.960461616516113, |
|
"learning_rate": 4.691685175734138e-05, |
|
"loss": 1.9091, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 5.79845666885376, |
|
"learning_rate": 4.6909198679965405e-05, |
|
"loss": 1.9832, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 9.317214965820312, |
|
"learning_rate": 4.690153674180183e-05, |
|
"loss": 2.2419, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 6.582641124725342, |
|
"learning_rate": 4.689386594594938e-05, |
|
"loss": 1.9132, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 4.3012471199035645, |
|
"learning_rate": 4.688618629551038e-05, |
|
"loss": 1.9446, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 7.338431358337402, |
|
"learning_rate": 4.6878497793590725e-05, |
|
"loss": 1.8855, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.319138526916504, |
|
"learning_rate": 4.6870800443299896e-05, |
|
"loss": 1.8556, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 7.105714321136475, |
|
"learning_rate": 4.686309424775094e-05, |
|
"loss": 2.1338, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.138299942016602, |
|
"learning_rate": 4.6855379210060506e-05, |
|
"loss": 1.8654, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.727604866027832, |
|
"learning_rate": 4.684765533334879e-05, |
|
"loss": 2.1185, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 4.990983009338379, |
|
"learning_rate": 4.68399226207396e-05, |
|
"loss": 1.8292, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"eval_loss": 1.639684796333313, |
|
"eval_runtime": 422.9949, |
|
"eval_samples_per_second": 50.168, |
|
"eval_steps_per_second": 0.392, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.5144429206848145, |
|
"learning_rate": 4.6832181075360274e-05, |
|
"loss": 2.1007, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.92350959777832, |
|
"learning_rate": 4.6824430700341736e-05, |
|
"loss": 1.9391, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.39557409286499, |
|
"learning_rate": 4.681667149881852e-05, |
|
"loss": 1.8487, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 4.977539539337158, |
|
"learning_rate": 4.680890347392867e-05, |
|
"loss": 1.8729, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.647794723510742, |
|
"learning_rate": 4.680112662881383e-05, |
|
"loss": 2.031, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.776614189147949, |
|
"learning_rate": 4.6793340966619214e-05, |
|
"loss": 1.8169, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.00407600402832, |
|
"learning_rate": 4.678554649049359e-05, |
|
"loss": 1.8746, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 4.949479103088379, |
|
"learning_rate": 4.677774320358931e-05, |
|
"loss": 1.7223, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.128554344177246, |
|
"learning_rate": 4.676993110906225e-05, |
|
"loss": 1.7069, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.835212230682373, |
|
"learning_rate": 4.6762110210071894e-05, |
|
"loss": 2.1278, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.878398895263672, |
|
"learning_rate": 4.675428050978127e-05, |
|
"loss": 1.9742, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.689033508300781, |
|
"learning_rate": 4.674644201135694e-05, |
|
"loss": 2.0122, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.20786190032959, |
|
"learning_rate": 4.673859471796905e-05, |
|
"loss": 1.7227, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 5.527268409729004, |
|
"learning_rate": 4.6730738632791325e-05, |
|
"loss": 1.9288, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 10.17988109588623, |
|
"learning_rate": 4.672287375900099e-05, |
|
"loss": 1.9036, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.694543361663818, |
|
"learning_rate": 4.6715000099778864e-05, |
|
"loss": 1.9696, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 9.240964889526367, |
|
"learning_rate": 4.6707117658309316e-05, |
|
"loss": 1.9683, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 10.630289077758789, |
|
"learning_rate": 4.669922643778025e-05, |
|
"loss": 1.8037, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 6.575771331787109, |
|
"learning_rate": 4.669132644138314e-05, |
|
"loss": 1.9856, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.165228843688965, |
|
"learning_rate": 4.668341767231299e-05, |
|
"loss": 2.033, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 29.935487747192383, |
|
"learning_rate": 4.6675500133768367e-05, |
|
"loss": 1.9919, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 5.627548694610596, |
|
"learning_rate": 4.6667573828951375e-05, |
|
"loss": 2.0417, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 7.543898582458496, |
|
"learning_rate": 4.665963876106767e-05, |
|
"loss": 1.9762, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 12.477391242980957, |
|
"learning_rate": 4.665169493332646e-05, |
|
"loss": 1.8202, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 8.293655395507812, |
|
"learning_rate": 4.6643742348940464e-05, |
|
"loss": 1.9062, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.157840251922607, |
|
"learning_rate": 4.663578101112599e-05, |
|
"loss": 1.8978, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 5.024789810180664, |
|
"learning_rate": 4.662781092310283e-05, |
|
"loss": 1.658, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 8.7192964553833, |
|
"learning_rate": 4.661983208809437e-05, |
|
"loss": 1.8223, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.595676898956299, |
|
"learning_rate": 4.6611844509327505e-05, |
|
"loss": 1.8821, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 5.086737155914307, |
|
"learning_rate": 4.660384819003266e-05, |
|
"loss": 1.8998, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 4.518343448638916, |
|
"learning_rate": 4.65958431334438e-05, |
|
"loss": 1.8404, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 5.972059726715088, |
|
"learning_rate": 4.6587829342798456e-05, |
|
"loss": 1.951, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.087925910949707, |
|
"learning_rate": 4.6579806821337634e-05, |
|
"loss": 1.7567, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 7.334329128265381, |
|
"learning_rate": 4.657177557230592e-05, |
|
"loss": 1.8196, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 4.589081287384033, |
|
"learning_rate": 4.6563735598951395e-05, |
|
"loss": 1.9163, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.94594144821167, |
|
"learning_rate": 4.6555686904525695e-05, |
|
"loss": 1.9189, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 7.6889119148254395, |
|
"learning_rate": 4.6547629492283976e-05, |
|
"loss": 1.9279, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.893225193023682, |
|
"learning_rate": 4.65395633654849e-05, |
|
"loss": 1.8149, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 7.492513179779053, |
|
"learning_rate": 4.653148852739068e-05, |
|
"loss": 1.8264, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 4.651477813720703, |
|
"learning_rate": 4.6523404981267036e-05, |
|
"loss": 1.7099, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 5.677443504333496, |
|
"learning_rate": 4.651531273038323e-05, |
|
"loss": 2.0826, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 5.65981388092041, |
|
"learning_rate": 4.6507211778012004e-05, |
|
"loss": 1.8778, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 6.33742094039917, |
|
"learning_rate": 4.6499102127429665e-05, |
|
"loss": 1.976, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 7.806815147399902, |
|
"learning_rate": 4.6490983781916015e-05, |
|
"loss": 1.8781, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.299323081970215, |
|
"learning_rate": 4.6482856744754364e-05, |
|
"loss": 1.8524, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.100677490234375, |
|
"learning_rate": 4.6474721019231566e-05, |
|
"loss": 2.0033, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 4.778229236602783, |
|
"learning_rate": 4.646657660863796e-05, |
|
"loss": 1.8675, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 6.6370930671691895, |
|
"learning_rate": 4.64584235162674e-05, |
|
"loss": 1.8061, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 8.557727813720703, |
|
"learning_rate": 4.6450261745417276e-05, |
|
"loss": 2.016, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.485627174377441, |
|
"learning_rate": 4.6442091299388467e-05, |
|
"loss": 1.7278, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"eval_loss": 1.5960439443588257, |
|
"eval_runtime": 423.3373, |
|
"eval_samples_per_second": 50.128, |
|
"eval_steps_per_second": 0.392, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.613509654998779, |
|
"learning_rate": 4.643391218148536e-05, |
|
"loss": 1.9552, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.298041820526123, |
|
"learning_rate": 4.6425724395015865e-05, |
|
"loss": 2.0533, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.571812629699707, |
|
"learning_rate": 4.641752794329137e-05, |
|
"loss": 2.0098, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.494356155395508, |
|
"learning_rate": 4.64093228296268e-05, |
|
"loss": 1.902, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 6.5204315185546875, |
|
"learning_rate": 4.640110905734057e-05, |
|
"loss": 1.9357, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.17507791519165, |
|
"learning_rate": 4.6392886629754586e-05, |
|
"loss": 1.6532, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 4.883286476135254, |
|
"learning_rate": 4.6384655550194256e-05, |
|
"loss": 1.8173, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.756780624389648, |
|
"learning_rate": 4.6376415821988514e-05, |
|
"loss": 1.6706, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.555366516113281, |
|
"learning_rate": 4.636816744846976e-05, |
|
"loss": 1.8445, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 6.893411159515381, |
|
"learning_rate": 4.6359910432973905e-05, |
|
"loss": 1.8346, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 4.471080780029297, |
|
"learning_rate": 4.635164477884035e-05, |
|
"loss": 1.6428, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 6.882155895233154, |
|
"learning_rate": 4.6343370489412e-05, |
|
"loss": 1.7322, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 18.208683013916016, |
|
"learning_rate": 4.633508756803523e-05, |
|
"loss": 1.8269, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 14.348465919494629, |
|
"learning_rate": 4.6326796018059936e-05, |
|
"loss": 1.8359, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.5844831466674805, |
|
"learning_rate": 4.631849584283949e-05, |
|
"loss": 1.9765, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.831000328063965, |
|
"learning_rate": 4.631018704573074e-05, |
|
"loss": 1.7907, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 5.3236188888549805, |
|
"learning_rate": 4.630186963009403e-05, |
|
"loss": 1.7845, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 8.6161527633667, |
|
"learning_rate": 4.629354359929321e-05, |
|
"loss": 1.8121, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 7.705484390258789, |
|
"learning_rate": 4.628520895669558e-05, |
|
"loss": 2.0315, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 6.727534294128418, |
|
"learning_rate": 4.627686570567194e-05, |
|
"loss": 1.8766, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 6.244938850402832, |
|
"learning_rate": 4.626851384959657e-05, |
|
"loss": 1.7736, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 4.889083385467529, |
|
"learning_rate": 4.626015339184724e-05, |
|
"loss": 1.7259, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.227889537811279, |
|
"learning_rate": 4.625178433580518e-05, |
|
"loss": 1.704, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 6.0999250411987305, |
|
"learning_rate": 4.624340668485511e-05, |
|
"loss": 1.9576, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 14.676617622375488, |
|
"learning_rate": 4.623502044238521e-05, |
|
"loss": 1.8773, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.836556911468506, |
|
"learning_rate": 4.6226625611787175e-05, |
|
"loss": 1.8428, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 7.364138126373291, |
|
"learning_rate": 4.621822219645612e-05, |
|
"loss": 1.8232, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 12.476197242736816, |
|
"learning_rate": 4.620981019979067e-05, |
|
"loss": 2.0284, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 33.210792541503906, |
|
"learning_rate": 4.62013896251929e-05, |
|
"loss": 1.7304, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 7.111621856689453, |
|
"learning_rate": 4.619296047606837e-05, |
|
"loss": 1.9921, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 8.426152229309082, |
|
"learning_rate": 4.6184522755826105e-05, |
|
"loss": 1.6146, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 19.042428970336914, |
|
"learning_rate": 4.617607646787858e-05, |
|
"loss": 1.788, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 34.727783203125, |
|
"learning_rate": 4.6167621615641746e-05, |
|
"loss": 1.8662, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 11.465919494628906, |
|
"learning_rate": 4.6159158202535026e-05, |
|
"loss": 1.8507, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 4.54009485244751, |
|
"learning_rate": 4.61506862319813e-05, |
|
"loss": 1.6138, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 8.663006782531738, |
|
"learning_rate": 4.614220570740689e-05, |
|
"loss": 1.7006, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 12.138349533081055, |
|
"learning_rate": 4.613371663224162e-05, |
|
"loss": 2.0698, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.74115514755249, |
|
"learning_rate": 4.612521900991872e-05, |
|
"loss": 1.7282, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 11.925437927246094, |
|
"learning_rate": 4.611671284387492e-05, |
|
"loss": 1.6966, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 8.422083854675293, |
|
"learning_rate": 4.610819813755038e-05, |
|
"loss": 1.9501, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.11015510559082, |
|
"learning_rate": 4.609967489438873e-05, |
|
"loss": 1.6498, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 4.888397693634033, |
|
"learning_rate": 4.609114311783703e-05, |
|
"loss": 1.6546, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 17.405601501464844, |
|
"learning_rate": 4.608260281134582e-05, |
|
"loss": 1.7784, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 10.067267417907715, |
|
"learning_rate": 4.607405397836908e-05, |
|
"loss": 1.8552, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 5.969694137573242, |
|
"learning_rate": 4.6065496622364224e-05, |
|
"loss": 1.8743, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.426912784576416, |
|
"learning_rate": 4.605693074679211e-05, |
|
"loss": 1.8538, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 7.665450096130371, |
|
"learning_rate": 4.604835635511709e-05, |
|
"loss": 1.8622, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.462819576263428, |
|
"learning_rate": 4.60397734508069e-05, |
|
"loss": 2.0098, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.3902082443237305, |
|
"learning_rate": 4.6031182037332745e-05, |
|
"loss": 1.7431, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 4.557443618774414, |
|
"learning_rate": 4.602258211816927e-05, |
|
"loss": 1.6484, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"eval_loss": 1.5718907117843628, |
|
"eval_runtime": 423.3203, |
|
"eval_samples_per_second": 50.13, |
|
"eval_steps_per_second": 0.392, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.951988697052002, |
|
"learning_rate": 4.601397369679457e-05, |
|
"loss": 1.8044, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 7.188987731933594, |
|
"learning_rate": 4.600535677669016e-05, |
|
"loss": 1.8985, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.467220783233643, |
|
"learning_rate": 4.5996731361340994e-05, |
|
"loss": 1.7658, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.105921268463135, |
|
"learning_rate": 4.598809745423549e-05, |
|
"loss": 1.8672, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.458418846130371, |
|
"learning_rate": 4.597945505886546e-05, |
|
"loss": 2.0975, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.801342010498047, |
|
"learning_rate": 4.5970804178726176e-05, |
|
"loss": 1.9141, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.686854839324951, |
|
"learning_rate": 4.596214481731634e-05, |
|
"loss": 1.7676, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 7.215950012207031, |
|
"learning_rate": 4.595347697813806e-05, |
|
"loss": 1.775, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 7.41530179977417, |
|
"learning_rate": 4.59448006646969e-05, |
|
"loss": 1.79, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 4.15520715713501, |
|
"learning_rate": 4.5936115880501845e-05, |
|
"loss": 1.8232, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.490520000457764, |
|
"learning_rate": 4.59274226290653e-05, |
|
"loss": 1.7322, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.287522792816162, |
|
"learning_rate": 4.591872091390309e-05, |
|
"loss": 1.8003, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 20.15962791442871, |
|
"learning_rate": 4.591001073853448e-05, |
|
"loss": 1.8694, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 3.3742175102233887, |
|
"learning_rate": 4.590129210648213e-05, |
|
"loss": 1.6749, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.802846908569336, |
|
"learning_rate": 4.5892565021272154e-05, |
|
"loss": 1.8907, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 11.191157341003418, |
|
"learning_rate": 4.588382948643406e-05, |
|
"loss": 1.996, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 6.665444850921631, |
|
"learning_rate": 4.587508550550078e-05, |
|
"loss": 1.8376, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.446608543395996, |
|
"learning_rate": 4.586633308200866e-05, |
|
"loss": 1.8742, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.561806678771973, |
|
"learning_rate": 4.585757221949747e-05, |
|
"loss": 1.9791, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 5.850062847137451, |
|
"learning_rate": 4.584880292151038e-05, |
|
"loss": 1.786, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 6.454400539398193, |
|
"learning_rate": 4.5840025191593984e-05, |
|
"loss": 1.755, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.614055633544922, |
|
"learning_rate": 4.583123903329827e-05, |
|
"loss": 1.9708, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.978786468505859, |
|
"learning_rate": 4.582244445017665e-05, |
|
"loss": 1.6388, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.441361427307129, |
|
"learning_rate": 4.581364144578593e-05, |
|
"loss": 1.7346, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.76068115234375, |
|
"learning_rate": 4.580483002368634e-05, |
|
"loss": 1.9962, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 6.45198392868042, |
|
"learning_rate": 4.57960101874415e-05, |
|
"loss": 1.7792, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.766374111175537, |
|
"learning_rate": 4.5787181940618435e-05, |
|
"loss": 1.5923, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.983777046203613, |
|
"learning_rate": 4.577834528678757e-05, |
|
"loss": 1.8081, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.470437526702881, |
|
"learning_rate": 4.576950022952274e-05, |
|
"loss": 1.7369, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.393801212310791, |
|
"learning_rate": 4.5760646772401175e-05, |
|
"loss": 1.8261, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 15.60527229309082, |
|
"learning_rate": 4.575178491900349e-05, |
|
"loss": 1.6928, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 6.9888916015625, |
|
"learning_rate": 4.574291467291371e-05, |
|
"loss": 1.9217, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.985001564025879, |
|
"learning_rate": 4.5734036037719236e-05, |
|
"loss": 1.7271, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.762869834899902, |
|
"learning_rate": 4.5725149017010896e-05, |
|
"loss": 1.686, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.406029224395752, |
|
"learning_rate": 4.5716253614382866e-05, |
|
"loss": 1.7915, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.7803168296813965, |
|
"learning_rate": 4.570734983343276e-05, |
|
"loss": 1.9539, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 7.472147464752197, |
|
"learning_rate": 4.569843767776153e-05, |
|
"loss": 1.7136, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 7.438866138458252, |
|
"learning_rate": 4.568951715097356e-05, |
|
"loss": 1.7875, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 6.272189140319824, |
|
"learning_rate": 4.568058825667658e-05, |
|
"loss": 1.8589, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.803892612457275, |
|
"learning_rate": 4.5671650998481735e-05, |
|
"loss": 1.5895, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 7.044991493225098, |
|
"learning_rate": 4.566270538000354e-05, |
|
"loss": 1.7368, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 6.217586040496826, |
|
"learning_rate": 4.565375140485989e-05, |
|
"loss": 1.6122, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 4.539994239807129, |
|
"learning_rate": 4.564478907667207e-05, |
|
"loss": 1.5242, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 5.166835784912109, |
|
"learning_rate": 4.563581839906471e-05, |
|
"loss": 1.8427, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 6.602473258972168, |
|
"learning_rate": 4.5626839375665876e-05, |
|
"loss": 1.8977, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 3.6593120098114014, |
|
"learning_rate": 4.561785201010695e-05, |
|
"loss": 1.5374, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.412270545959473, |
|
"learning_rate": 4.560885630602272e-05, |
|
"loss": 1.6548, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 7.576125621795654, |
|
"learning_rate": 4.559985226705135e-05, |
|
"loss": 1.6514, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.029335975646973, |
|
"learning_rate": 4.559083989683434e-05, |
|
"loss": 1.7833, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.206794738769531, |
|
"learning_rate": 4.558181919901661e-05, |
|
"loss": 1.8478, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_loss": 1.5044246912002563, |
|
"eval_runtime": 423.6381, |
|
"eval_samples_per_second": 50.092, |
|
"eval_steps_per_second": 0.392, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.662048816680908, |
|
"learning_rate": 4.5572790177246406e-05, |
|
"loss": 1.6828, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.3925862312316895, |
|
"learning_rate": 4.5563752835175354e-05, |
|
"loss": 1.7448, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.040633201599121, |
|
"learning_rate": 4.555470717645846e-05, |
|
"loss": 1.6673, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.8295183181762695, |
|
"learning_rate": 4.5545653204754065e-05, |
|
"loss": 1.5644, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.955106735229492, |
|
"learning_rate": 4.5536590923723906e-05, |
|
"loss": 1.819, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.5708818435668945, |
|
"learning_rate": 4.552752033703305e-05, |
|
"loss": 1.7287, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.586352825164795, |
|
"learning_rate": 4.5518441448349935e-05, |
|
"loss": 1.5666, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.7832932472229, |
|
"learning_rate": 4.550935426134636e-05, |
|
"loss": 1.9005, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.757806777954102, |
|
"learning_rate": 4.550025877969748e-05, |
|
"loss": 1.7659, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.320318698883057, |
|
"learning_rate": 4.54911550070818e-05, |
|
"loss": 1.6198, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.772813320159912, |
|
"learning_rate": 4.548204294718117e-05, |
|
"loss": 2.1057, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.337530612945557, |
|
"learning_rate": 4.5472922603680805e-05, |
|
"loss": 1.8698, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 4.934597015380859, |
|
"learning_rate": 4.5463793980269276e-05, |
|
"loss": 1.7916, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.132730007171631, |
|
"learning_rate": 4.5454657080638485e-05, |
|
"loss": 1.6805, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.090786933898926, |
|
"learning_rate": 4.54455119084837e-05, |
|
"loss": 1.8776, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.216485023498535, |
|
"learning_rate": 4.54363584675035e-05, |
|
"loss": 1.6573, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.11155891418457, |
|
"learning_rate": 4.542719676139987e-05, |
|
"loss": 1.6054, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 4.956602096557617, |
|
"learning_rate": 4.541802679387806e-05, |
|
"loss": 1.6549, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 4.46177339553833, |
|
"learning_rate": 4.540884856864672e-05, |
|
"loss": 1.6097, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 6.2392578125, |
|
"learning_rate": 4.539966208941783e-05, |
|
"loss": 1.9215, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 5.628352165222168, |
|
"learning_rate": 4.539046735990667e-05, |
|
"loss": 1.7484, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 8.802276611328125, |
|
"learning_rate": 4.538126438383192e-05, |
|
"loss": 1.5186, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.957813262939453, |
|
"learning_rate": 4.537205316491554e-05, |
|
"loss": 1.6704, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.49714994430542, |
|
"learning_rate": 4.536283370688286e-05, |
|
"loss": 1.7561, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.772355556488037, |
|
"learning_rate": 4.53536060134625e-05, |
|
"loss": 1.6856, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.173638343811035, |
|
"learning_rate": 4.5344370088386455e-05, |
|
"loss": 1.6953, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.821722030639648, |
|
"learning_rate": 4.533512593539004e-05, |
|
"loss": 2.0994, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.338065147399902, |
|
"learning_rate": 4.5325873558211875e-05, |
|
"loss": 1.7117, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.492509365081787, |
|
"learning_rate": 4.531661296059392e-05, |
|
"loss": 1.7399, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.744643688201904, |
|
"learning_rate": 4.530734414628146e-05, |
|
"loss": 1.7788, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.065397262573242, |
|
"learning_rate": 4.5298067119023114e-05, |
|
"loss": 1.603, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.232301235198975, |
|
"learning_rate": 4.5288781882570805e-05, |
|
"loss": 1.6918, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.795167446136475, |
|
"learning_rate": 4.527948844067977e-05, |
|
"loss": 1.8678, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.472856521606445, |
|
"learning_rate": 4.52701867971086e-05, |
|
"loss": 1.6887, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.1734843254089355, |
|
"learning_rate": 4.526087695561917e-05, |
|
"loss": 1.5962, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.166112422943115, |
|
"learning_rate": 4.525155891997668e-05, |
|
"loss": 1.5192, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.10178279876709, |
|
"learning_rate": 4.524223269394963e-05, |
|
"loss": 1.6182, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 7.099591255187988, |
|
"learning_rate": 4.5232898281309874e-05, |
|
"loss": 1.862, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.071378231048584, |
|
"learning_rate": 4.522355568583253e-05, |
|
"loss": 1.5438, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.862138748168945, |
|
"learning_rate": 4.521420491129605e-05, |
|
"loss": 1.7539, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.930329322814941, |
|
"learning_rate": 4.5204845961482204e-05, |
|
"loss": 1.7749, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.047465801239014, |
|
"learning_rate": 4.519547884017603e-05, |
|
"loss": 1.8615, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.920933723449707, |
|
"learning_rate": 4.518610355116592e-05, |
|
"loss": 1.5318, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 4.903204441070557, |
|
"learning_rate": 4.517672009824351e-05, |
|
"loss": 1.7197, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.1994242668151855, |
|
"learning_rate": 4.5167328485203796e-05, |
|
"loss": 1.623, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 5.404425621032715, |
|
"learning_rate": 4.515792871584505e-05, |
|
"loss": 1.6837, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.51246976852417, |
|
"learning_rate": 4.514852079396884e-05, |
|
"loss": 1.5511, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.787316799163818, |
|
"learning_rate": 4.513910472338002e-05, |
|
"loss": 1.6386, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.080197811126709, |
|
"learning_rate": 4.5129680507886764e-05, |
|
"loss": 1.5743, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.592625141143799, |
|
"learning_rate": 4.5120248151300535e-05, |
|
"loss": 1.8016, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"eval_loss": 1.4656002521514893, |
|
"eval_runtime": 423.3562, |
|
"eval_samples_per_second": 50.126, |
|
"eval_steps_per_second": 0.392, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.005305290222168, |
|
"learning_rate": 4.5110807657436075e-05, |
|
"loss": 1.6975, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.094872951507568, |
|
"learning_rate": 4.510135903011142e-05, |
|
"loss": 1.6844, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.709961414337158, |
|
"learning_rate": 4.509190227314791e-05, |
|
"loss": 1.7298, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.193809509277344, |
|
"learning_rate": 4.508243739037016e-05, |
|
"loss": 1.7358, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.351953983306885, |
|
"learning_rate": 4.507296438560607e-05, |
|
"loss": 1.4251, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.901798248291016, |
|
"learning_rate": 4.506348326268683e-05, |
|
"loss": 1.6316, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.888655185699463, |
|
"learning_rate": 4.505399402544692e-05, |
|
"loss": 1.6291, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.988673210144043, |
|
"learning_rate": 4.5044496677724086e-05, |
|
"loss": 1.7475, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.504331588745117, |
|
"learning_rate": 4.503499122335937e-05, |
|
"loss": 1.6634, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.307519912719727, |
|
"learning_rate": 4.5025477666197066e-05, |
|
"loss": 1.8217, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.95925760269165, |
|
"learning_rate": 4.501595601008479e-05, |
|
"loss": 1.7216, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 49.420379638671875, |
|
"learning_rate": 4.500642625887339e-05, |
|
"loss": 1.8224, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.760485649108887, |
|
"learning_rate": 4.499688841641701e-05, |
|
"loss": 1.6322, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.579456806182861, |
|
"learning_rate": 4.498734248657306e-05, |
|
"loss": 1.6197, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.184917449951172, |
|
"learning_rate": 4.497778847320223e-05, |
|
"loss": 1.6196, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.741893768310547, |
|
"learning_rate": 4.496822638016846e-05, |
|
"loss": 1.6867, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.965624809265137, |
|
"learning_rate": 4.495865621133897e-05, |
|
"loss": 1.3808, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.604211807250977, |
|
"learning_rate": 4.494907797058425e-05, |
|
"loss": 1.5266, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.604640483856201, |
|
"learning_rate": 4.493949166177806e-05, |
|
"loss": 1.6457, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 5.649612903594971, |
|
"learning_rate": 4.49298972887974e-05, |
|
"loss": 1.6705, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.1628031730651855, |
|
"learning_rate": 4.492029485552255e-05, |
|
"loss": 1.6294, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.706023216247559, |
|
"learning_rate": 4.491068436583703e-05, |
|
"loss": 1.5566, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.297560214996338, |
|
"learning_rate": 4.490106582362765e-05, |
|
"loss": 1.6313, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.6740803718566895, |
|
"learning_rate": 4.489143923278445e-05, |
|
"loss": 1.4474, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.918575286865234, |
|
"learning_rate": 4.488180459720074e-05, |
|
"loss": 1.5736, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.530802249908447, |
|
"learning_rate": 4.4872161920773075e-05, |
|
"loss": 1.683, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.266594409942627, |
|
"learning_rate": 4.486251120740127e-05, |
|
"loss": 1.6557, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.779808521270752, |
|
"learning_rate": 4.485285246098837e-05, |
|
"loss": 1.9857, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.6034088134765625, |
|
"learning_rate": 4.48431856854407e-05, |
|
"loss": 1.6863, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.2785186767578125, |
|
"learning_rate": 4.4833510884667805e-05, |
|
"loss": 1.6183, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.0368452072143555, |
|
"learning_rate": 4.482382806258249e-05, |
|
"loss": 1.8994, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.603665351867676, |
|
"learning_rate": 4.481413722310082e-05, |
|
"loss": 1.6505, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.930195331573486, |
|
"learning_rate": 4.480443837014205e-05, |
|
"loss": 1.7675, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.467337608337402, |
|
"learning_rate": 4.4794731507628734e-05, |
|
"loss": 1.6801, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 6.546854496002197, |
|
"learning_rate": 4.4785016639486635e-05, |
|
"loss": 1.7569, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.412653923034668, |
|
"learning_rate": 4.477529376964475e-05, |
|
"loss": 1.824, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.92755126953125, |
|
"learning_rate": 4.476556290203533e-05, |
|
"loss": 1.6851, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.570308208465576, |
|
"learning_rate": 4.475582404059385e-05, |
|
"loss": 1.7238, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.924283027648926, |
|
"learning_rate": 4.474607718925903e-05, |
|
"loss": 1.7165, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.563042640686035, |
|
"learning_rate": 4.473632235197279e-05, |
|
"loss": 1.4755, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 6.890353679656982, |
|
"learning_rate": 4.4726559532680326e-05, |
|
"loss": 1.7279, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.909914493560791, |
|
"learning_rate": 4.4716788735330016e-05, |
|
"loss": 1.6262, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.578766345977783, |
|
"learning_rate": 4.47070099638735e-05, |
|
"loss": 1.3737, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 10.624035835266113, |
|
"learning_rate": 4.4697223222265625e-05, |
|
"loss": 1.8623, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.210607051849365, |
|
"learning_rate": 4.4687428514464466e-05, |
|
"loss": 1.5453, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.816832065582275, |
|
"learning_rate": 4.467762584443131e-05, |
|
"loss": 1.6239, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.2711405754089355, |
|
"learning_rate": 4.4667815216130696e-05, |
|
"loss": 1.7399, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.340164661407471, |
|
"learning_rate": 4.465799663353034e-05, |
|
"loss": 1.8794, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.173604965209961, |
|
"learning_rate": 4.46481701006012e-05, |
|
"loss": 1.7089, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.420295238494873, |
|
"learning_rate": 4.463833562131743e-05, |
|
"loss": 1.4616, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"eval_loss": 1.3871608972549438, |
|
"eval_runtime": 423.5, |
|
"eval_samples_per_second": 50.109, |
|
"eval_steps_per_second": 0.392, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.479365348815918, |
|
"learning_rate": 4.462849319965643e-05, |
|
"loss": 1.6149, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 7.298757553100586, |
|
"learning_rate": 4.461864283959878e-05, |
|
"loss": 1.7906, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.9603776931762695, |
|
"learning_rate": 4.46087845451283e-05, |
|
"loss": 1.7409, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.1247758865356445, |
|
"learning_rate": 4.459891832023199e-05, |
|
"loss": 1.5607, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.38535737991333, |
|
"learning_rate": 4.458904416890006e-05, |
|
"loss": 1.5341, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.868686676025391, |
|
"learning_rate": 4.4579162095125965e-05, |
|
"loss": 1.7445, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.29506778717041, |
|
"learning_rate": 4.4569272102906314e-05, |
|
"loss": 1.5512, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.732799530029297, |
|
"learning_rate": 4.455937419624093e-05, |
|
"loss": 1.7647, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.685189723968506, |
|
"learning_rate": 4.454946837913287e-05, |
|
"loss": 1.7306, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.369485855102539, |
|
"learning_rate": 4.453955465558837e-05, |
|
"loss": 1.7788, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.7086992263793945, |
|
"learning_rate": 4.4529633029616824e-05, |
|
"loss": 1.673, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.129878997802734, |
|
"learning_rate": 4.451970350523089e-05, |
|
"loss": 1.7806, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.230447292327881, |
|
"learning_rate": 4.450976608644637e-05, |
|
"loss": 1.7927, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 4.560041904449463, |
|
"learning_rate": 4.4499820777282296e-05, |
|
"loss": 1.7338, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.671140670776367, |
|
"learning_rate": 4.4489867581760855e-05, |
|
"loss": 1.5663, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.239694595336914, |
|
"learning_rate": 4.447990650390745e-05, |
|
"loss": 1.6173, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.434264659881592, |
|
"learning_rate": 4.446993754775066e-05, |
|
"loss": 1.4899, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.034209251403809, |
|
"learning_rate": 4.445996071732226e-05, |
|
"loss": 1.818, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.072056770324707, |
|
"learning_rate": 4.4449976016657185e-05, |
|
"loss": 1.843, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 3.8844799995422363, |
|
"learning_rate": 4.4439983449793585e-05, |
|
"loss": 1.6013, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.102245807647705, |
|
"learning_rate": 4.442998302077277e-05, |
|
"loss": 1.5863, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.803674697875977, |
|
"learning_rate": 4.4419974733639244e-05, |
|
"loss": 1.6002, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.105443954467773, |
|
"learning_rate": 4.440995859244067e-05, |
|
"loss": 1.5296, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.575257301330566, |
|
"learning_rate": 4.439993460122791e-05, |
|
"loss": 1.6627, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.023557662963867, |
|
"learning_rate": 4.438990276405499e-05, |
|
"loss": 1.4708, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.555371284484863, |
|
"learning_rate": 4.43798630849791e-05, |
|
"loss": 1.4226, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.876458644866943, |
|
"learning_rate": 4.436981556806061e-05, |
|
"loss": 1.9134, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.92462158203125, |
|
"learning_rate": 4.435976021736307e-05, |
|
"loss": 1.5589, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.126759052276611, |
|
"learning_rate": 4.4349697036953186e-05, |
|
"loss": 1.5, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.3090500831604, |
|
"learning_rate": 4.433962603090083e-05, |
|
"loss": 1.7171, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.329404830932617, |
|
"learning_rate": 4.432954720327904e-05, |
|
"loss": 1.5186, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.43467378616333, |
|
"learning_rate": 4.431946055816403e-05, |
|
"loss": 1.6543, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.861675262451172, |
|
"learning_rate": 4.430936609963515e-05, |
|
"loss": 1.7306, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.122840404510498, |
|
"learning_rate": 4.429926383177494e-05, |
|
"loss": 1.4537, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 7.410120964050293, |
|
"learning_rate": 4.4289153758669075e-05, |
|
"loss": 1.7531, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.340906620025635, |
|
"learning_rate": 4.4279035884406395e-05, |
|
"loss": 1.7773, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.850555419921875, |
|
"learning_rate": 4.4268910213078896e-05, |
|
"loss": 1.6794, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 4.56842041015625, |
|
"learning_rate": 4.4258776748781725e-05, |
|
"loss": 1.5125, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.323385715484619, |
|
"learning_rate": 4.424863549561319e-05, |
|
"loss": 1.6437, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.173916339874268, |
|
"learning_rate": 4.423848645767473e-05, |
|
"loss": 1.5748, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.211900234222412, |
|
"learning_rate": 4.4228329639070954e-05, |
|
"loss": 1.5492, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.843668460845947, |
|
"learning_rate": 4.42181650439096e-05, |
|
"loss": 1.4393, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 7.61888313293457, |
|
"learning_rate": 4.4207992676301563e-05, |
|
"loss": 1.6835, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.9813103675842285, |
|
"learning_rate": 4.419781254036088e-05, |
|
"loss": 1.3988, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 3.993699073791504, |
|
"learning_rate": 4.4187624640204716e-05, |
|
"loss": 1.6954, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.204382419586182, |
|
"learning_rate": 4.4177428979953394e-05, |
|
"loss": 1.5512, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 7.01641321182251, |
|
"learning_rate": 4.416722556373037e-05, |
|
"loss": 1.5539, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.7084059715271, |
|
"learning_rate": 4.415701439566223e-05, |
|
"loss": 1.7911, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 7.116555213928223, |
|
"learning_rate": 4.4146795479878705e-05, |
|
"loss": 1.6712, |
|
"step": 11990 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.768601417541504, |
|
"learning_rate": 4.413656882051266e-05, |
|
"loss": 1.5553, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"eval_loss": 1.352138876914978, |
|
"eval_runtime": 423.4085, |
|
"eval_samples_per_second": 50.119, |
|
"eval_steps_per_second": 0.392, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.654323577880859, |
|
"learning_rate": 4.412633442170007e-05, |
|
"loss": 1.4904, |
|
"step": 12010 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.876643180847168, |
|
"learning_rate": 4.411609228758007e-05, |
|
"loss": 1.78, |
|
"step": 12020 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.229610443115234, |
|
"learning_rate": 4.41058424222949e-05, |
|
"loss": 1.4487, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.311570167541504, |
|
"learning_rate": 4.409558482998994e-05, |
|
"loss": 1.6014, |
|
"step": 12040 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.150774955749512, |
|
"learning_rate": 4.40853195148137e-05, |
|
"loss": 1.5214, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.989500522613525, |
|
"learning_rate": 4.407504648091779e-05, |
|
"loss": 1.6158, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.883471488952637, |
|
"learning_rate": 4.406476573245697e-05, |
|
"loss": 1.5151, |
|
"step": 12070 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.21392822265625, |
|
"learning_rate": 4.4054477273589115e-05, |
|
"loss": 1.6264, |
|
"step": 12080 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 7.5838236808776855, |
|
"learning_rate": 4.404418110847518e-05, |
|
"loss": 1.7264, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.685169696807861, |
|
"learning_rate": 4.403387724127929e-05, |
|
"loss": 1.6152, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.65505313873291, |
|
"learning_rate": 4.4023565676168655e-05, |
|
"loss": 1.6846, |
|
"step": 12110 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.558564186096191, |
|
"learning_rate": 4.4013246417313604e-05, |
|
"loss": 1.7264, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.605981349945068, |
|
"learning_rate": 4.400291946888758e-05, |
|
"loss": 1.5809, |
|
"step": 12130 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.0989670753479, |
|
"learning_rate": 4.399258483506714e-05, |
|
"loss": 1.2892, |
|
"step": 12140 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.56234884262085, |
|
"learning_rate": 4.398224252003193e-05, |
|
"loss": 1.4583, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.174875736236572, |
|
"learning_rate": 4.397189252796473e-05, |
|
"loss": 1.7491, |
|
"step": 12160 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.127572536468506, |
|
"learning_rate": 4.39615348630514e-05, |
|
"loss": 1.5392, |
|
"step": 12170 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.131232261657715, |
|
"learning_rate": 4.395116952948093e-05, |
|
"loss": 1.4782, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.0297980308532715, |
|
"learning_rate": 4.394079653144538e-05, |
|
"loss": 1.6276, |
|
"step": 12190 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.747319221496582, |
|
"learning_rate": 4.393041587313993e-05, |
|
"loss": 1.3473, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.469211101531982, |
|
"learning_rate": 4.3920027558762855e-05, |
|
"loss": 1.7429, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.476499080657959, |
|
"learning_rate": 4.390963159251552e-05, |
|
"loss": 1.7891, |
|
"step": 12220 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.6036200523376465, |
|
"learning_rate": 4.38992279786024e-05, |
|
"loss": 1.5147, |
|
"step": 12230 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 6.28443717956543, |
|
"learning_rate": 4.388881672123105e-05, |
|
"loss": 1.7051, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.224653244018555, |
|
"learning_rate": 4.38783978246121e-05, |
|
"loss": 1.445, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.014994144439697, |
|
"learning_rate": 4.386797129295932e-05, |
|
"loss": 1.6525, |
|
"step": 12260 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.2429118156433105, |
|
"learning_rate": 4.385753713048951e-05, |
|
"loss": 1.5287, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.849613189697266, |
|
"learning_rate": 4.38470953414226e-05, |
|
"loss": 1.497, |
|
"step": 12280 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.8472418785095215, |
|
"learning_rate": 4.383664592998158e-05, |
|
"loss": 1.6662, |
|
"step": 12290 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.683506965637207, |
|
"learning_rate": 4.382618890039252e-05, |
|
"loss": 1.5714, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.4897780418396, |
|
"learning_rate": 4.381572425688459e-05, |
|
"loss": 1.4527, |
|
"step": 12310 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.370377063751221, |
|
"learning_rate": 4.380525200369003e-05, |
|
"loss": 1.5004, |
|
"step": 12320 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.105432510375977, |
|
"learning_rate": 4.379477214504415e-05, |
|
"loss": 1.554, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.320200443267822, |
|
"learning_rate": 4.3784284685185364e-05, |
|
"loss": 1.6465, |
|
"step": 12340 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.400229454040527, |
|
"learning_rate": 4.377378962835511e-05, |
|
"loss": 1.4433, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.23899507522583, |
|
"learning_rate": 4.376328697879796e-05, |
|
"loss": 1.5693, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.675478935241699, |
|
"learning_rate": 4.375277674076149e-05, |
|
"loss": 1.3086, |
|
"step": 12370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.473839282989502, |
|
"learning_rate": 4.374225891849641e-05, |
|
"loss": 1.6332, |
|
"step": 12380 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 7.072377681732178, |
|
"learning_rate": 4.373173351625645e-05, |
|
"loss": 1.6003, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.378538131713867, |
|
"learning_rate": 4.372120053829842e-05, |
|
"loss": 1.7182, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.857398986816406, |
|
"learning_rate": 4.371065998888221e-05, |
|
"loss": 1.4698, |
|
"step": 12410 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.611136436462402, |
|
"learning_rate": 4.370011187227076e-05, |
|
"loss": 1.473, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.341707706451416, |
|
"learning_rate": 4.3689556192730044e-05, |
|
"loss": 1.8078, |
|
"step": 12430 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 5.309572696685791, |
|
"learning_rate": 4.3678992954529144e-05, |
|
"loss": 1.5006, |
|
"step": 12440 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 7.722212791442871, |
|
"learning_rate": 4.366842216194017e-05, |
|
"loss": 1.7005, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 4.579885482788086, |
|
"learning_rate": 4.3657843819238284e-05, |
|
"loss": 1.3846, |
|
"step": 12460 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 6.808528423309326, |
|
"learning_rate": 4.3647257930701724e-05, |
|
"loss": 1.6517, |
|
"step": 12470 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 3.950486183166504, |
|
"learning_rate": 4.3636664500611756e-05, |
|
"loss": 1.4428, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 5.783468723297119, |
|
"learning_rate": 4.362606353325271e-05, |
|
"loss": 1.698, |
|
"step": 12490 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 6.811148643493652, |
|
"learning_rate": 4.3615455032911946e-05, |
|
"loss": 1.8498, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 1.3111711740493774, |
|
"eval_runtime": 423.3291, |
|
"eval_samples_per_second": 50.129, |
|
"eval_steps_per_second": 0.392, |
|
"step": 12500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 50400, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"total_flos": 0.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|