|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.6582890285161915, |
|
"eval_steps": 500, |
|
"global_step": 11000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 2.0414, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 53.515953063964844, |
|
"learning_rate": 6.5e-07, |
|
"loss": 1.8355, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 27.920907974243164, |
|
"learning_rate": 1.15e-06, |
|
"loss": 1.5172, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 20.47172737121582, |
|
"learning_rate": 1.65e-06, |
|
"loss": 1.2654, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.903529167175293, |
|
"learning_rate": 2.1499999999999997e-06, |
|
"loss": 1.1095, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.902841567993164, |
|
"learning_rate": 2.65e-06, |
|
"loss": 1.0017, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 2.9340908527374268, |
|
"learning_rate": 3.1500000000000003e-06, |
|
"loss": 0.8846, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 8.701367378234863, |
|
"learning_rate": 3.6499999999999998e-06, |
|
"loss": 0.8572, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 1.3205362558364868, |
|
"learning_rate": 4.15e-06, |
|
"loss": 0.839, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 3.0334558486938477, |
|
"learning_rate": 4.65e-06, |
|
"loss": 0.7838, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 1.8086520433425903, |
|
"learning_rate": 5.15e-06, |
|
"loss": 0.7782, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 29.50135040283203, |
|
"learning_rate": 5.65e-06, |
|
"loss": 0.7883, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.2708683013916016, |
|
"learning_rate": 6.15e-06, |
|
"loss": 0.7961, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 33.43790817260742, |
|
"learning_rate": 6.650000000000001e-06, |
|
"loss": 0.7626, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.2334959506988525, |
|
"learning_rate": 7.15e-06, |
|
"loss": 0.7732, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 1.7489686012268066, |
|
"learning_rate": 7.65e-06, |
|
"loss": 0.7715, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.358823776245117, |
|
"learning_rate": 8.15e-06, |
|
"loss": 0.7778, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.348245620727539, |
|
"learning_rate": 8.65e-06, |
|
"loss": 0.7619, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.9023125767707825, |
|
"learning_rate": 9.15e-06, |
|
"loss": 0.7746, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.9158114790916443, |
|
"learning_rate": 9.65e-06, |
|
"loss": 0.7592, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.6256226301193237, |
|
"learning_rate": 1.0150000000000001e-05, |
|
"loss": 0.7787, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3805778920650482, |
|
"learning_rate": 1.065e-05, |
|
"loss": 0.7811, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.6968041062355042, |
|
"learning_rate": 1.115e-05, |
|
"loss": 0.73, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.9035410284996033, |
|
"learning_rate": 1.1650000000000002e-05, |
|
"loss": 0.7518, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.6727488040924072, |
|
"learning_rate": 1.215e-05, |
|
"loss": 0.7805, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.4153461158275604, |
|
"learning_rate": 1.2650000000000001e-05, |
|
"loss": 0.7706, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.4846651554107666, |
|
"learning_rate": 1.3150000000000001e-05, |
|
"loss": 0.7492, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.6631388664245605, |
|
"learning_rate": 1.3650000000000001e-05, |
|
"loss": 0.7681, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.6325013041496277, |
|
"learning_rate": 1.415e-05, |
|
"loss": 0.7753, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.877907395362854, |
|
"learning_rate": 1.465e-05, |
|
"loss": 0.7188, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.37142279744148254, |
|
"learning_rate": 1.515e-05, |
|
"loss": 0.7204, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.7337246537208557, |
|
"learning_rate": 1.565e-05, |
|
"loss": 0.778, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.5847220420837402, |
|
"learning_rate": 1.6150000000000003e-05, |
|
"loss": 0.7288, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.745180606842041, |
|
"learning_rate": 1.665e-05, |
|
"loss": 0.7531, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.357301265001297, |
|
"learning_rate": 1.7150000000000004e-05, |
|
"loss": 0.7448, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.9032486081123352, |
|
"learning_rate": 1.765e-05, |
|
"loss": 0.7651, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.2864232361316681, |
|
"learning_rate": 1.815e-05, |
|
"loss": 0.7193, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.8560261726379395, |
|
"learning_rate": 1.865e-05, |
|
"loss": 0.7421, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.2971792221069336, |
|
"learning_rate": 1.915e-05, |
|
"loss": 0.7171, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.40850627422332764, |
|
"learning_rate": 1.9650000000000003e-05, |
|
"loss": 0.7459, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3934139311313629, |
|
"learning_rate": 2.0150000000000002e-05, |
|
"loss": 0.7205, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8674131631851196, |
|
"learning_rate": 2.065e-05, |
|
"loss": 0.752, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.36707818508148193, |
|
"learning_rate": 2.115e-05, |
|
"loss": 0.7494, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.5044310092926025, |
|
"learning_rate": 2.165e-05, |
|
"loss": 0.7595, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.9025150537490845, |
|
"learning_rate": 2.215e-05, |
|
"loss": 0.7379, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.5004140138626099, |
|
"learning_rate": 2.265e-05, |
|
"loss": 0.7886, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.544482707977295, |
|
"learning_rate": 2.3150000000000004e-05, |
|
"loss": 0.7259, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.33023321628570557, |
|
"learning_rate": 2.365e-05, |
|
"loss": 0.7333, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3548080325126648, |
|
"learning_rate": 2.415e-05, |
|
"loss": 0.7527, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.42450150847435, |
|
"learning_rate": 2.465e-05, |
|
"loss": 0.7443, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"eval_loss": 0.7428915500640869, |
|
"eval_runtime": 68.3143, |
|
"eval_samples_per_second": 29.276, |
|
"eval_steps_per_second": 0.922, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3299656808376312, |
|
"learning_rate": 2.515e-05, |
|
"loss": 0.7447, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.27258285880088806, |
|
"learning_rate": 2.5650000000000003e-05, |
|
"loss": 0.7278, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.23753009736537933, |
|
"learning_rate": 2.6150000000000002e-05, |
|
"loss": 0.7721, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1074901819229126, |
|
"learning_rate": 2.6650000000000004e-05, |
|
"loss": 0.7383, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.428067147731781, |
|
"learning_rate": 2.7150000000000003e-05, |
|
"loss": 0.7508, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.35956478118896484, |
|
"learning_rate": 2.7650000000000005e-05, |
|
"loss": 0.7461, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.23017314076423645, |
|
"learning_rate": 2.815e-05, |
|
"loss": 0.7463, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.20370900630950928, |
|
"learning_rate": 2.865e-05, |
|
"loss": 0.7405, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.24586541950702667, |
|
"learning_rate": 2.915e-05, |
|
"loss": 0.6964, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.27857884764671326, |
|
"learning_rate": 2.965e-05, |
|
"loss": 0.7376, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2655833065509796, |
|
"learning_rate": 3.015e-05, |
|
"loss": 0.7496, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.17174670100212097, |
|
"learning_rate": 3.065e-05, |
|
"loss": 0.7219, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.22892743349075317, |
|
"learning_rate": 3.115e-05, |
|
"loss": 0.7111, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5913789868354797, |
|
"learning_rate": 3.1650000000000004e-05, |
|
"loss": 0.7097, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.25620004534721375, |
|
"learning_rate": 3.215e-05, |
|
"loss": 0.7306, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.3311476409435272, |
|
"learning_rate": 3.265e-05, |
|
"loss": 0.7406, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.4708893597126007, |
|
"learning_rate": 3.3150000000000006e-05, |
|
"loss": 0.7071, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.22469288110733032, |
|
"learning_rate": 3.3650000000000005e-05, |
|
"loss": 0.7116, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2909330725669861, |
|
"learning_rate": 3.415e-05, |
|
"loss": 0.7592, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.21274766325950623, |
|
"learning_rate": 3.465e-05, |
|
"loss": 0.7144, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.23929230868816376, |
|
"learning_rate": 3.515e-05, |
|
"loss": 0.7382, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.3021218180656433, |
|
"learning_rate": 3.565e-05, |
|
"loss": 0.7287, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.23557321727275848, |
|
"learning_rate": 3.615e-05, |
|
"loss": 0.7475, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.22648084163665771, |
|
"learning_rate": 3.665e-05, |
|
"loss": 0.7457, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2707761824131012, |
|
"learning_rate": 3.715e-05, |
|
"loss": 0.7322, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2799661457538605, |
|
"learning_rate": 3.765e-05, |
|
"loss": 0.7682, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.326861172914505, |
|
"learning_rate": 3.8150000000000006e-05, |
|
"loss": 0.73, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.31985723972320557, |
|
"learning_rate": 3.8650000000000004e-05, |
|
"loss": 0.744, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.21637533605098724, |
|
"learning_rate": 3.915e-05, |
|
"loss": 0.7161, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.26102888584136963, |
|
"learning_rate": 3.965e-05, |
|
"loss": 0.7553, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2469174712896347, |
|
"learning_rate": 4.015000000000001e-05, |
|
"loss": 0.7444, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2022310495376587, |
|
"learning_rate": 4.065e-05, |
|
"loss": 0.7089, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1729898750782013, |
|
"learning_rate": 4.115e-05, |
|
"loss": 0.7122, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.22450707852840424, |
|
"learning_rate": 4.165e-05, |
|
"loss": 0.7099, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.22389698028564453, |
|
"learning_rate": 4.215e-05, |
|
"loss": 0.7445, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.24962696433067322, |
|
"learning_rate": 4.265e-05, |
|
"loss": 0.7052, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.23780404031276703, |
|
"learning_rate": 4.315e-05, |
|
"loss": 0.7423, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2434270679950714, |
|
"learning_rate": 4.3650000000000004e-05, |
|
"loss": 0.7291, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3332350552082062, |
|
"learning_rate": 4.415e-05, |
|
"loss": 0.7306, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5084495544433594, |
|
"learning_rate": 4.465e-05, |
|
"loss": 0.735, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.27552103996276855, |
|
"learning_rate": 4.5150000000000006e-05, |
|
"loss": 0.7082, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.2184416800737381, |
|
"learning_rate": 4.5650000000000005e-05, |
|
"loss": 0.7066, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.21738290786743164, |
|
"learning_rate": 4.6150000000000004e-05, |
|
"loss": 0.6977, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.21888123452663422, |
|
"learning_rate": 4.665e-05, |
|
"loss": 0.7083, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2516074776649475, |
|
"learning_rate": 4.715e-05, |
|
"loss": 0.7451, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.39361557364463806, |
|
"learning_rate": 4.765e-05, |
|
"loss": 0.7013, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2461744099855423, |
|
"learning_rate": 4.815e-05, |
|
"loss": 0.7325, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2554089426994324, |
|
"learning_rate": 4.8650000000000003e-05, |
|
"loss": 0.7384, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2717374861240387, |
|
"learning_rate": 4.915e-05, |
|
"loss": 0.7172, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.35919347405433655, |
|
"learning_rate": 4.965e-05, |
|
"loss": 0.6851, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"eval_loss": 0.7169972658157349, |
|
"eval_runtime": 67.4497, |
|
"eval_samples_per_second": 29.652, |
|
"eval_steps_per_second": 0.934, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.19895273447036743, |
|
"learning_rate": 4.999999713608037e-05, |
|
"loss": 0.7104, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.21741873025894165, |
|
"learning_rate": 4.999994622197174e-05, |
|
"loss": 0.7217, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.3415576219558716, |
|
"learning_rate": 4.999983166535371e-05, |
|
"loss": 0.716, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.22503221035003662, |
|
"learning_rate": 4.99996534665179e-05, |
|
"loss": 0.7166, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.22469323873519897, |
|
"learning_rate": 4.999941162591795e-05, |
|
"loss": 0.7173, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.24848666787147522, |
|
"learning_rate": 4.999910614416952e-05, |
|
"loss": 0.7442, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.23361551761627197, |
|
"learning_rate": 4.999873702205027e-05, |
|
"loss": 0.7142, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2513304054737091, |
|
"learning_rate": 4.999830426049987e-05, |
|
"loss": 0.6998, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2846020758152008, |
|
"learning_rate": 4.999780786062003e-05, |
|
"loss": 0.6967, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2192545235157013, |
|
"learning_rate": 4.999724782367441e-05, |
|
"loss": 0.7259, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.37123605608940125, |
|
"learning_rate": 4.999662415108872e-05, |
|
"loss": 0.7228, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3963713049888611, |
|
"learning_rate": 4.999593684445063e-05, |
|
"loss": 0.6848, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.24653227627277374, |
|
"learning_rate": 4.9995185905509836e-05, |
|
"loss": 0.7181, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.25218069553375244, |
|
"learning_rate": 4.999437133617799e-05, |
|
"loss": 0.7263, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.20412948727607727, |
|
"learning_rate": 4.9993493138528765e-05, |
|
"loss": 0.6965, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.21635311841964722, |
|
"learning_rate": 4.9992551314797775e-05, |
|
"loss": 0.7042, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.21901027858257294, |
|
"learning_rate": 4.999154586738264e-05, |
|
"loss": 0.6967, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.24104245007038116, |
|
"learning_rate": 4.9990476798842935e-05, |
|
"loss": 0.7009, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2433982491493225, |
|
"learning_rate": 4.998934411190018e-05, |
|
"loss": 0.7052, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5261262655258179, |
|
"learning_rate": 4.9988147809437876e-05, |
|
"loss": 0.7103, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.36266064643859863, |
|
"learning_rate": 4.998688789450146e-05, |
|
"loss": 0.7028, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2509474754333496, |
|
"learning_rate": 4.9985564370298274e-05, |
|
"loss": 0.7047, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1925598382949829, |
|
"learning_rate": 4.9984177240197665e-05, |
|
"loss": 0.6943, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.22494031488895416, |
|
"learning_rate": 4.998272650773083e-05, |
|
"loss": 0.6953, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.24152888357639313, |
|
"learning_rate": 4.998121217659092e-05, |
|
"loss": 0.7181, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.24296504259109497, |
|
"learning_rate": 4.997963425063297e-05, |
|
"loss": 0.6934, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2438274323940277, |
|
"learning_rate": 4.9977992733873906e-05, |
|
"loss": 0.7088, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.21081580221652985, |
|
"learning_rate": 4.997628763049257e-05, |
|
"loss": 0.6833, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2111068218946457, |
|
"learning_rate": 4.9974518944829626e-05, |
|
"loss": 0.6922, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.309772253036499, |
|
"learning_rate": 4.997268668138766e-05, |
|
"loss": 0.7195, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2199670672416687, |
|
"learning_rate": 4.997079084483105e-05, |
|
"loss": 0.6924, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.21781674027442932, |
|
"learning_rate": 4.996883143998605e-05, |
|
"loss": 0.6873, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2969052791595459, |
|
"learning_rate": 4.996680847184072e-05, |
|
"loss": 0.7135, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.23092682659626007, |
|
"learning_rate": 4.996472194554495e-05, |
|
"loss": 0.6827, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.29615628719329834, |
|
"learning_rate": 4.996257186641042e-05, |
|
"loss": 0.6738, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.23491479456424713, |
|
"learning_rate": 4.99603582399106e-05, |
|
"loss": 0.7396, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.1986425668001175, |
|
"learning_rate": 4.9958081071680726e-05, |
|
"loss": 0.6787, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.19658410549163818, |
|
"learning_rate": 4.99557403675178e-05, |
|
"loss": 0.66, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.29404616355895996, |
|
"learning_rate": 4.995333613338057e-05, |
|
"loss": 0.6824, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.23689547181129456, |
|
"learning_rate": 4.9950868375389514e-05, |
|
"loss": 0.7008, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.22758638858795166, |
|
"learning_rate": 4.99483370998268e-05, |
|
"loss": 0.6946, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.20259805023670197, |
|
"learning_rate": 4.994574231313634e-05, |
|
"loss": 0.6734, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.21935436129570007, |
|
"learning_rate": 4.994308402192366e-05, |
|
"loss": 0.7094, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.23414015769958496, |
|
"learning_rate": 4.9940362232956026e-05, |
|
"loss": 0.7019, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.2623121440410614, |
|
"learning_rate": 4.993757695316228e-05, |
|
"loss": 0.6793, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.31137487292289734, |
|
"learning_rate": 4.993472818963295e-05, |
|
"loss": 0.6902, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.22092680633068085, |
|
"learning_rate": 4.993181594962013e-05, |
|
"loss": 0.6912, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.23723623156547546, |
|
"learning_rate": 4.992884024053754e-05, |
|
"loss": 0.652, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.26028233766555786, |
|
"learning_rate": 4.9925801069960454e-05, |
|
"loss": 0.6969, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.23856200277805328, |
|
"learning_rate": 4.992269844562572e-05, |
|
"loss": 0.6723, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"eval_loss": 0.6912096738815308, |
|
"eval_runtime": 67.244, |
|
"eval_samples_per_second": 29.742, |
|
"eval_steps_per_second": 0.937, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.167326420545578, |
|
"learning_rate": 4.9919532375431677e-05, |
|
"loss": 0.6897, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.26183101534843445, |
|
"learning_rate": 4.991630286743823e-05, |
|
"loss": 0.6844, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.25918036699295044, |
|
"learning_rate": 4.991300992986676e-05, |
|
"loss": 0.6645, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.2354690134525299, |
|
"learning_rate": 4.99096535711001e-05, |
|
"loss": 0.6672, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.24444998800754547, |
|
"learning_rate": 4.990623379968257e-05, |
|
"loss": 0.6749, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2562585771083832, |
|
"learning_rate": 4.990275062431989e-05, |
|
"loss": 0.6652, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.22618699073791504, |
|
"learning_rate": 4.98992040538792e-05, |
|
"loss": 0.7163, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.20957091450691223, |
|
"learning_rate": 4.9895594097389044e-05, |
|
"loss": 0.7053, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.26973310112953186, |
|
"learning_rate": 4.989192076403928e-05, |
|
"loss": 0.6716, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.24841250479221344, |
|
"learning_rate": 4.9888184063181154e-05, |
|
"loss": 0.6884, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.25283706188201904, |
|
"learning_rate": 4.98843840043272e-05, |
|
"loss": 0.6632, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.196313738822937, |
|
"learning_rate": 4.988052059715126e-05, |
|
"loss": 0.712, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.21621187031269073, |
|
"learning_rate": 4.987659385148842e-05, |
|
"loss": 0.6732, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2180820256471634, |
|
"learning_rate": 4.987260377733502e-05, |
|
"loss": 0.7045, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.20110243558883667, |
|
"learning_rate": 4.986855038484862e-05, |
|
"loss": 0.6604, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.20813095569610596, |
|
"learning_rate": 4.9864433684347964e-05, |
|
"loss": 0.7051, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.22026021778583527, |
|
"learning_rate": 4.9860253686312964e-05, |
|
"loss": 0.6752, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.23409809172153473, |
|
"learning_rate": 4.9856010401384654e-05, |
|
"loss": 0.6805, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.1982721984386444, |
|
"learning_rate": 4.985170384036521e-05, |
|
"loss": 0.6888, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.24984286725521088, |
|
"learning_rate": 4.984733401421785e-05, |
|
"loss": 0.6741, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.24465042352676392, |
|
"learning_rate": 4.9842900934066874e-05, |
|
"loss": 0.6781, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.26656293869018555, |
|
"learning_rate": 4.98384046111976e-05, |
|
"loss": 0.7086, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.23073184490203857, |
|
"learning_rate": 4.9833845057056336e-05, |
|
"loss": 0.6966, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.23296333849430084, |
|
"learning_rate": 4.982922228325037e-05, |
|
"loss": 0.6769, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.22282017767429352, |
|
"learning_rate": 4.982453630154794e-05, |
|
"loss": 0.6581, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.22943562269210815, |
|
"learning_rate": 4.981978712387815e-05, |
|
"loss": 0.6803, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.29295608401298523, |
|
"learning_rate": 4.9814974762331034e-05, |
|
"loss": 0.6514, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.21845972537994385, |
|
"learning_rate": 4.981009922915743e-05, |
|
"loss": 0.691, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.24616730213165283, |
|
"learning_rate": 4.980516053676903e-05, |
|
"loss": 0.6948, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.2354935109615326, |
|
"learning_rate": 4.9800158697738264e-05, |
|
"loss": 0.6778, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3475668430328369, |
|
"learning_rate": 4.979509372479837e-05, |
|
"loss": 0.6626, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2817700207233429, |
|
"learning_rate": 4.9789965630843265e-05, |
|
"loss": 0.6724, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.28306934237480164, |
|
"learning_rate": 4.978477442892758e-05, |
|
"loss": 0.6673, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2593139111995697, |
|
"learning_rate": 4.9779520132266575e-05, |
|
"loss": 0.6866, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.2618165612220764, |
|
"learning_rate": 4.9774202754236145e-05, |
|
"loss": 0.6671, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.22875259816646576, |
|
"learning_rate": 4.9768822308372784e-05, |
|
"loss": 0.6743, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.20143887400627136, |
|
"learning_rate": 4.976337880837351e-05, |
|
"loss": 0.6866, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.23163306713104248, |
|
"learning_rate": 4.975787226809587e-05, |
|
"loss": 0.6772, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.21628862619400024, |
|
"learning_rate": 4.975230270155791e-05, |
|
"loss": 0.6706, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.21055322885513306, |
|
"learning_rate": 4.9746670122938105e-05, |
|
"loss": 0.6536, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.20045976340770721, |
|
"learning_rate": 4.974097454657534e-05, |
|
"loss": 0.6477, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.20996223390102386, |
|
"learning_rate": 4.9735215986968874e-05, |
|
"loss": 0.6526, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1778746396303177, |
|
"learning_rate": 4.972939445877831e-05, |
|
"loss": 0.6951, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2416403591632843, |
|
"learning_rate": 4.972350997682354e-05, |
|
"loss": 0.6703, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.23652783036231995, |
|
"learning_rate": 4.9717562556084735e-05, |
|
"loss": 0.6401, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.23598232865333557, |
|
"learning_rate": 4.9711552211702274e-05, |
|
"loss": 0.6668, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.22741574048995972, |
|
"learning_rate": 4.970547895897672e-05, |
|
"loss": 0.6765, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.20418991148471832, |
|
"learning_rate": 4.96993428133688e-05, |
|
"loss": 0.6771, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.24983160197734833, |
|
"learning_rate": 4.969314379049932e-05, |
|
"loss": 0.6656, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2511340379714966, |
|
"learning_rate": 4.968688190614919e-05, |
|
"loss": 0.6605, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"eval_loss": 0.6730201244354248, |
|
"eval_runtime": 67.2807, |
|
"eval_samples_per_second": 29.726, |
|
"eval_steps_per_second": 0.936, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.22646427154541016, |
|
"learning_rate": 4.96805571762593e-05, |
|
"loss": 0.6729, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.2343759387731552, |
|
"learning_rate": 4.9674169616930574e-05, |
|
"loss": 0.6635, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.23573775589466095, |
|
"learning_rate": 4.966771924442385e-05, |
|
"loss": 0.654, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.2696162760257721, |
|
"learning_rate": 4.966120607515987e-05, |
|
"loss": 0.6728, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.25032511353492737, |
|
"learning_rate": 4.965463012571927e-05, |
|
"loss": 0.6743, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.24621817469596863, |
|
"learning_rate": 4.964799141284247e-05, |
|
"loss": 0.6596, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.23296469449996948, |
|
"learning_rate": 4.964128995342966e-05, |
|
"loss": 0.6471, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.22769325971603394, |
|
"learning_rate": 4.963452576454082e-05, |
|
"loss": 0.6499, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2572191655635834, |
|
"learning_rate": 4.9627698863395564e-05, |
|
"loss": 0.6499, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.21782545745372772, |
|
"learning_rate": 4.962080926737319e-05, |
|
"loss": 0.6641, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.22925855219364166, |
|
"learning_rate": 4.9613856994012567e-05, |
|
"loss": 0.6618, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2127605825662613, |
|
"learning_rate": 4.960684206101214e-05, |
|
"loss": 0.6821, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2520267069339752, |
|
"learning_rate": 4.9599764486229865e-05, |
|
"loss": 0.6748, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.20282019674777985, |
|
"learning_rate": 4.9592624287683176e-05, |
|
"loss": 0.6674, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.46226009726524353, |
|
"learning_rate": 4.958542148354891e-05, |
|
"loss": 0.6422, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2241724133491516, |
|
"learning_rate": 4.957815609216329e-05, |
|
"loss": 0.6612, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.23772084712982178, |
|
"learning_rate": 4.957082813202186e-05, |
|
"loss": 0.6614, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.2049807757139206, |
|
"learning_rate": 4.9563437621779465e-05, |
|
"loss": 0.6827, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.22666208446025848, |
|
"learning_rate": 4.955598458025015e-05, |
|
"loss": 0.6522, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.26957106590270996, |
|
"learning_rate": 4.954846902640718e-05, |
|
"loss": 0.6817, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.22548554837703705, |
|
"learning_rate": 4.954089097938294e-05, |
|
"loss": 0.6444, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.22197945415973663, |
|
"learning_rate": 4.9533250458468914e-05, |
|
"loss": 0.6779, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.22711919248104095, |
|
"learning_rate": 4.9525547483115617e-05, |
|
"loss": 0.6416, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.22576060891151428, |
|
"learning_rate": 4.951778207293255e-05, |
|
"loss": 0.6598, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.22785042226314545, |
|
"learning_rate": 4.950995424768818e-05, |
|
"loss": 0.6499, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.20107567310333252, |
|
"learning_rate": 4.9502064027309836e-05, |
|
"loss": 0.6509, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.23379752039909363, |
|
"learning_rate": 4.94941114318837e-05, |
|
"loss": 0.6551, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.22154928743839264, |
|
"learning_rate": 4.948609648165475e-05, |
|
"loss": 0.6428, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.23805582523345947, |
|
"learning_rate": 4.947801919702667e-05, |
|
"loss": 0.6633, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.24756357073783875, |
|
"learning_rate": 4.946987959856188e-05, |
|
"loss": 0.6561, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.23767632246017456, |
|
"learning_rate": 4.9461677706981374e-05, |
|
"loss": 0.6552, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.32177987694740295, |
|
"learning_rate": 4.9453413543164775e-05, |
|
"loss": 0.6705, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.29961881041526794, |
|
"learning_rate": 4.94450871281502e-05, |
|
"loss": 0.6551, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.2768830955028534, |
|
"learning_rate": 4.943669848313427e-05, |
|
"loss": 0.6468, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.24728989601135254, |
|
"learning_rate": 4.9428247629472e-05, |
|
"loss": 0.6506, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.2641989290714264, |
|
"learning_rate": 4.941973458867677e-05, |
|
"loss": 0.6517, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.18787774443626404, |
|
"learning_rate": 4.941115938242028e-05, |
|
"loss": 0.6355, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.26642099022865295, |
|
"learning_rate": 4.940252203253248e-05, |
|
"loss": 0.6811, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.24736198782920837, |
|
"learning_rate": 4.939382256100154e-05, |
|
"loss": 0.6493, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.3060832619667053, |
|
"learning_rate": 4.938506098997374e-05, |
|
"loss": 0.6707, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.24164025485515594, |
|
"learning_rate": 4.937623734175346e-05, |
|
"loss": 0.6508, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.26685982942581177, |
|
"learning_rate": 4.936735163880313e-05, |
|
"loss": 0.6441, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.17739726603031158, |
|
"learning_rate": 4.9358403903743124e-05, |
|
"loss": 0.6499, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.23560577630996704, |
|
"learning_rate": 4.9349394159351735e-05, |
|
"loss": 0.645, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.23137669265270233, |
|
"learning_rate": 4.9340322428565135e-05, |
|
"loss": 0.6615, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.3485463857650757, |
|
"learning_rate": 4.933118873447728e-05, |
|
"loss": 0.6283, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.20602945983409882, |
|
"learning_rate": 4.932199310033987e-05, |
|
"loss": 0.6831, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.21541930735111237, |
|
"learning_rate": 4.931273554956227e-05, |
|
"loss": 0.6722, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.2092689573764801, |
|
"learning_rate": 4.930341610571151e-05, |
|
"loss": 0.6609, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.23792122304439545, |
|
"learning_rate": 4.9294034792512126e-05, |
|
"loss": 0.6475, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"eval_loss": 0.6643247008323669, |
|
"eval_runtime": 67.0911, |
|
"eval_samples_per_second": 29.81, |
|
"eval_steps_per_second": 0.939, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2101823091506958, |
|
"learning_rate": 4.928459163384619e-05, |
|
"loss": 0.6604, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.1859540194272995, |
|
"learning_rate": 4.927508665375321e-05, |
|
"loss": 0.6252, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2371070235967636, |
|
"learning_rate": 4.926551987643007e-05, |
|
"loss": 0.6413, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2149161994457245, |
|
"learning_rate": 4.9255891326230964e-05, |
|
"loss": 0.6558, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.21805475652217865, |
|
"learning_rate": 4.9246201027667354e-05, |
|
"loss": 0.631, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.22705689072608948, |
|
"learning_rate": 4.9236449005407895e-05, |
|
"loss": 0.6785, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.21902668476104736, |
|
"learning_rate": 4.9226635284278355e-05, |
|
"loss": 0.657, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.21482336521148682, |
|
"learning_rate": 4.9216759889261586e-05, |
|
"loss": 0.6514, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.20008622109889984, |
|
"learning_rate": 4.9206822845497444e-05, |
|
"loss": 0.6516, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.205527663230896, |
|
"learning_rate": 4.919682417828271e-05, |
|
"loss": 0.6336, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.263959139585495, |
|
"learning_rate": 4.9186763913071065e-05, |
|
"loss": 0.6449, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.21950668096542358, |
|
"learning_rate": 4.917664207547297e-05, |
|
"loss": 0.6916, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.215659499168396, |
|
"learning_rate": 4.916645869125564e-05, |
|
"loss": 0.6441, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.22789780795574188, |
|
"learning_rate": 4.915621378634301e-05, |
|
"loss": 0.6576, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.27431604266166687, |
|
"learning_rate": 4.914590738681555e-05, |
|
"loss": 0.6487, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.21077977120876312, |
|
"learning_rate": 4.913553951891036e-05, |
|
"loss": 0.6688, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.2798519730567932, |
|
"learning_rate": 4.9125110209020954e-05, |
|
"loss": 0.6618, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.24790987372398376, |
|
"learning_rate": 4.911461948369731e-05, |
|
"loss": 0.6598, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.2008027732372284, |
|
"learning_rate": 4.91040673696457e-05, |
|
"loss": 0.6388, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.23889446258544922, |
|
"learning_rate": 4.9093453893728733e-05, |
|
"loss": 0.6526, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1997964233160019, |
|
"learning_rate": 4.908277908296518e-05, |
|
"loss": 0.6521, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.24335375428199768, |
|
"learning_rate": 4.907204296452997e-05, |
|
"loss": 0.6636, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.23431982100009918, |
|
"learning_rate": 4.906124556575411e-05, |
|
"loss": 0.6557, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.21532365679740906, |
|
"learning_rate": 4.90503869141246e-05, |
|
"loss": 0.628, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.22962962090969086, |
|
"learning_rate": 4.903946703728436e-05, |
|
"loss": 0.6422, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.22807002067565918, |
|
"learning_rate": 4.90284859630322e-05, |
|
"loss": 0.6631, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.27668192982673645, |
|
"learning_rate": 4.90174437193227e-05, |
|
"loss": 0.6778, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.2503020763397217, |
|
"learning_rate": 4.900634033426616e-05, |
|
"loss": 0.6483, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.3033277094364166, |
|
"learning_rate": 4.8995175836128536e-05, |
|
"loss": 0.6572, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2776722311973572, |
|
"learning_rate": 4.898395025333136e-05, |
|
"loss": 0.662, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.31251537799835205, |
|
"learning_rate": 4.897266361445165e-05, |
|
"loss": 0.6617, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.38570669293403625, |
|
"learning_rate": 4.8961315948221884e-05, |
|
"loss": 0.6419, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.23277431726455688, |
|
"learning_rate": 4.894990728352988e-05, |
|
"loss": 0.6373, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2519891858100891, |
|
"learning_rate": 4.893843764941874e-05, |
|
"loss": 0.6759, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2358052134513855, |
|
"learning_rate": 4.892690707508677e-05, |
|
"loss": 0.6826, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.23103272914886475, |
|
"learning_rate": 4.8915315589887436e-05, |
|
"loss": 0.624, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2128864973783493, |
|
"learning_rate": 4.8903663223329245e-05, |
|
"loss": 0.654, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.22060807049274445, |
|
"learning_rate": 4.889195000507568e-05, |
|
"loss": 0.6376, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.24493062496185303, |
|
"learning_rate": 4.888017596494517e-05, |
|
"loss": 0.6648, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.2580796480178833, |
|
"learning_rate": 4.886834113291094e-05, |
|
"loss": 0.6607, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.26906147599220276, |
|
"learning_rate": 4.8856445539101016e-05, |
|
"loss": 0.6441, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.2145451009273529, |
|
"learning_rate": 4.884448921379805e-05, |
|
"loss": 0.6543, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.2156788855791092, |
|
"learning_rate": 4.8832472187439345e-05, |
|
"loss": 0.6185, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.25632360577583313, |
|
"learning_rate": 4.882039449061673e-05, |
|
"loss": 0.639, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.2496800720691681, |
|
"learning_rate": 4.8808256154076436e-05, |
|
"loss": 0.6377, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.39942485094070435, |
|
"learning_rate": 4.8796057208719124e-05, |
|
"loss": 0.6083, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.27986374497413635, |
|
"learning_rate": 4.8783797685599706e-05, |
|
"loss": 0.6528, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.22164517641067505, |
|
"learning_rate": 4.877147761592733e-05, |
|
"loss": 0.6456, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.29858505725860596, |
|
"learning_rate": 4.875909703106527e-05, |
|
"loss": 0.643, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.22595979273319244, |
|
"learning_rate": 4.874665596253084e-05, |
|
"loss": 0.6419, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"eval_loss": 0.6583871841430664, |
|
"eval_runtime": 67.1961, |
|
"eval_samples_per_second": 29.764, |
|
"eval_steps_per_second": 0.938, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.19871610403060913, |
|
"learning_rate": 4.8734154441995364e-05, |
|
"loss": 0.6556, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2072724997997284, |
|
"learning_rate": 4.872159250128401e-05, |
|
"loss": 0.6764, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.22515198588371277, |
|
"learning_rate": 4.87089701723758e-05, |
|
"loss": 0.6703, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2292303442955017, |
|
"learning_rate": 4.869628748740347e-05, |
|
"loss": 0.6288, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.25133493542671204, |
|
"learning_rate": 4.8683544478653395e-05, |
|
"loss": 0.6652, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.29863354563713074, |
|
"learning_rate": 4.867074117856555e-05, |
|
"loss": 0.6352, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2168973684310913, |
|
"learning_rate": 4.865787761973334e-05, |
|
"loss": 0.6562, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2312694936990738, |
|
"learning_rate": 4.864495383490363e-05, |
|
"loss": 0.6463, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2333671748638153, |
|
"learning_rate": 4.863196985697655e-05, |
|
"loss": 0.6267, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.23559433221817017, |
|
"learning_rate": 4.86189257190055e-05, |
|
"loss": 0.623, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.22314316034317017, |
|
"learning_rate": 4.860582145419703e-05, |
|
"loss": 0.6298, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.22701682150363922, |
|
"learning_rate": 4.859265709591073e-05, |
|
"loss": 0.6359, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.21683742105960846, |
|
"learning_rate": 4.857943267765919e-05, |
|
"loss": 0.6666, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.2519407272338867, |
|
"learning_rate": 4.856614823310788e-05, |
|
"loss": 0.6731, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.23536355793476105, |
|
"learning_rate": 4.855280379607509e-05, |
|
"loss": 0.629, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.19676193594932556, |
|
"learning_rate": 4.853939940053184e-05, |
|
"loss": 0.6329, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.23497731983661652, |
|
"learning_rate": 4.852593508060177e-05, |
|
"loss": 0.6584, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.2515724003314972, |
|
"learning_rate": 4.8512410870561084e-05, |
|
"loss": 0.6177, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.20130635797977448, |
|
"learning_rate": 4.8498826804838436e-05, |
|
"loss": 0.6645, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.23828361928462982, |
|
"learning_rate": 4.8485182918014876e-05, |
|
"loss": 0.6401, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.2598424255847931, |
|
"learning_rate": 4.847147924482371e-05, |
|
"loss": 0.6193, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.20697768032550812, |
|
"learning_rate": 4.845771582015046e-05, |
|
"loss": 0.626, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.19661648571491241, |
|
"learning_rate": 4.8443892679032775e-05, |
|
"loss": 0.6283, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.22154510021209717, |
|
"learning_rate": 4.843000985666028e-05, |
|
"loss": 0.6571, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.3334987759590149, |
|
"learning_rate": 4.841606738837458e-05, |
|
"loss": 0.6553, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.6604078412055969, |
|
"learning_rate": 4.8402065309669085e-05, |
|
"loss": 0.6239, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.22238047420978546, |
|
"learning_rate": 4.838800365618898e-05, |
|
"loss": 0.6725, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.21566419303417206, |
|
"learning_rate": 4.837388246373108e-05, |
|
"loss": 0.6597, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9337890148162842, |
|
"learning_rate": 4.83597017682438e-05, |
|
"loss": 0.6454, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.2716673016548157, |
|
"learning_rate": 4.8345461605827014e-05, |
|
"loss": 0.6526, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.23388756811618805, |
|
"learning_rate": 4.8331162012732e-05, |
|
"loss": 0.6642, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.21243353188037872, |
|
"learning_rate": 4.8316803025361304e-05, |
|
"loss": 0.6233, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.28600814938545227, |
|
"learning_rate": 4.8302384680268684e-05, |
|
"loss": 0.6559, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.19718654453754425, |
|
"learning_rate": 4.8287907014159004e-05, |
|
"loss": 0.6357, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2330627292394638, |
|
"learning_rate": 4.827337006388816e-05, |
|
"loss": 0.6692, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.18776822090148926, |
|
"learning_rate": 4.8258773866462927e-05, |
|
"loss": 0.6804, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.24929189682006836, |
|
"learning_rate": 4.8244118459040944e-05, |
|
"loss": 0.6455, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.23884710669517517, |
|
"learning_rate": 4.8229403878930566e-05, |
|
"loss": 0.6345, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2579725682735443, |
|
"learning_rate": 4.821463016359078e-05, |
|
"loss": 0.6584, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2741701900959015, |
|
"learning_rate": 4.8199797350631136e-05, |
|
"loss": 0.6463, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2254386991262436, |
|
"learning_rate": 4.818490547781159e-05, |
|
"loss": 0.6399, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2172977328300476, |
|
"learning_rate": 4.816995458304249e-05, |
|
"loss": 0.661, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.23464016616344452, |
|
"learning_rate": 4.815494470438441e-05, |
|
"loss": 0.6516, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.27163979411125183, |
|
"learning_rate": 4.813987588004807e-05, |
|
"loss": 0.6451, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.20852161943912506, |
|
"learning_rate": 4.8124748148394285e-05, |
|
"loss": 0.6394, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.2224467694759369, |
|
"learning_rate": 4.81095615479338e-05, |
|
"loss": 0.6427, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.2205616980791092, |
|
"learning_rate": 4.8094316117327245e-05, |
|
"loss": 0.6491, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.21984492242336273, |
|
"learning_rate": 4.8079011895384985e-05, |
|
"loss": 0.6271, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.29770565032958984, |
|
"learning_rate": 4.806364892106707e-05, |
|
"loss": 0.6048, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2440354973077774, |
|
"learning_rate": 4.8048227233483127e-05, |
|
"loss": 0.6307, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"eval_loss": 0.6532164216041565, |
|
"eval_runtime": 67.3224, |
|
"eval_samples_per_second": 29.708, |
|
"eval_steps_per_second": 0.936, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.25720447301864624, |
|
"learning_rate": 4.803274687189222e-05, |
|
"loss": 0.6337, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.21051880717277527, |
|
"learning_rate": 4.8017207875702814e-05, |
|
"loss": 0.6398, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2977493107318878, |
|
"learning_rate": 4.800161028447261e-05, |
|
"loss": 0.6346, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.19749422371387482, |
|
"learning_rate": 4.798595413790848e-05, |
|
"loss": 0.6325, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.21522672474384308, |
|
"learning_rate": 4.7970239475866386e-05, |
|
"loss": 0.6244, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.24835069477558136, |
|
"learning_rate": 4.7954466338351224e-05, |
|
"loss": 0.612, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.25459495186805725, |
|
"learning_rate": 4.793863476551677e-05, |
|
"loss": 0.6317, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.28858670592308044, |
|
"learning_rate": 4.7922744797665544e-05, |
|
"loss": 0.6485, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.30466488003730774, |
|
"learning_rate": 4.790679647524873e-05, |
|
"loss": 0.6307, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.36087316274642944, |
|
"learning_rate": 4.789078983886607e-05, |
|
"loss": 0.6749, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.2436164766550064, |
|
"learning_rate": 4.787472492926575e-05, |
|
"loss": 0.6239, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.2527044415473938, |
|
"learning_rate": 4.78586017873443e-05, |
|
"loss": 0.6584, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.25492456555366516, |
|
"learning_rate": 4.784242045414651e-05, |
|
"loss": 0.6487, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.2293190211057663, |
|
"learning_rate": 4.782618097086528e-05, |
|
"loss": 0.646, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.7955949902534485, |
|
"learning_rate": 4.780988337884157e-05, |
|
"loss": 0.6391, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.2331811785697937, |
|
"learning_rate": 4.779352771956425e-05, |
|
"loss": 0.6267, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.23316140472888947, |
|
"learning_rate": 4.7777114034670006e-05, |
|
"loss": 0.6405, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.2253628969192505, |
|
"learning_rate": 4.776064236594327e-05, |
|
"loss": 0.6415, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.2378271222114563, |
|
"learning_rate": 4.774411275531606e-05, |
|
"loss": 0.6362, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.22514688968658447, |
|
"learning_rate": 4.7727525244867896e-05, |
|
"loss": 0.6375, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.20843249559402466, |
|
"learning_rate": 4.771087987682571e-05, |
|
"loss": 0.6166, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2776215076446533, |
|
"learning_rate": 4.7694176693563705e-05, |
|
"loss": 0.6441, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.21339628100395203, |
|
"learning_rate": 4.767741573760327e-05, |
|
"loss": 0.6388, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.24429263174533844, |
|
"learning_rate": 4.766059705161288e-05, |
|
"loss": 0.6263, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2099929004907608, |
|
"learning_rate": 4.764372067840795e-05, |
|
"loss": 0.6383, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.22066868841648102, |
|
"learning_rate": 4.7626786660950784e-05, |
|
"loss": 0.6313, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.22953028976917267, |
|
"learning_rate": 4.760979504235038e-05, |
|
"loss": 0.6264, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.3549204170703888, |
|
"learning_rate": 4.759274586586242e-05, |
|
"loss": 0.6368, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.24681401252746582, |
|
"learning_rate": 4.757563917488909e-05, |
|
"loss": 0.6422, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.2417946755886078, |
|
"learning_rate": 4.755847501297898e-05, |
|
"loss": 0.6438, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.2778252959251404, |
|
"learning_rate": 4.7541253423827006e-05, |
|
"loss": 0.6312, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.20280030369758606, |
|
"learning_rate": 4.7523974451274275e-05, |
|
"loss": 0.6349, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.24433667957782745, |
|
"learning_rate": 4.7506638139307966e-05, |
|
"loss": 0.6648, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.3273336887359619, |
|
"learning_rate": 4.7489244532061225e-05, |
|
"loss": 0.6498, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.23943009972572327, |
|
"learning_rate": 4.747179367381307e-05, |
|
"loss": 0.6358, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.26670822501182556, |
|
"learning_rate": 4.745428560898824e-05, |
|
"loss": 0.6563, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.18882083892822266, |
|
"learning_rate": 4.7436720382157116e-05, |
|
"loss": 0.6358, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.23588411509990692, |
|
"learning_rate": 4.741909803803562e-05, |
|
"loss": 0.6396, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.24063749611377716, |
|
"learning_rate": 4.740141862148503e-05, |
|
"loss": 0.6412, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.19155845046043396, |
|
"learning_rate": 4.738368217751196e-05, |
|
"loss": 0.62, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.20259159803390503, |
|
"learning_rate": 4.736588875126816e-05, |
|
"loss": 0.6172, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.22824090719223022, |
|
"learning_rate": 4.734803838805048e-05, |
|
"loss": 0.6286, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.2356242537498474, |
|
"learning_rate": 4.7330131133300686e-05, |
|
"loss": 0.6121, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.22052697837352753, |
|
"learning_rate": 4.731216703260538e-05, |
|
"loss": 0.6402, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.2817922532558441, |
|
"learning_rate": 4.7294146131695874e-05, |
|
"loss": 0.6256, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.23476284742355347, |
|
"learning_rate": 4.7276068476448097e-05, |
|
"loss": 0.644, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.25303345918655396, |
|
"learning_rate": 4.725793411288242e-05, |
|
"loss": 0.649, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2615244686603546, |
|
"learning_rate": 4.723974308716361e-05, |
|
"loss": 0.6158, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2798680365085602, |
|
"learning_rate": 4.722149544560067e-05, |
|
"loss": 0.6149, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.19780561327934265, |
|
"learning_rate": 4.720319123464672e-05, |
|
"loss": 0.6167, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"eval_loss": 0.6494756937026978, |
|
"eval_runtime": 67.2363, |
|
"eval_samples_per_second": 29.746, |
|
"eval_steps_per_second": 0.937, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.22284342348575592, |
|
"learning_rate": 4.718483050089891e-05, |
|
"loss": 0.6282, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.242294043302536, |
|
"learning_rate": 4.7166413291098246e-05, |
|
"loss": 0.6418, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.19954660534858704, |
|
"learning_rate": 4.714793965212955e-05, |
|
"loss": 0.6293, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2239541858434677, |
|
"learning_rate": 4.712940963102126e-05, |
|
"loss": 0.6441, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2324070930480957, |
|
"learning_rate": 4.711082327494536e-05, |
|
"loss": 0.6355, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.27540045976638794, |
|
"learning_rate": 4.709218063121725e-05, |
|
"loss": 0.6476, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.37015092372894287, |
|
"learning_rate": 4.7073481747295614e-05, |
|
"loss": 0.6161, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2678948938846588, |
|
"learning_rate": 4.7054726670782304e-05, |
|
"loss": 0.6404, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2190527468919754, |
|
"learning_rate": 4.703591544942224e-05, |
|
"loss": 0.6619, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.23498305678367615, |
|
"learning_rate": 4.701704813110325e-05, |
|
"loss": 0.6297, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2132258415222168, |
|
"learning_rate": 4.6998124763855984e-05, |
|
"loss": 0.6462, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2967916429042816, |
|
"learning_rate": 4.697914539585376e-05, |
|
"loss": 0.6347, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2503927946090698, |
|
"learning_rate": 4.6960110075412473e-05, |
|
"loss": 0.6264, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2261209934949875, |
|
"learning_rate": 4.694101885099045e-05, |
|
"loss": 0.6351, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2164410799741745, |
|
"learning_rate": 4.692187177118832e-05, |
|
"loss": 0.626, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.2934332489967346, |
|
"learning_rate": 4.690266888474893e-05, |
|
"loss": 0.6243, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.23883183300495148, |
|
"learning_rate": 4.688341024055718e-05, |
|
"loss": 0.624, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.1730484664440155, |
|
"learning_rate": 4.686409588763991e-05, |
|
"loss": 0.6473, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.27762770652770996, |
|
"learning_rate": 4.6844725875165775e-05, |
|
"loss": 0.6508, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.243989497423172, |
|
"learning_rate": 4.682530025244514e-05, |
|
"loss": 0.6305, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.21361881494522095, |
|
"learning_rate": 4.6805819068929925e-05, |
|
"loss": 0.5969, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.22126366198062897, |
|
"learning_rate": 4.678628237421348e-05, |
|
"loss": 0.6192, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.2569611370563507, |
|
"learning_rate": 4.6766690218030495e-05, |
|
"loss": 0.6235, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.26861709356307983, |
|
"learning_rate": 4.674704265025683e-05, |
|
"loss": 0.6413, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.19330435991287231, |
|
"learning_rate": 4.672733972090943e-05, |
|
"loss": 0.6413, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.25647568702697754, |
|
"learning_rate": 4.6707581480146136e-05, |
|
"loss": 0.6411, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.23879855871200562, |
|
"learning_rate": 4.6687767978265625e-05, |
|
"loss": 0.6188, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.25180870294570923, |
|
"learning_rate": 4.666789926570725e-05, |
|
"loss": 0.6388, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.26070743799209595, |
|
"learning_rate": 4.6647975393050904e-05, |
|
"loss": 0.6254, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.2934954762458801, |
|
"learning_rate": 4.662799641101691e-05, |
|
"loss": 0.6089, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.24827881157398224, |
|
"learning_rate": 4.6607962370465866e-05, |
|
"loss": 0.6233, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.7880712151527405, |
|
"learning_rate": 4.658787332239856e-05, |
|
"loss": 0.6305, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.23941326141357422, |
|
"learning_rate": 4.6567729317955796e-05, |
|
"loss": 0.6449, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.21050909161567688, |
|
"learning_rate": 4.654753040841829e-05, |
|
"loss": 0.6324, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.2372525930404663, |
|
"learning_rate": 4.6527276645206516e-05, |
|
"loss": 0.6098, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.23655132949352264, |
|
"learning_rate": 4.65069680798806e-05, |
|
"loss": 0.6457, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.26810187101364136, |
|
"learning_rate": 4.648660476414017e-05, |
|
"loss": 0.6327, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.2635047137737274, |
|
"learning_rate": 4.6466186749824235e-05, |
|
"loss": 0.6359, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.2955858111381531, |
|
"learning_rate": 4.6445714088911076e-05, |
|
"loss": 0.6412, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.2971380949020386, |
|
"learning_rate": 4.6425186833518054e-05, |
|
"loss": 0.61, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.22041000425815582, |
|
"learning_rate": 4.6404605035901505e-05, |
|
"loss": 0.6494, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.2463519424200058, |
|
"learning_rate": 4.638396874845666e-05, |
|
"loss": 0.6223, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.2519693374633789, |
|
"learning_rate": 4.636327802371742e-05, |
|
"loss": 0.6399, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.23022256791591644, |
|
"learning_rate": 4.6342532914356284e-05, |
|
"loss": 0.6106, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.21000300347805023, |
|
"learning_rate": 4.632173347318421e-05, |
|
"loss": 0.6463, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.2383776307106018, |
|
"learning_rate": 4.630087975315045e-05, |
|
"loss": 0.6202, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.2528972327709198, |
|
"learning_rate": 4.627997180734244e-05, |
|
"loss": 0.6401, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.20749187469482422, |
|
"learning_rate": 4.625900968898565e-05, |
|
"loss": 0.6541, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.2283320277929306, |
|
"learning_rate": 4.623799345144348e-05, |
|
"loss": 0.6311, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.21833132207393646, |
|
"learning_rate": 4.6216923148217096e-05, |
|
"loss": 0.6272, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"eval_loss": 0.6476519107818604, |
|
"eval_runtime": 67.279, |
|
"eval_samples_per_second": 29.727, |
|
"eval_steps_per_second": 0.936, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.2190130203962326, |
|
"learning_rate": 4.619579883294528e-05, |
|
"loss": 0.6405, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.32068029046058655, |
|
"learning_rate": 4.617462055940433e-05, |
|
"loss": 0.6101, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.2526598572731018, |
|
"learning_rate": 4.6153388381507886e-05, |
|
"loss": 0.6252, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.19768927991390228, |
|
"learning_rate": 4.613210235330686e-05, |
|
"loss": 0.6138, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.24341870844364166, |
|
"learning_rate": 4.611076252898919e-05, |
|
"loss": 0.6072, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.2283903807401657, |
|
"learning_rate": 4.60893689628798e-05, |
|
"loss": 0.6396, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.21138328313827515, |
|
"learning_rate": 4.606792170944041e-05, |
|
"loss": 0.6306, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.4939797818660736, |
|
"learning_rate": 4.604642082326944e-05, |
|
"loss": 0.6259, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.22861245274543762, |
|
"learning_rate": 4.60248663591018e-05, |
|
"loss": 0.6274, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.35914289951324463, |
|
"learning_rate": 4.6003258371808825e-05, |
|
"loss": 0.6357, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.2684130072593689, |
|
"learning_rate": 4.598159691639809e-05, |
|
"loss": 0.6299, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.2696840763092041, |
|
"learning_rate": 4.5959882048013294e-05, |
|
"loss": 0.6224, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.21357202529907227, |
|
"learning_rate": 4.59381138219341e-05, |
|
"loss": 0.6119, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.20233683288097382, |
|
"learning_rate": 4.591629229357601e-05, |
|
"loss": 0.5956, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.2905120849609375, |
|
"learning_rate": 4.5894417518490225e-05, |
|
"loss": 0.6106, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.2116033285856247, |
|
"learning_rate": 4.5872489552363475e-05, |
|
"loss": 0.6218, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.22969362139701843, |
|
"learning_rate": 4.585050845101791e-05, |
|
"loss": 0.6419, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.23150619864463806, |
|
"learning_rate": 4.582847427041097e-05, |
|
"loss": 0.6128, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.23322267830371857, |
|
"learning_rate": 4.580638706663517e-05, |
|
"loss": 0.6114, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.23113666474819183, |
|
"learning_rate": 4.578424689591805e-05, |
|
"loss": 0.6173, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.2632519602775574, |
|
"learning_rate": 4.5762053814621975e-05, |
|
"loss": 0.643, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.19099248945713043, |
|
"learning_rate": 4.573980787924399e-05, |
|
"loss": 0.6201, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.23567742109298706, |
|
"learning_rate": 4.5717509146415705e-05, |
|
"loss": 0.6163, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.21497173607349396, |
|
"learning_rate": 4.5695157672903144e-05, |
|
"loss": 0.6226, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.22423885762691498, |
|
"learning_rate": 4.567275351560658e-05, |
|
"loss": 0.6052, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.2486392855644226, |
|
"learning_rate": 4.5650296731560396e-05, |
|
"loss": 0.6102, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.19872063398361206, |
|
"learning_rate": 4.562778737793298e-05, |
|
"loss": 0.633, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.19403629004955292, |
|
"learning_rate": 4.560522551202651e-05, |
|
"loss": 0.6074, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.2101258486509323, |
|
"learning_rate": 4.558261119127686e-05, |
|
"loss": 0.6019, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.2554053068161011, |
|
"learning_rate": 4.555994447325344e-05, |
|
"loss": 0.6095, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.3209736943244934, |
|
"learning_rate": 4.5537225415659054e-05, |
|
"loss": 0.6095, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.1882716864347458, |
|
"learning_rate": 4.551445407632973e-05, |
|
"loss": 0.624, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.219781294465065, |
|
"learning_rate": 4.549163051323461e-05, |
|
"loss": 0.6212, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.1968742161989212, |
|
"learning_rate": 4.5468754784475764e-05, |
|
"loss": 0.6088, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.23859383165836334, |
|
"learning_rate": 4.5445826948288074e-05, |
|
"loss": 0.6429, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.21909688413143158, |
|
"learning_rate": 4.542284706303906e-05, |
|
"loss": 0.6163, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.23673193156719208, |
|
"learning_rate": 4.539981518722876e-05, |
|
"loss": 0.6261, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.23555058240890503, |
|
"learning_rate": 4.537673137948954e-05, |
|
"loss": 0.598, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.3052713871002197, |
|
"learning_rate": 4.5353595698586e-05, |
|
"loss": 0.6485, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.2587834298610687, |
|
"learning_rate": 4.533040820341477e-05, |
|
"loss": 0.6104, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.2710438370704651, |
|
"learning_rate": 4.53071689530044e-05, |
|
"loss": 0.6286, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.2709255516529083, |
|
"learning_rate": 4.528387800651517e-05, |
|
"loss": 0.6199, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.3711196780204773, |
|
"learning_rate": 4.5260535423239e-05, |
|
"loss": 0.6303, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.2359853833913803, |
|
"learning_rate": 4.523714126259923e-05, |
|
"loss": 0.6232, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.2610563039779663, |
|
"learning_rate": 4.5213695584150495e-05, |
|
"loss": 0.6266, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.23091551661491394, |
|
"learning_rate": 4.519019844757863e-05, |
|
"loss": 0.6264, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.20323573052883148, |
|
"learning_rate": 4.516664991270041e-05, |
|
"loss": 0.6137, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.2040981501340866, |
|
"learning_rate": 4.5143050039463476e-05, |
|
"loss": 0.6433, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.24421969056129456, |
|
"learning_rate": 4.511939888794617e-05, |
|
"loss": 0.6221, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.22230570018291473, |
|
"learning_rate": 4.5095696518357375e-05, |
|
"loss": 0.6002, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"eval_loss": 0.6445114016532898, |
|
"eval_runtime": 67.1925, |
|
"eval_samples_per_second": 29.765, |
|
"eval_steps_per_second": 0.938, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.2543829381465912, |
|
"learning_rate": 4.5071942991036354e-05, |
|
"loss": 0.6598, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.29779505729675293, |
|
"learning_rate": 4.504813836645258e-05, |
|
"loss": 0.6196, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.2295481115579605, |
|
"learning_rate": 4.5024282705205635e-05, |
|
"loss": 0.6227, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.234407439827919, |
|
"learning_rate": 4.500037606802502e-05, |
|
"loss": 0.6363, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.2559088468551636, |
|
"learning_rate": 4.497641851577e-05, |
|
"loss": 0.6471, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.21824946999549866, |
|
"learning_rate": 4.495241010942945e-05, |
|
"loss": 0.6362, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.2934998571872711, |
|
"learning_rate": 4.4928350910121714e-05, |
|
"loss": 0.6285, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.2297659069299698, |
|
"learning_rate": 4.4904240979094436e-05, |
|
"loss": 0.6102, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.2708946466445923, |
|
"learning_rate": 4.488008037772441e-05, |
|
"loss": 0.6186, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.23141387104988098, |
|
"learning_rate": 4.485586916751743e-05, |
|
"loss": 0.6314, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.2494589388370514, |
|
"learning_rate": 4.483160741010809e-05, |
|
"loss": 0.6413, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.2377118617296219, |
|
"learning_rate": 4.480729516725971e-05, |
|
"loss": 0.617, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.2568589746952057, |
|
"learning_rate": 4.47829325008641e-05, |
|
"loss": 0.6143, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.23955343663692474, |
|
"learning_rate": 4.475851947294145e-05, |
|
"loss": 0.6257, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.2029058337211609, |
|
"learning_rate": 4.4734056145640135e-05, |
|
"loss": 0.6074, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.3426890969276428, |
|
"learning_rate": 4.470954258123661e-05, |
|
"loss": 0.6511, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.28450432419776917, |
|
"learning_rate": 4.4684978842135175e-05, |
|
"loss": 0.6227, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.2370108813047409, |
|
"learning_rate": 4.4660364990867895e-05, |
|
"loss": 0.6198, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.33506277203559875, |
|
"learning_rate": 4.463570109009441e-05, |
|
"loss": 0.6112, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.21070849895477295, |
|
"learning_rate": 4.461098720260173e-05, |
|
"loss": 0.606, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.3238009214401245, |
|
"learning_rate": 4.458622339130416e-05, |
|
"loss": 0.6413, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.23879234492778778, |
|
"learning_rate": 4.456140971924309e-05, |
|
"loss": 0.6229, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.24163304269313812, |
|
"learning_rate": 4.45365462495868e-05, |
|
"loss": 0.6268, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.22378313541412354, |
|
"learning_rate": 4.451163304563038e-05, |
|
"loss": 0.6218, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.21948544681072235, |
|
"learning_rate": 4.448667017079554e-05, |
|
"loss": 0.5945, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.2834113836288452, |
|
"learning_rate": 4.44616576886304e-05, |
|
"loss": 0.6449, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.2503283619880676, |
|
"learning_rate": 4.443659566280938e-05, |
|
"loss": 0.6243, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.35525229573249817, |
|
"learning_rate": 4.441148415713303e-05, |
|
"loss": 0.6283, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.2495538890361786, |
|
"learning_rate": 4.438632323552786e-05, |
|
"loss": 0.5959, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.24201400578022003, |
|
"learning_rate": 4.436111296204617e-05, |
|
"loss": 0.6242, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.24746832251548767, |
|
"learning_rate": 4.4335853400865906e-05, |
|
"loss": 0.5989, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.2049688845872879, |
|
"learning_rate": 4.4310544616290475e-05, |
|
"loss": 0.6375, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.2205064594745636, |
|
"learning_rate": 4.42851866727486e-05, |
|
"loss": 0.6253, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.2159041166305542, |
|
"learning_rate": 4.425977963479415e-05, |
|
"loss": 0.6027, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.2615651488304138, |
|
"learning_rate": 4.423432356710597e-05, |
|
"loss": 0.6134, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.23875150084495544, |
|
"learning_rate": 4.420881853448771e-05, |
|
"loss": 0.5926, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.25981611013412476, |
|
"learning_rate": 4.418326460186769e-05, |
|
"loss": 0.623, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.5433134436607361, |
|
"learning_rate": 4.415766183429871e-05, |
|
"loss": 0.6143, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.23302482068538666, |
|
"learning_rate": 4.413201029695788e-05, |
|
"loss": 0.625, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.2442236691713333, |
|
"learning_rate": 4.410631005514646e-05, |
|
"loss": 0.623, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.25277549028396606, |
|
"learning_rate": 4.4080561174289713e-05, |
|
"loss": 0.6298, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.26706215739250183, |
|
"learning_rate": 4.405476371993673e-05, |
|
"loss": 0.5957, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.19037441909313202, |
|
"learning_rate": 4.4028917757760216e-05, |
|
"loss": 0.6228, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.21772147715091705, |
|
"learning_rate": 4.4003023353556406e-05, |
|
"loss": 0.6402, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.19529011845588684, |
|
"learning_rate": 4.3977080573244836e-05, |
|
"loss": 0.5973, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.22065111994743347, |
|
"learning_rate": 4.395108948286818e-05, |
|
"loss": 0.6181, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.3971559405326843, |
|
"learning_rate": 4.3925050148592124e-05, |
|
"loss": 0.6217, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.2587713897228241, |
|
"learning_rate": 4.389896263670514e-05, |
|
"loss": 0.5931, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.23226861655712128, |
|
"learning_rate": 4.387282701361837e-05, |
|
"loss": 0.6207, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.2062036246061325, |
|
"learning_rate": 4.3846643345865425e-05, |
|
"loss": 0.6303, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"eval_loss": 0.6429007649421692, |
|
"eval_runtime": 67.2472, |
|
"eval_samples_per_second": 29.741, |
|
"eval_steps_per_second": 0.937, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.2225368171930313, |
|
"learning_rate": 4.3820411700102216e-05, |
|
"loss": 0.6123, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.2737579941749573, |
|
"learning_rate": 4.3794132143106795e-05, |
|
"loss": 0.6255, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.2531569004058838, |
|
"learning_rate": 4.376780474177918e-05, |
|
"loss": 0.5988, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.27578508853912354, |
|
"learning_rate": 4.37414295631412e-05, |
|
"loss": 0.6075, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.2573574185371399, |
|
"learning_rate": 4.37150066743363e-05, |
|
"loss": 0.6217, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.28215906023979187, |
|
"learning_rate": 4.3688536142629376e-05, |
|
"loss": 0.6202, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.23517820239067078, |
|
"learning_rate": 4.366201803540663e-05, |
|
"loss": 0.6056, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.26566624641418457, |
|
"learning_rate": 4.363545242017534e-05, |
|
"loss": 0.6046, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.20942912995815277, |
|
"learning_rate": 4.360883936456377e-05, |
|
"loss": 0.5939, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.24037909507751465, |
|
"learning_rate": 4.358217893632093e-05, |
|
"loss": 0.6218, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.22285695374011993, |
|
"learning_rate": 4.355547120331641e-05, |
|
"loss": 0.6332, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.2597566545009613, |
|
"learning_rate": 4.352871623354025e-05, |
|
"loss": 0.6626, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.37357744574546814, |
|
"learning_rate": 4.350191409510275e-05, |
|
"loss": 0.6143, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.24381165206432343, |
|
"learning_rate": 4.347506485623424e-05, |
|
"loss": 0.6369, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.27438944578170776, |
|
"learning_rate": 4.344816858528501e-05, |
|
"loss": 0.6535, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.20593568682670593, |
|
"learning_rate": 4.342122535072505e-05, |
|
"loss": 0.6251, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.25618818402290344, |
|
"learning_rate": 4.33942352211439e-05, |
|
"loss": 0.6182, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.20692601799964905, |
|
"learning_rate": 4.33671982652505e-05, |
|
"loss": 0.6261, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.2003064602613449, |
|
"learning_rate": 4.334011455187298e-05, |
|
"loss": 0.6331, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.22887033224105835, |
|
"learning_rate": 4.3312984149958516e-05, |
|
"loss": 0.6292, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.21389944851398468, |
|
"learning_rate": 4.328580712857313e-05, |
|
"loss": 0.6094, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.26887232065200806, |
|
"learning_rate": 4.32585835569015e-05, |
|
"loss": 0.5942, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.30249106884002686, |
|
"learning_rate": 4.3231313504246875e-05, |
|
"loss": 0.6473, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.2598324418067932, |
|
"learning_rate": 4.320399704003075e-05, |
|
"loss": 0.6198, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.27700358629226685, |
|
"learning_rate": 4.317663423379283e-05, |
|
"loss": 0.6378, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.22245779633522034, |
|
"learning_rate": 4.3149225155190766e-05, |
|
"loss": 0.6458, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.27054449915885925, |
|
"learning_rate": 4.3121769874e-05, |
|
"loss": 0.6347, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.22811339795589447, |
|
"learning_rate": 4.309426846011362e-05, |
|
"loss": 0.6382, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.27327001094818115, |
|
"learning_rate": 4.306672098354211e-05, |
|
"loss": 0.6114, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.24687005579471588, |
|
"learning_rate": 4.3039127514413266e-05, |
|
"loss": 0.5949, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.39029061794281006, |
|
"learning_rate": 4.3011488122971945e-05, |
|
"loss": 0.6281, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.2590855360031128, |
|
"learning_rate": 4.298380287957989e-05, |
|
"loss": 0.6068, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.18194353580474854, |
|
"learning_rate": 4.29560718547156e-05, |
|
"loss": 0.6114, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.8544417023658752, |
|
"learning_rate": 4.292829511897409e-05, |
|
"loss": 0.6336, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.23495988547801971, |
|
"learning_rate": 4.290047274306678e-05, |
|
"loss": 0.6185, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.20022150874137878, |
|
"learning_rate": 4.2872604797821246e-05, |
|
"loss": 0.6001, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.40812554955482483, |
|
"learning_rate": 4.284469135418108e-05, |
|
"loss": 0.6365, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.23241877555847168, |
|
"learning_rate": 4.28167324832057e-05, |
|
"loss": 0.6046, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.26784956455230713, |
|
"learning_rate": 4.2788728256070165e-05, |
|
"loss": 0.604, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.25370949506759644, |
|
"learning_rate": 4.2760678744065005e-05, |
|
"loss": 0.6368, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.2346085160970688, |
|
"learning_rate": 4.273258401859602e-05, |
|
"loss": 0.5944, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.32089290022850037, |
|
"learning_rate": 4.2704444151184117e-05, |
|
"loss": 0.6124, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.19013577699661255, |
|
"learning_rate": 4.267625921346513e-05, |
|
"loss": 0.6167, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.27597373723983765, |
|
"learning_rate": 4.2648029277189616e-05, |
|
"loss": 0.6002, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.2857840657234192, |
|
"learning_rate": 4.261975441422268e-05, |
|
"loss": 0.5885, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.22939664125442505, |
|
"learning_rate": 4.259143469654382e-05, |
|
"loss": 0.6086, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.24333885312080383, |
|
"learning_rate": 4.25630701962467e-05, |
|
"loss": 0.6066, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.2737683057785034, |
|
"learning_rate": 4.253466098553899e-05, |
|
"loss": 0.6338, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.23578789830207825, |
|
"learning_rate": 4.2506207136742196e-05, |
|
"loss": 0.6088, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.3092876076698303, |
|
"learning_rate": 4.247770872229143e-05, |
|
"loss": 0.6405, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"eval_loss": 0.6421223282814026, |
|
"eval_runtime": 67.2062, |
|
"eval_samples_per_second": 29.759, |
|
"eval_steps_per_second": 0.937, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.24720308184623718, |
|
"learning_rate": 4.2449165814735294e-05, |
|
"loss": 0.6489, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.2425301969051361, |
|
"learning_rate": 4.242057848673561e-05, |
|
"loss": 0.6237, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.1967163234949112, |
|
"learning_rate": 4.239194681106733e-05, |
|
"loss": 0.6256, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.24658535420894623, |
|
"learning_rate": 4.2363270860618264e-05, |
|
"loss": 0.5891, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.22012916207313538, |
|
"learning_rate": 4.233455070838895e-05, |
|
"loss": 0.6072, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.32060471177101135, |
|
"learning_rate": 4.230578642749245e-05, |
|
"loss": 0.62, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.2524150013923645, |
|
"learning_rate": 4.2276978091154164e-05, |
|
"loss": 0.6002, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.22831355035305023, |
|
"learning_rate": 4.2248125772711635e-05, |
|
"loss": 0.6245, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.3438965678215027, |
|
"learning_rate": 4.221922954561439e-05, |
|
"loss": 0.6127, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.20607294142246246, |
|
"learning_rate": 4.2190289483423704e-05, |
|
"loss": 0.6074, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.3653343915939331, |
|
"learning_rate": 4.216130565981249e-05, |
|
"loss": 0.6156, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.19855797290802002, |
|
"learning_rate": 4.213227814856501e-05, |
|
"loss": 0.6107, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.2605169415473938, |
|
"learning_rate": 4.210320702357678e-05, |
|
"loss": 0.6246, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.2300351858139038, |
|
"learning_rate": 4.207409235885435e-05, |
|
"loss": 0.6347, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.9489219188690186, |
|
"learning_rate": 4.2044934228515063e-05, |
|
"loss": 0.6217, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.2525247037410736, |
|
"learning_rate": 4.201573270678696e-05, |
|
"loss": 0.6115, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.2521913945674896, |
|
"learning_rate": 4.1986487868008525e-05, |
|
"loss": 0.5965, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.24830523133277893, |
|
"learning_rate": 4.195719978662851e-05, |
|
"loss": 0.618, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.23050466179847717, |
|
"learning_rate": 4.192786853720576e-05, |
|
"loss": 0.6135, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.21613763272762299, |
|
"learning_rate": 4.1898494194409e-05, |
|
"loss": 0.6155, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.22277826070785522, |
|
"learning_rate": 4.1869076833016676e-05, |
|
"loss": 0.611, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.2443763017654419, |
|
"learning_rate": 4.1839616527916736e-05, |
|
"loss": 0.5888, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.513171374797821, |
|
"learning_rate": 4.181011335410645e-05, |
|
"loss": 0.6096, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.21167334914207458, |
|
"learning_rate": 4.178056738669221e-05, |
|
"loss": 0.6108, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.22910219430923462, |
|
"learning_rate": 4.175097870088937e-05, |
|
"loss": 0.6172, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.2219991236925125, |
|
"learning_rate": 4.172134737202202e-05, |
|
"loss": 0.6188, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.266703337430954, |
|
"learning_rate": 4.16916734755228e-05, |
|
"loss": 0.6331, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.23374725878238678, |
|
"learning_rate": 4.166195708693273e-05, |
|
"loss": 0.6173, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.2778140902519226, |
|
"learning_rate": 4.163219828190099e-05, |
|
"loss": 0.6345, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.23954761028289795, |
|
"learning_rate": 4.160239713618474e-05, |
|
"loss": 0.6205, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.23126688599586487, |
|
"learning_rate": 4.1572553725648945e-05, |
|
"loss": 0.6136, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.209469735622406, |
|
"learning_rate": 4.1542668126266135e-05, |
|
"loss": 0.6181, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.24480338394641876, |
|
"learning_rate": 4.151274041411626e-05, |
|
"loss": 0.627, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.27108994126319885, |
|
"learning_rate": 4.148277066538648e-05, |
|
"loss": 0.6169, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.2842053472995758, |
|
"learning_rate": 4.145275895637095e-05, |
|
"loss": 0.6347, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.23858144879341125, |
|
"learning_rate": 4.142270536347066e-05, |
|
"loss": 0.6172, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.18353480100631714, |
|
"learning_rate": 4.139260996319321e-05, |
|
"loss": 0.5939, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.24537691473960876, |
|
"learning_rate": 4.136247283215263e-05, |
|
"loss": 0.6219, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.30072882771492004, |
|
"learning_rate": 4.1332294047069215e-05, |
|
"loss": 0.6368, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.2931046485900879, |
|
"learning_rate": 4.1302073684769256e-05, |
|
"loss": 0.6108, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.23465721309185028, |
|
"learning_rate": 4.1271811822184914e-05, |
|
"loss": 0.6262, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.21646186709403992, |
|
"learning_rate": 4.124150853635398e-05, |
|
"loss": 0.6221, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.19293636083602905, |
|
"learning_rate": 4.1211163904419716e-05, |
|
"loss": 0.6147, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.2320639044046402, |
|
"learning_rate": 4.1180778003630624e-05, |
|
"loss": 0.5902, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.24089358747005463, |
|
"learning_rate": 4.115035091134027e-05, |
|
"loss": 0.6307, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.2302493005990982, |
|
"learning_rate": 4.1119882705007094e-05, |
|
"loss": 0.6283, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.23400545120239258, |
|
"learning_rate": 4.108937346219417e-05, |
|
"loss": 0.612, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.21376444399356842, |
|
"learning_rate": 4.1058823260569056e-05, |
|
"loss": 0.6185, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.23962020874023438, |
|
"learning_rate": 4.1028232177903586e-05, |
|
"loss": 0.599, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.20366187393665314, |
|
"learning_rate": 4.099760029207366e-05, |
|
"loss": 0.6041, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"eval_loss": 0.6386786103248596, |
|
"eval_runtime": 67.1728, |
|
"eval_samples_per_second": 29.774, |
|
"eval_steps_per_second": 0.938, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.19989852607250214, |
|
"learning_rate": 4.096692768105904e-05, |
|
"loss": 0.6166, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.2329440414905548, |
|
"learning_rate": 4.093621442294318e-05, |
|
"loss": 0.6111, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.3589790165424347, |
|
"learning_rate": 4.0905460595912995e-05, |
|
"loss": 0.5906, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.21661555767059326, |
|
"learning_rate": 4.0874666278258696e-05, |
|
"loss": 0.6191, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.21175633370876312, |
|
"learning_rate": 4.0843831548373536e-05, |
|
"loss": 0.6092, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.2863156199455261, |
|
"learning_rate": 4.0812956484753686e-05, |
|
"loss": 0.615, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.2971532344818115, |
|
"learning_rate": 4.078204116599796e-05, |
|
"loss": 0.6144, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.3574163019657135, |
|
"learning_rate": 4.0751085670807684e-05, |
|
"loss": 0.6297, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.23878632485866547, |
|
"learning_rate": 4.072009007798643e-05, |
|
"loss": 0.6164, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.19213823974132538, |
|
"learning_rate": 4.0689054466439865e-05, |
|
"loss": 0.6161, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.23007014393806458, |
|
"learning_rate": 4.0657978915175524e-05, |
|
"loss": 0.5919, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.6034734845161438, |
|
"learning_rate": 4.062686350330262e-05, |
|
"loss": 0.601, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.234625443816185, |
|
"learning_rate": 4.059570831003184e-05, |
|
"loss": 0.6298, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.2770217955112457, |
|
"learning_rate": 4.056451341467514e-05, |
|
"loss": 0.6066, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.3099740147590637, |
|
"learning_rate": 4.053327889664556e-05, |
|
"loss": 0.6296, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.2302497923374176, |
|
"learning_rate": 4.050200483545697e-05, |
|
"loss": 0.5892, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.24834463000297546, |
|
"learning_rate": 4.047069131072396e-05, |
|
"loss": 0.6186, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.4216982126235962, |
|
"learning_rate": 4.043933840216154e-05, |
|
"loss": 0.6059, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.19061365723609924, |
|
"learning_rate": 4.040794618958499e-05, |
|
"loss": 0.5777, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.27531304955482483, |
|
"learning_rate": 4.0376514752909644e-05, |
|
"loss": 0.6293, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.29000842571258545, |
|
"learning_rate": 4.034504417215071e-05, |
|
"loss": 0.6534, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.2087034285068512, |
|
"learning_rate": 4.0313534527423014e-05, |
|
"loss": 0.6039, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.23176586627960205, |
|
"learning_rate": 4.028198589894086e-05, |
|
"loss": 0.6382, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.23297011852264404, |
|
"learning_rate": 4.0250398367017754e-05, |
|
"loss": 0.6058, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.27167224884033203, |
|
"learning_rate": 4.021877201206628e-05, |
|
"loss": 0.6133, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.2260424941778183, |
|
"learning_rate": 4.018710691459783e-05, |
|
"loss": 0.6564, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.20277553796768188, |
|
"learning_rate": 4.015540315522242e-05, |
|
"loss": 0.6115, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.23194430768489838, |
|
"learning_rate": 4.012366081464851e-05, |
|
"loss": 0.5972, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.4496138095855713, |
|
"learning_rate": 4.009187997368275e-05, |
|
"loss": 0.601, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.2352319061756134, |
|
"learning_rate": 4.0060060713229806e-05, |
|
"loss": 0.6126, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.2365293949842453, |
|
"learning_rate": 4.002820311429218e-05, |
|
"loss": 0.5925, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.27040597796440125, |
|
"learning_rate": 3.9996307257969936e-05, |
|
"loss": 0.6373, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.29712024331092834, |
|
"learning_rate": 3.996437322546053e-05, |
|
"loss": 0.6174, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.2615072727203369, |
|
"learning_rate": 3.993240109805863e-05, |
|
"loss": 0.6315, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.19362391531467438, |
|
"learning_rate": 3.990039095715587e-05, |
|
"loss": 0.6553, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.27068230509757996, |
|
"learning_rate": 3.986834288424064e-05, |
|
"loss": 0.5997, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.24307383596897125, |
|
"learning_rate": 3.9836256960897916e-05, |
|
"loss": 0.6098, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.2581513226032257, |
|
"learning_rate": 3.980413326880902e-05, |
|
"loss": 0.618, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.2150333821773529, |
|
"learning_rate": 3.9771971889751426e-05, |
|
"loss": 0.6039, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.28339913487434387, |
|
"learning_rate": 3.9739772905598525e-05, |
|
"loss": 0.6122, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.5444418787956238, |
|
"learning_rate": 3.970753639831949e-05, |
|
"loss": 0.6242, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.2532576322555542, |
|
"learning_rate": 3.967526244997896e-05, |
|
"loss": 0.6069, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.2285206913948059, |
|
"learning_rate": 3.9642951142736936e-05, |
|
"loss": 0.6039, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.2928161919116974, |
|
"learning_rate": 3.961060255884849e-05, |
|
"loss": 0.6098, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.3057214617729187, |
|
"learning_rate": 3.957821678066359e-05, |
|
"loss": 0.6299, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.2918532192707062, |
|
"learning_rate": 3.9545793890626925e-05, |
|
"loss": 0.6069, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.20853939652442932, |
|
"learning_rate": 3.951333397127762e-05, |
|
"loss": 0.6228, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.2272331267595291, |
|
"learning_rate": 3.948083710524909e-05, |
|
"loss": 0.6051, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.22748872637748718, |
|
"learning_rate": 3.9448303375268794e-05, |
|
"loss": 0.6259, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.23295633494853973, |
|
"learning_rate": 3.9415732864158026e-05, |
|
"loss": 0.5912, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"eval_loss": 0.6369640827178955, |
|
"eval_runtime": 67.3439, |
|
"eval_samples_per_second": 29.698, |
|
"eval_steps_per_second": 0.935, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.25066569447517395, |
|
"learning_rate": 3.9383125654831747e-05, |
|
"loss": 0.594, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.1922658085823059, |
|
"learning_rate": 3.935048183029831e-05, |
|
"loss": 0.6048, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.23781736195087433, |
|
"learning_rate": 3.93178014736593e-05, |
|
"loss": 0.6002, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.25118449330329895, |
|
"learning_rate": 3.9285084668109276e-05, |
|
"loss": 0.6013, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.2312859296798706, |
|
"learning_rate": 3.925233149693561e-05, |
|
"loss": 0.6118, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.25878220796585083, |
|
"learning_rate": 3.921954204351826e-05, |
|
"loss": 0.6091, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.25731998682022095, |
|
"learning_rate": 3.9186716391329485e-05, |
|
"loss": 0.6221, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.5314686894416809, |
|
"learning_rate": 3.9153854623933785e-05, |
|
"loss": 0.6488, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.2735700309276581, |
|
"learning_rate": 3.912095682498752e-05, |
|
"loss": 0.5929, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.24641837179660797, |
|
"learning_rate": 3.908802307823883e-05, |
|
"loss": 0.6052, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.2285420298576355, |
|
"learning_rate": 3.905505346752733e-05, |
|
"loss": 0.6117, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.33809909224510193, |
|
"learning_rate": 3.902204807678398e-05, |
|
"loss": 0.6045, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.2631922662258148, |
|
"learning_rate": 3.8989006990030776e-05, |
|
"loss": 0.5957, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.2324107438325882, |
|
"learning_rate": 3.89559302913806e-05, |
|
"loss": 0.6154, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.28898167610168457, |
|
"learning_rate": 3.8922818065037e-05, |
|
"loss": 0.607, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.26003435254096985, |
|
"learning_rate": 3.888967039529398e-05, |
|
"loss": 0.6149, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.203714519739151, |
|
"learning_rate": 3.885648736653574e-05, |
|
"loss": 0.6087, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.24271652102470398, |
|
"learning_rate": 3.8823269063236525e-05, |
|
"loss": 0.6175, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.32109883427619934, |
|
"learning_rate": 3.8790015569960346e-05, |
|
"loss": 0.5913, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.23378537595272064, |
|
"learning_rate": 3.8756726971360835e-05, |
|
"loss": 0.6011, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.30031922459602356, |
|
"learning_rate": 3.872340335218096e-05, |
|
"loss": 0.6384, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.18759281933307648, |
|
"learning_rate": 3.869004479725285e-05, |
|
"loss": 0.6099, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.18216826021671295, |
|
"learning_rate": 3.865665139149759e-05, |
|
"loss": 0.6137, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.215232253074646, |
|
"learning_rate": 3.862322321992495e-05, |
|
"loss": 0.6235, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.28549501299858093, |
|
"learning_rate": 3.858976036763323e-05, |
|
"loss": 0.6003, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.2499440610408783, |
|
"learning_rate": 3.855626291980901e-05, |
|
"loss": 0.6031, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.22503146529197693, |
|
"learning_rate": 3.852273096172694e-05, |
|
"loss": 0.6188, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.2220824956893921, |
|
"learning_rate": 3.848916457874952e-05, |
|
"loss": 0.6354, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.20054738223552704, |
|
"learning_rate": 3.845556385632688e-05, |
|
"loss": 0.6124, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.28167736530303955, |
|
"learning_rate": 3.842192887999659e-05, |
|
"loss": 0.6066, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.23211318254470825, |
|
"learning_rate": 3.83882597353834e-05, |
|
"loss": 0.6366, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.24710510671138763, |
|
"learning_rate": 3.835455650819905e-05, |
|
"loss": 0.5974, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.20109450817108154, |
|
"learning_rate": 3.832081928424205e-05, |
|
"loss": 0.6194, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.228953555226326, |
|
"learning_rate": 3.828704814939742e-05, |
|
"loss": 0.6056, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.24477165937423706, |
|
"learning_rate": 3.8253243189636554e-05, |
|
"loss": 0.6425, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.2542911767959595, |
|
"learning_rate": 3.8219404491016956e-05, |
|
"loss": 0.6043, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.22980964183807373, |
|
"learning_rate": 3.8185532139681964e-05, |
|
"loss": 0.5909, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.2753266394138336, |
|
"learning_rate": 3.8151626221860636e-05, |
|
"loss": 0.6307, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.2397356778383255, |
|
"learning_rate": 3.811768682386747e-05, |
|
"loss": 0.6254, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.2399650514125824, |
|
"learning_rate": 3.808371403210219e-05, |
|
"loss": 0.6151, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.26702845096588135, |
|
"learning_rate": 3.8049707933049524e-05, |
|
"loss": 0.652, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.2288055717945099, |
|
"learning_rate": 3.8015668613279e-05, |
|
"loss": 0.5837, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.25483304262161255, |
|
"learning_rate": 3.798159615944471e-05, |
|
"loss": 0.6001, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.21478036046028137, |
|
"learning_rate": 3.79474906582851e-05, |
|
"loss": 0.6437, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.24260424077510834, |
|
"learning_rate": 3.7913352196622765e-05, |
|
"loss": 0.5938, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.2208358496427536, |
|
"learning_rate": 3.7879180861364155e-05, |
|
"loss": 0.6158, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.19732904434204102, |
|
"learning_rate": 3.7844976739499454e-05, |
|
"loss": 0.5967, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.2552759349346161, |
|
"learning_rate": 3.781073991810229e-05, |
|
"loss": 0.6201, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.22178280353546143, |
|
"learning_rate": 3.7776470484329554e-05, |
|
"loss": 0.5918, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.23287595808506012, |
|
"learning_rate": 3.7742168525421115e-05, |
|
"loss": 0.6121, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"eval_loss": 0.6359540820121765, |
|
"eval_runtime": 67.1833, |
|
"eval_samples_per_second": 29.769, |
|
"eval_steps_per_second": 0.938, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.3245614469051361, |
|
"learning_rate": 3.7707834128699695e-05, |
|
"loss": 0.6239, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.20530427992343903, |
|
"learning_rate": 3.767346738157057e-05, |
|
"loss": 0.5835, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.23378130793571472, |
|
"learning_rate": 3.763906837152136e-05, |
|
"loss": 0.6247, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.2955145835876465, |
|
"learning_rate": 3.760463718612182e-05, |
|
"loss": 0.603, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.23302963376045227, |
|
"learning_rate": 3.757017391302363e-05, |
|
"loss": 0.607, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.2423810064792633, |
|
"learning_rate": 3.7535678639960145e-05, |
|
"loss": 0.6153, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.25609883666038513, |
|
"learning_rate": 3.7501151454746184e-05, |
|
"loss": 0.6355, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.22590801119804382, |
|
"learning_rate": 3.746659244527779e-05, |
|
"loss": 0.5878, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.3146507740020752, |
|
"learning_rate": 3.7432001699532066e-05, |
|
"loss": 0.6061, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.22446873784065247, |
|
"learning_rate": 3.739737930556685e-05, |
|
"loss": 0.593, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.19154445827007294, |
|
"learning_rate": 3.736272535152058e-05, |
|
"loss": 0.6231, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.22120869159698486, |
|
"learning_rate": 3.732803992561204e-05, |
|
"loss": 0.6028, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.22038020193576813, |
|
"learning_rate": 3.72933231161401e-05, |
|
"loss": 0.5896, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.22278408706188202, |
|
"learning_rate": 3.725857501148356e-05, |
|
"loss": 0.6177, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.2636203467845917, |
|
"learning_rate": 3.722379570010087e-05, |
|
"loss": 0.6137, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.2235359400510788, |
|
"learning_rate": 3.718898527052993e-05, |
|
"loss": 0.6156, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.24960432946681976, |
|
"learning_rate": 3.715414381138785e-05, |
|
"loss": 0.6181, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.2168387770652771, |
|
"learning_rate": 3.7119271411370736e-05, |
|
"loss": 0.5881, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.2279047667980194, |
|
"learning_rate": 3.708436815925348e-05, |
|
"loss": 0.626, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.23511163890361786, |
|
"learning_rate": 3.704943414388946e-05, |
|
"loss": 0.6249, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.2570114731788635, |
|
"learning_rate": 3.7014469454210425e-05, |
|
"loss": 0.605, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.2473715841770172, |
|
"learning_rate": 3.697947417922619e-05, |
|
"loss": 0.6271, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.2647952735424042, |
|
"learning_rate": 3.694444840802443e-05, |
|
"loss": 0.6002, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.21034955978393555, |
|
"learning_rate": 3.690939222977045e-05, |
|
"loss": 0.6136, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.2274123579263687, |
|
"learning_rate": 3.6874305733706967e-05, |
|
"loss": 0.5838, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.22881461679935455, |
|
"learning_rate": 3.6839189009153886e-05, |
|
"loss": 0.6043, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.2513927221298218, |
|
"learning_rate": 3.680404214550806e-05, |
|
"loss": 0.6119, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.23109400272369385, |
|
"learning_rate": 3.676886523224306e-05, |
|
"loss": 0.5832, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.2706112563610077, |
|
"learning_rate": 3.673365835890896e-05, |
|
"loss": 0.61, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.2524433732032776, |
|
"learning_rate": 3.669842161513211e-05, |
|
"loss": 0.5975, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.20997729897499084, |
|
"learning_rate": 3.6663155090614895e-05, |
|
"loss": 0.6122, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.21376433968544006, |
|
"learning_rate": 3.66278588751355e-05, |
|
"loss": 0.5988, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.23716038465499878, |
|
"learning_rate": 3.659253305854772e-05, |
|
"loss": 0.6063, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.2442622035741806, |
|
"learning_rate": 3.655717773078068e-05, |
|
"loss": 0.6223, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.2357431948184967, |
|
"learning_rate": 3.652179298183866e-05, |
|
"loss": 0.6066, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.2631849944591522, |
|
"learning_rate": 3.6486378901800814e-05, |
|
"loss": 0.5991, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.3156498968601227, |
|
"learning_rate": 3.645093558082098e-05, |
|
"loss": 0.6226, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.2296159863471985, |
|
"learning_rate": 3.641546310912741e-05, |
|
"loss": 0.603, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.21041952073574066, |
|
"learning_rate": 3.63799615770226e-05, |
|
"loss": 0.5871, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.2268192023038864, |
|
"learning_rate": 3.634443107488301e-05, |
|
"loss": 0.5855, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.24145358800888062, |
|
"learning_rate": 3.630887169315883e-05, |
|
"loss": 0.6153, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.2399817854166031, |
|
"learning_rate": 3.62732835223738e-05, |
|
"loss": 0.6173, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.21857813000679016, |
|
"learning_rate": 3.6237666653124934e-05, |
|
"loss": 0.6205, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.2975575029850006, |
|
"learning_rate": 3.62020211760823e-05, |
|
"loss": 0.6071, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.23792515695095062, |
|
"learning_rate": 3.6166347181988805e-05, |
|
"loss": 0.6046, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.26107633113861084, |
|
"learning_rate": 3.613064476165993e-05, |
|
"loss": 0.6321, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.25213637948036194, |
|
"learning_rate": 3.6094914005983546e-05, |
|
"loss": 0.5989, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.21624846756458282, |
|
"learning_rate": 3.605915500591964e-05, |
|
"loss": 0.6212, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.22757196426391602, |
|
"learning_rate": 3.6023367852500126e-05, |
|
"loss": 0.6179, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.24339766800403595, |
|
"learning_rate": 3.5987552636828544e-05, |
|
"loss": 0.613, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"eval_loss": 0.6343603730201721, |
|
"eval_runtime": 67.246, |
|
"eval_samples_per_second": 29.742, |
|
"eval_steps_per_second": 0.937, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.23893538117408752, |
|
"learning_rate": 3.5951709450079916e-05, |
|
"loss": 0.631, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.23567819595336914, |
|
"learning_rate": 3.5915838383500445e-05, |
|
"loss": 0.6115, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.3273671865463257, |
|
"learning_rate": 3.5879939528407317e-05, |
|
"loss": 0.5974, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.22168773412704468, |
|
"learning_rate": 3.5844012976188454e-05, |
|
"loss": 0.5933, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.3699816167354584, |
|
"learning_rate": 3.580805881830231e-05, |
|
"loss": 0.6095, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.23126068711280823, |
|
"learning_rate": 3.577207714627756e-05, |
|
"loss": 0.6214, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.21293146908283234, |
|
"learning_rate": 3.5736068051712985e-05, |
|
"loss": 0.6085, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.22205476462841034, |
|
"learning_rate": 3.570003162627714e-05, |
|
"loss": 0.594, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.28807544708251953, |
|
"learning_rate": 3.5663967961708156e-05, |
|
"loss": 0.6233, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.2474154680967331, |
|
"learning_rate": 3.562787714981353e-05, |
|
"loss": 0.6122, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.3010067641735077, |
|
"learning_rate": 3.5591759282469836e-05, |
|
"loss": 0.6027, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.21274247765541077, |
|
"learning_rate": 3.555561445162255e-05, |
|
"loss": 0.6373, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.22347737848758698, |
|
"learning_rate": 3.551944274928578e-05, |
|
"loss": 0.6108, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.24143600463867188, |
|
"learning_rate": 3.548324426754204e-05, |
|
"loss": 0.6406, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.2388855516910553, |
|
"learning_rate": 3.544701909854201e-05, |
|
"loss": 0.6053, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.22766222059726715, |
|
"learning_rate": 3.541076733450433e-05, |
|
"loss": 0.5907, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.26293396949768066, |
|
"learning_rate": 3.537448906771532e-05, |
|
"loss": 0.6113, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.3174780607223511, |
|
"learning_rate": 3.5338184390528765e-05, |
|
"loss": 0.6083, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.2787187099456787, |
|
"learning_rate": 3.530185339536571e-05, |
|
"loss": 0.6206, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.25283700227737427, |
|
"learning_rate": 3.526549617471417e-05, |
|
"loss": 0.6078, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.2156975120306015, |
|
"learning_rate": 3.522911282112894e-05, |
|
"loss": 0.5923, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.253841757774353, |
|
"learning_rate": 3.5192703427231344e-05, |
|
"loss": 0.6046, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.2438536435365677, |
|
"learning_rate": 3.5156268085708974e-05, |
|
"loss": 0.6121, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.2552967667579651, |
|
"learning_rate": 3.511980688931551e-05, |
|
"loss": 0.6314, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.4905732572078705, |
|
"learning_rate": 3.5083319930870414e-05, |
|
"loss": 0.5972, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.2145986407995224, |
|
"learning_rate": 3.504680730325876e-05, |
|
"loss": 0.6234, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.24331778287887573, |
|
"learning_rate": 3.501026909943095e-05, |
|
"loss": 0.6076, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.2849058210849762, |
|
"learning_rate": 3.4973705412402514e-05, |
|
"loss": 0.6067, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.2842152714729309, |
|
"learning_rate": 3.4937116335253836e-05, |
|
"loss": 0.6028, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.24628552794456482, |
|
"learning_rate": 3.490050196112995e-05, |
|
"loss": 0.6119, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.237552210688591, |
|
"learning_rate": 3.486386238324029e-05, |
|
"loss": 0.5985, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.2442472279071808, |
|
"learning_rate": 3.482719769485841e-05, |
|
"loss": 0.6007, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.29659032821655273, |
|
"learning_rate": 3.479050798932186e-05, |
|
"loss": 0.6082, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.2106475681066513, |
|
"learning_rate": 3.475379336003183e-05, |
|
"loss": 0.6247, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.23286166787147522, |
|
"learning_rate": 3.471705390045295e-05, |
|
"loss": 0.5975, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.2842426002025604, |
|
"learning_rate": 3.468028970411308e-05, |
|
"loss": 0.6206, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.2059846669435501, |
|
"learning_rate": 3.4643500864603075e-05, |
|
"loss": 0.6074, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.24341388046741486, |
|
"learning_rate": 3.4606687475576473e-05, |
|
"loss": 0.6219, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.25778728723526, |
|
"learning_rate": 3.456984963074934e-05, |
|
"loss": 0.6136, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.2483309656381607, |
|
"learning_rate": 3.453298742389999e-05, |
|
"loss": 0.6033, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.26595956087112427, |
|
"learning_rate": 3.4496100948868774e-05, |
|
"loss": 0.6104, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.3210870623588562, |
|
"learning_rate": 3.445919029955779e-05, |
|
"loss": 0.5867, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.2549401819705963, |
|
"learning_rate": 3.442225556993071e-05, |
|
"loss": 0.6011, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.2380758821964264, |
|
"learning_rate": 3.438529685401248e-05, |
|
"loss": 0.613, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.1969003528356552, |
|
"learning_rate": 3.4348314245889114e-05, |
|
"loss": 0.5812, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.38220301270484924, |
|
"learning_rate": 3.431130783970747e-05, |
|
"loss": 0.6103, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.3186286687850952, |
|
"learning_rate": 3.427427772967496e-05, |
|
"loss": 0.5989, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.2749629020690918, |
|
"learning_rate": 3.423722401005936e-05, |
|
"loss": 0.6073, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.2259276658296585, |
|
"learning_rate": 3.420014677518854e-05, |
|
"loss": 0.627, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.22658781707286835, |
|
"learning_rate": 3.416304611945022e-05, |
|
"loss": 0.6126, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"eval_loss": 0.6338043212890625, |
|
"eval_runtime": 67.2252, |
|
"eval_samples_per_second": 29.751, |
|
"eval_steps_per_second": 0.937, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.24936778843402863, |
|
"learning_rate": 3.4125922137291774e-05, |
|
"loss": 0.6181, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.25658896565437317, |
|
"learning_rate": 3.4088774923219936e-05, |
|
"loss": 0.572, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.2544236481189728, |
|
"learning_rate": 3.4051604571800584e-05, |
|
"loss": 0.5965, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.24977684020996094, |
|
"learning_rate": 3.4014411177658494e-05, |
|
"loss": 0.5839, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.29316794872283936, |
|
"learning_rate": 3.397719483547712e-05, |
|
"loss": 0.6268, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.31263527274131775, |
|
"learning_rate": 3.393995563999832e-05, |
|
"loss": 0.601, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.2534136474132538, |
|
"learning_rate": 3.390269368602212e-05, |
|
"loss": 0.6274, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.20804938673973083, |
|
"learning_rate": 3.38654090684065e-05, |
|
"loss": 0.6127, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.3475063443183899, |
|
"learning_rate": 3.382810188206712e-05, |
|
"loss": 0.602, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.21181778609752655, |
|
"learning_rate": 3.379077222197713e-05, |
|
"loss": 0.6136, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.3004330098628998, |
|
"learning_rate": 3.3753420183166845e-05, |
|
"loss": 0.5797, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.17146137356758118, |
|
"learning_rate": 3.3716045860723565e-05, |
|
"loss": 0.581, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.23555147647857666, |
|
"learning_rate": 3.367864934979133e-05, |
|
"loss": 0.6013, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.21328724920749664, |
|
"learning_rate": 3.364123074557066e-05, |
|
"loss": 0.6053, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.21844583749771118, |
|
"learning_rate": 3.36037901433183e-05, |
|
"loss": 0.6241, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.2694018483161926, |
|
"learning_rate": 3.356632763834702e-05, |
|
"loss": 0.5905, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.30904027819633484, |
|
"learning_rate": 3.3528843326025334e-05, |
|
"loss": 0.6002, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.35731956362724304, |
|
"learning_rate": 3.349133730177729e-05, |
|
"loss": 0.6118, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.33931079506874084, |
|
"learning_rate": 3.345380966108218e-05, |
|
"loss": 0.5879, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.21542824804782867, |
|
"learning_rate": 3.3416260499474334e-05, |
|
"loss": 0.5831, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.27431559562683105, |
|
"learning_rate": 3.3378689912542885e-05, |
|
"loss": 0.597, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.37733927369117737, |
|
"learning_rate": 3.3341097995931483e-05, |
|
"loss": 0.6167, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.23641188442707062, |
|
"learning_rate": 3.3303484845338095e-05, |
|
"loss": 0.5875, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.19541165232658386, |
|
"learning_rate": 3.326585055651475e-05, |
|
"loss": 0.583, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.22675809264183044, |
|
"learning_rate": 3.3228195225267255e-05, |
|
"loss": 0.5963, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.2038368433713913, |
|
"learning_rate": 3.319051894745503e-05, |
|
"loss": 0.5973, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.21011051535606384, |
|
"learning_rate": 3.3152821818990786e-05, |
|
"loss": 0.5982, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.26622429490089417, |
|
"learning_rate": 3.3115103935840326e-05, |
|
"loss": 0.6, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.25305575132369995, |
|
"learning_rate": 3.307736539402227e-05, |
|
"loss": 0.6024, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.2928634583950043, |
|
"learning_rate": 3.303960628960788e-05, |
|
"loss": 0.585, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.2512208819389343, |
|
"learning_rate": 3.3001826718720694e-05, |
|
"loss": 0.6148, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.22255320847034454, |
|
"learning_rate": 3.29640267775364e-05, |
|
"loss": 0.5983, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.30739256739616394, |
|
"learning_rate": 3.292620656228253e-05, |
|
"loss": 0.5949, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.25877636671066284, |
|
"learning_rate": 3.288836616923823e-05, |
|
"loss": 0.6027, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.2691468298435211, |
|
"learning_rate": 3.2850505694734005e-05, |
|
"loss": 0.5976, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.22831767797470093, |
|
"learning_rate": 3.281262523515149e-05, |
|
"loss": 0.6169, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.2406102865934372, |
|
"learning_rate": 3.277472488692319e-05, |
|
"loss": 0.5938, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.24443283677101135, |
|
"learning_rate": 3.273680474653224e-05, |
|
"loss": 0.6063, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.25821536779403687, |
|
"learning_rate": 3.269886491051217e-05, |
|
"loss": 0.6056, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.2244919389486313, |
|
"learning_rate": 3.266090547544664e-05, |
|
"loss": 0.5799, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.2633070647716522, |
|
"learning_rate": 3.2622926537969205e-05, |
|
"loss": 0.6082, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.2933361530303955, |
|
"learning_rate": 3.258492819476308e-05, |
|
"loss": 0.6109, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.23176881670951843, |
|
"learning_rate": 3.2546910542560855e-05, |
|
"loss": 0.5844, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.2534427046775818, |
|
"learning_rate": 3.250887367814429e-05, |
|
"loss": 0.6032, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.23787447810173035, |
|
"learning_rate": 3.247081769834406e-05, |
|
"loss": 0.5859, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.23194755613803864, |
|
"learning_rate": 3.243274270003949e-05, |
|
"loss": 0.6103, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.24576812982559204, |
|
"learning_rate": 3.239464878015833e-05, |
|
"loss": 0.5964, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.24313139915466309, |
|
"learning_rate": 3.2356536035676485e-05, |
|
"loss": 0.6119, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.2334582507610321, |
|
"learning_rate": 3.231840456361781e-05, |
|
"loss": 0.5947, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.31086283922195435, |
|
"learning_rate": 3.2280254461053813e-05, |
|
"loss": 0.5932, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"eval_loss": 0.6343629956245422, |
|
"eval_runtime": 67.3178, |
|
"eval_samples_per_second": 29.71, |
|
"eval_steps_per_second": 0.936, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.3237190842628479, |
|
"learning_rate": 3.224208582510342e-05, |
|
"loss": 0.5944, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.22707690298557281, |
|
"learning_rate": 3.220389875293276e-05, |
|
"loss": 0.6017, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.24644169211387634, |
|
"learning_rate": 3.216569334175488e-05, |
|
"loss": 0.6058, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.2204134464263916, |
|
"learning_rate": 3.212746968882952e-05, |
|
"loss": 0.5961, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.280127614736557, |
|
"learning_rate": 3.208922789146287e-05, |
|
"loss": 0.5885, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.24572765827178955, |
|
"learning_rate": 3.205096804700729e-05, |
|
"loss": 0.6038, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.23512475192546844, |
|
"learning_rate": 3.2012690252861085e-05, |
|
"loss": 0.6248, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.2967652678489685, |
|
"learning_rate": 3.197439460646826e-05, |
|
"loss": 0.5897, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.8382120728492737, |
|
"learning_rate": 3.193608120531826e-05, |
|
"loss": 0.6157, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.26500606536865234, |
|
"learning_rate": 3.189775014694575e-05, |
|
"loss": 0.5879, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.22318434715270996, |
|
"learning_rate": 3.1859401528930325e-05, |
|
"loss": 0.6026, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.25416818261146545, |
|
"learning_rate": 3.182103544889628e-05, |
|
"loss": 0.5955, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.2655612826347351, |
|
"learning_rate": 3.1782652004512367e-05, |
|
"loss": 0.5644, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.25753074884414673, |
|
"learning_rate": 3.174425129349156e-05, |
|
"loss": 0.5813, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.4179852306842804, |
|
"learning_rate": 3.1705833413590766e-05, |
|
"loss": 0.5831, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.26823192834854126, |
|
"learning_rate": 3.16673984626106e-05, |
|
"loss": 0.609, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.23745331168174744, |
|
"learning_rate": 3.162894653839515e-05, |
|
"loss": 0.5846, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.23219923675060272, |
|
"learning_rate": 3.1590477738831704e-05, |
|
"loss": 0.6069, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.22652706503868103, |
|
"learning_rate": 3.155199216185051e-05, |
|
"loss": 0.5775, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.22930103540420532, |
|
"learning_rate": 3.151348990542452e-05, |
|
"loss": 0.5936, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.19906581938266754, |
|
"learning_rate": 3.1474971067569146e-05, |
|
"loss": 0.6061, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.24898895621299744, |
|
"learning_rate": 3.1436435746342046e-05, |
|
"loss": 0.5913, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.30313482880592346, |
|
"learning_rate": 3.139788403984279e-05, |
|
"loss": 0.5724, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.24632027745246887, |
|
"learning_rate": 3.135931604621271e-05, |
|
"loss": 0.5828, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.23896867036819458, |
|
"learning_rate": 3.1320731863634533e-05, |
|
"loss": 0.6245, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.20651116967201233, |
|
"learning_rate": 3.128213159033226e-05, |
|
"loss": 0.5987, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.24778035283088684, |
|
"learning_rate": 3.124351532457084e-05, |
|
"loss": 0.5993, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.3181811273097992, |
|
"learning_rate": 3.120488316465592e-05, |
|
"loss": 0.5981, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.33533188700675964, |
|
"learning_rate": 3.116623520893361e-05, |
|
"loss": 0.6235, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.3419838845729828, |
|
"learning_rate": 3.1127571555790234e-05, |
|
"loss": 0.6245, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.34298205375671387, |
|
"learning_rate": 3.10888923036521e-05, |
|
"loss": 0.6195, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.27872177958488464, |
|
"learning_rate": 3.105019755098519e-05, |
|
"loss": 0.5843, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.35872554779052734, |
|
"learning_rate": 3.1011487396294966e-05, |
|
"loss": 0.6106, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.29521918296813965, |
|
"learning_rate": 3.0972761938126085e-05, |
|
"loss": 0.5885, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.20505386590957642, |
|
"learning_rate": 3.0934021275062196e-05, |
|
"loss": 0.5906, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.2266906499862671, |
|
"learning_rate": 3.089526550572561e-05, |
|
"loss": 0.6021, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.3935636281967163, |
|
"learning_rate": 3.085649472877713e-05, |
|
"loss": 0.6167, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.25832274556159973, |
|
"learning_rate": 3.0817709042915744e-05, |
|
"loss": 0.6253, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.3040432333946228, |
|
"learning_rate": 3.077890854687839e-05, |
|
"loss": 0.6024, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.33817920088768005, |
|
"learning_rate": 3.074009333943974e-05, |
|
"loss": 0.5923, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.31015434861183167, |
|
"learning_rate": 3.070126351941188e-05, |
|
"loss": 0.5873, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.3499102294445038, |
|
"learning_rate": 3.0662419185644115e-05, |
|
"loss": 0.6071, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.42582130432128906, |
|
"learning_rate": 3.062356043702268e-05, |
|
"loss": 0.5789, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.22721824049949646, |
|
"learning_rate": 3.058468737247054e-05, |
|
"loss": 0.5973, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.28947141766548157, |
|
"learning_rate": 3.054580009094706e-05, |
|
"loss": 0.6056, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.2677144408226013, |
|
"learning_rate": 3.0506898691447844e-05, |
|
"loss": 0.5715, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.2382638156414032, |
|
"learning_rate": 3.0467983273004398e-05, |
|
"loss": 0.5976, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.29905447363853455, |
|
"learning_rate": 3.0429053934683928e-05, |
|
"loss": 0.6101, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.27092835307121277, |
|
"learning_rate": 3.0390110775589086e-05, |
|
"loss": 0.5924, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.2604258954524994, |
|
"learning_rate": 3.0351153894857698e-05, |
|
"loss": 0.5927, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"eval_loss": 0.633247435092926, |
|
"eval_runtime": 67.1714, |
|
"eval_samples_per_second": 29.775, |
|
"eval_steps_per_second": 0.938, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.4717111885547638, |
|
"learning_rate": 3.0312183391662523e-05, |
|
"loss": 0.6141, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.29630929231643677, |
|
"learning_rate": 3.0273199365210992e-05, |
|
"loss": 0.6167, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.2589368224143982, |
|
"learning_rate": 3.023420191474498e-05, |
|
"loss": 0.5991, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.28849098086357117, |
|
"learning_rate": 3.019519113954053e-05, |
|
"loss": 0.6362, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.2815060317516327, |
|
"learning_rate": 3.0156167138907583e-05, |
|
"loss": 0.5963, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.273151695728302, |
|
"learning_rate": 3.0117130012189775e-05, |
|
"loss": 0.5971, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.4395759403705597, |
|
"learning_rate": 3.0078079858764145e-05, |
|
"loss": 0.5809, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.2783837616443634, |
|
"learning_rate": 3.0039016778040906e-05, |
|
"loss": 0.6155, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.26403704285621643, |
|
"learning_rate": 2.9999940869463156e-05, |
|
"loss": 0.5951, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.2775236666202545, |
|
"learning_rate": 2.9960852232506677e-05, |
|
"loss": 0.5948, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.2422449290752411, |
|
"learning_rate": 2.992175096667964e-05, |
|
"loss": 0.6123, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.3329131007194519, |
|
"learning_rate": 2.988263717152236e-05, |
|
"loss": 0.621, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.2817908227443695, |
|
"learning_rate": 2.9843510946607058e-05, |
|
"loss": 0.5881, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.35110849142074585, |
|
"learning_rate": 2.9804372391537587e-05, |
|
"loss": 0.6114, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.2615245580673218, |
|
"learning_rate": 2.9765221605949212e-05, |
|
"loss": 0.5923, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.2712041139602661, |
|
"learning_rate": 2.97260586895083e-05, |
|
"loss": 0.6016, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.24361445009708405, |
|
"learning_rate": 2.9686883741912125e-05, |
|
"loss": 0.6129, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.31295427680015564, |
|
"learning_rate": 2.964769686288858e-05, |
|
"loss": 0.5826, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.31069615483283997, |
|
"learning_rate": 2.9608498152195928e-05, |
|
"loss": 0.5942, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.2735722064971924, |
|
"learning_rate": 2.9569287709622563e-05, |
|
"loss": 0.6068, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.2219727486371994, |
|
"learning_rate": 2.953006563498673e-05, |
|
"loss": 0.5923, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.2567574977874756, |
|
"learning_rate": 2.94908320281363e-05, |
|
"loss": 0.5893, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.2815946936607361, |
|
"learning_rate": 2.9451586988948492e-05, |
|
"loss": 0.5921, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.31510668992996216, |
|
"learning_rate": 2.941233061732963e-05, |
|
"loss": 0.6057, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.26856091618537903, |
|
"learning_rate": 2.9373063013214897e-05, |
|
"loss": 0.5932, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.2610081434249878, |
|
"learning_rate": 2.933378427656806e-05, |
|
"loss": 0.6147, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.607680082321167, |
|
"learning_rate": 2.9294494507381225e-05, |
|
"loss": 0.5894, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.22400206327438354, |
|
"learning_rate": 2.925519380567459e-05, |
|
"loss": 0.5905, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.23891247808933258, |
|
"learning_rate": 2.9215882271496187e-05, |
|
"loss": 0.6151, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.26922714710235596, |
|
"learning_rate": 2.9176560004921605e-05, |
|
"loss": 0.5743, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.23906663060188293, |
|
"learning_rate": 2.9137227106053773e-05, |
|
"loss": 0.5635, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.328803688287735, |
|
"learning_rate": 2.9097883675022685e-05, |
|
"loss": 0.6124, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.23414617776870728, |
|
"learning_rate": 2.9058529811985145e-05, |
|
"loss": 0.5927, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.2562233507633209, |
|
"learning_rate": 2.9019165617124515e-05, |
|
"loss": 0.5924, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.2643386125564575, |
|
"learning_rate": 2.8979791190650445e-05, |
|
"loss": 0.6075, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.2533874809741974, |
|
"learning_rate": 2.8940406632798655e-05, |
|
"loss": 0.602, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.24434837698936462, |
|
"learning_rate": 2.8901012043830637e-05, |
|
"loss": 0.6164, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.2734259068965912, |
|
"learning_rate": 2.886160752403343e-05, |
|
"loss": 0.6107, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.28115034103393555, |
|
"learning_rate": 2.8822193173719347e-05, |
|
"loss": 0.6327, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.3063926696777344, |
|
"learning_rate": 2.8782769093225727e-05, |
|
"loss": 0.6123, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.37148985266685486, |
|
"learning_rate": 2.87433353829147e-05, |
|
"loss": 0.5871, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.2425338476896286, |
|
"learning_rate": 2.8703892143172874e-05, |
|
"loss": 0.6112, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.20781515538692474, |
|
"learning_rate": 2.8664439474411142e-05, |
|
"loss": 0.6139, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.2456682324409485, |
|
"learning_rate": 2.8624977477064397e-05, |
|
"loss": 0.5967, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.42635104060173035, |
|
"learning_rate": 2.858550625159127e-05, |
|
"loss": 0.5918, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.28098204731941223, |
|
"learning_rate": 2.8546025898473898e-05, |
|
"loss": 0.6112, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.2819831073284149, |
|
"learning_rate": 2.8506536518217634e-05, |
|
"loss": 0.5838, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.2633509933948517, |
|
"learning_rate": 2.8467038211350845e-05, |
|
"loss": 0.5969, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.411672979593277, |
|
"learning_rate": 2.8427531078424585e-05, |
|
"loss": 0.6103, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.35813358426094055, |
|
"learning_rate": 2.8388015220012404e-05, |
|
"loss": 0.5883, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"eval_loss": 0.6317408680915833, |
|
"eval_runtime": 67.2696, |
|
"eval_samples_per_second": 29.731, |
|
"eval_steps_per_second": 0.937, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.30284374952316284, |
|
"learning_rate": 2.8348490736710047e-05, |
|
"loss": 0.5987, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.24432061612606049, |
|
"learning_rate": 2.8308957729135226e-05, |
|
"loss": 0.6017, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.3139924108982086, |
|
"learning_rate": 2.8269416297927354e-05, |
|
"loss": 0.5923, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.2257777899503708, |
|
"learning_rate": 2.8229866543747263e-05, |
|
"loss": 0.5962, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.27044352889060974, |
|
"learning_rate": 2.8190308567277018e-05, |
|
"loss": 0.6025, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.24292701482772827, |
|
"learning_rate": 2.8150742469219583e-05, |
|
"loss": 0.6209, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.2715989947319031, |
|
"learning_rate": 2.8111168350298605e-05, |
|
"loss": 0.584, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.27733975648880005, |
|
"learning_rate": 2.8071586311258146e-05, |
|
"loss": 0.6006, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.22047729790210724, |
|
"learning_rate": 2.8031996452862443e-05, |
|
"loss": 0.6166, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.29286694526672363, |
|
"learning_rate": 2.7992398875895626e-05, |
|
"loss": 0.5847, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.3046290874481201, |
|
"learning_rate": 2.7952793681161472e-05, |
|
"loss": 0.5896, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.35147929191589355, |
|
"learning_rate": 2.7913180969483165e-05, |
|
"loss": 0.5912, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.3445433974266052, |
|
"learning_rate": 2.787356084170301e-05, |
|
"loss": 0.5868, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.2513396143913269, |
|
"learning_rate": 2.783393339868221e-05, |
|
"loss": 0.5864, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.2578549385070801, |
|
"learning_rate": 2.7794298741300566e-05, |
|
"loss": 0.6003, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.24509668350219727, |
|
"learning_rate": 2.7754656970456276e-05, |
|
"loss": 0.5681, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.2571702003479004, |
|
"learning_rate": 2.7715008187065607e-05, |
|
"loss": 0.5718, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.21920159459114075, |
|
"learning_rate": 2.7675352492062708e-05, |
|
"loss": 0.5838, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.4092113673686981, |
|
"learning_rate": 2.7635689986399303e-05, |
|
"loss": 0.5871, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.23578539490699768, |
|
"learning_rate": 2.759602077104448e-05, |
|
"loss": 0.6008, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.3018145263195038, |
|
"learning_rate": 2.7556344946984393e-05, |
|
"loss": 0.5997, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.2498631775379181, |
|
"learning_rate": 2.7516662615222005e-05, |
|
"loss": 0.5932, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.26965656876564026, |
|
"learning_rate": 2.7476973876776873e-05, |
|
"loss": 0.5662, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.263055682182312, |
|
"learning_rate": 2.743727883268485e-05, |
|
"loss": 0.6244, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.24924887716770172, |
|
"learning_rate": 2.7397577583997837e-05, |
|
"loss": 0.6451, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.24575600028038025, |
|
"learning_rate": 2.7357870231783535e-05, |
|
"loss": 0.6254, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.23808762431144714, |
|
"learning_rate": 2.7318156877125184e-05, |
|
"loss": 0.5941, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.25935983657836914, |
|
"learning_rate": 2.7278437621121306e-05, |
|
"loss": 0.5965, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.23470312356948853, |
|
"learning_rate": 2.723871256488544e-05, |
|
"loss": 0.5981, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.282685786485672, |
|
"learning_rate": 2.719898180954589e-05, |
|
"loss": 0.6036, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.2658996284008026, |
|
"learning_rate": 2.715924545624549e-05, |
|
"loss": 0.6023, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.22930899262428284, |
|
"learning_rate": 2.711950360614129e-05, |
|
"loss": 0.5816, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.27134084701538086, |
|
"learning_rate": 2.7079756360404358e-05, |
|
"loss": 0.5873, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.23598235845565796, |
|
"learning_rate": 2.7040003820219483e-05, |
|
"loss": 0.5954, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.23500126600265503, |
|
"learning_rate": 2.700024608678494e-05, |
|
"loss": 0.5989, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.21222622692584991, |
|
"learning_rate": 2.696048326131223e-05, |
|
"loss": 0.5756, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.27849477529525757, |
|
"learning_rate": 2.6920715445025814e-05, |
|
"loss": 0.6158, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.26395028829574585, |
|
"learning_rate": 2.6880942739162844e-05, |
|
"loss": 0.5845, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.27106571197509766, |
|
"learning_rate": 2.6841165244972937e-05, |
|
"loss": 0.5866, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.2842698395252228, |
|
"learning_rate": 2.68013830637179e-05, |
|
"loss": 0.5813, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.3903340995311737, |
|
"learning_rate": 2.6761596296671444e-05, |
|
"loss": 0.5909, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.21686884760856628, |
|
"learning_rate": 2.6721805045118985e-05, |
|
"loss": 0.6051, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.2636258602142334, |
|
"learning_rate": 2.6682009410357343e-05, |
|
"loss": 0.6066, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.27400442957878113, |
|
"learning_rate": 2.664220949369451e-05, |
|
"loss": 0.6107, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.2363210916519165, |
|
"learning_rate": 2.6602405396449354e-05, |
|
"loss": 0.6099, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.2572976350784302, |
|
"learning_rate": 2.65625972199514e-05, |
|
"loss": 0.6098, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.24827510118484497, |
|
"learning_rate": 2.652278506554057e-05, |
|
"loss": 0.599, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.25139114260673523, |
|
"learning_rate": 2.6482969034566875e-05, |
|
"loss": 0.5938, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.2517510950565338, |
|
"learning_rate": 2.644314922839023e-05, |
|
"loss": 0.6115, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.2278544306755066, |
|
"learning_rate": 2.640332574838015e-05, |
|
"loss": 0.6023, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"eval_loss": 0.6308007836341858, |
|
"eval_runtime": 67.2785, |
|
"eval_samples_per_second": 29.727, |
|
"eval_steps_per_second": 0.936, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.19135211408138275, |
|
"learning_rate": 2.63634986959155e-05, |
|
"loss": 0.5877, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.25551867485046387, |
|
"learning_rate": 2.632366817238424e-05, |
|
"loss": 0.6095, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.21738716959953308, |
|
"learning_rate": 2.628383427918317e-05, |
|
"loss": 0.5754, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.261692613363266, |
|
"learning_rate": 2.624399711771766e-05, |
|
"loss": 0.6184, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.26923197507858276, |
|
"learning_rate": 2.6204156789401407e-05, |
|
"loss": 0.5911, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.27664855122566223, |
|
"learning_rate": 2.616431339565617e-05, |
|
"loss": 0.5788, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.24067947268486023, |
|
"learning_rate": 2.6124467037911504e-05, |
|
"loss": 0.6112, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.2814284563064575, |
|
"learning_rate": 2.6084617817604516e-05, |
|
"loss": 0.6022, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.2756374776363373, |
|
"learning_rate": 2.6044765836179602e-05, |
|
"loss": 0.6149, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.25635477900505066, |
|
"learning_rate": 2.6004911195088187e-05, |
|
"loss": 0.5833, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.2209189385175705, |
|
"learning_rate": 2.5965053995788468e-05, |
|
"loss": 0.596, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.7075193524360657, |
|
"learning_rate": 2.5925194339745136e-05, |
|
"loss": 0.5932, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.28784462809562683, |
|
"learning_rate": 2.5885332328429163e-05, |
|
"loss": 0.6118, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.26419249176979065, |
|
"learning_rate": 2.5845468063317496e-05, |
|
"loss": 0.618, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.23297163844108582, |
|
"learning_rate": 2.580560164589284e-05, |
|
"loss": 0.5814, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.2898586094379425, |
|
"learning_rate": 2.5765733177643355e-05, |
|
"loss": 0.5962, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.2931255102157593, |
|
"learning_rate": 2.5725862760062437e-05, |
|
"loss": 0.6289, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.27760767936706543, |
|
"learning_rate": 2.5685990494648444e-05, |
|
"loss": 0.5976, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.2815667688846588, |
|
"learning_rate": 2.5646116482904432e-05, |
|
"loss": 0.5649, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.22442054748535156, |
|
"learning_rate": 2.5606240826337908e-05, |
|
"loss": 0.5873, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.26230359077453613, |
|
"learning_rate": 2.5566363626460565e-05, |
|
"loss": 0.5881, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.3227124512195587, |
|
"learning_rate": 2.5526484984788023e-05, |
|
"loss": 0.5871, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.2664228677749634, |
|
"learning_rate": 2.5486605002839574e-05, |
|
"loss": 0.6118, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.3071444034576416, |
|
"learning_rate": 2.5446723782137926e-05, |
|
"loss": 0.6102, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.3024210035800934, |
|
"learning_rate": 2.540684142420892e-05, |
|
"loss": 0.6361, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.2285085916519165, |
|
"learning_rate": 2.536695803058133e-05, |
|
"loss": 0.5904, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.2410888373851776, |
|
"learning_rate": 2.5327073702786536e-05, |
|
"loss": 0.6251, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.27157413959503174, |
|
"learning_rate": 2.5287188542358298e-05, |
|
"loss": 0.6109, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.22924353182315826, |
|
"learning_rate": 2.524730265083251e-05, |
|
"loss": 0.614, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.26990529894828796, |
|
"learning_rate": 2.5207416129746913e-05, |
|
"loss": 0.6039, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.2284127026796341, |
|
"learning_rate": 2.5167529080640862e-05, |
|
"loss": 0.584, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.23105378448963165, |
|
"learning_rate": 2.512764160505504e-05, |
|
"loss": 0.6111, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.2851148247718811, |
|
"learning_rate": 2.508775380453125e-05, |
|
"loss": 0.5742, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.2170216143131256, |
|
"learning_rate": 2.5047865780612078e-05, |
|
"loss": 0.6303, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.3306771516799927, |
|
"learning_rate": 2.5007977634840713e-05, |
|
"loss": 0.6104, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.27079054713249207, |
|
"learning_rate": 2.4968089468760643e-05, |
|
"loss": 0.5814, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.3222564160823822, |
|
"learning_rate": 2.4928201383915392e-05, |
|
"loss": 0.601, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.21210065484046936, |
|
"learning_rate": 2.4888313481848298e-05, |
|
"loss": 0.5942, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.20585991442203522, |
|
"learning_rate": 2.484842586410222e-05, |
|
"loss": 0.588, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.22469113767147064, |
|
"learning_rate": 2.4808538632219316e-05, |
|
"loss": 0.6187, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.28604716062545776, |
|
"learning_rate": 2.4768651887740736e-05, |
|
"loss": 0.6074, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.2785724699497223, |
|
"learning_rate": 2.4728765732206393e-05, |
|
"loss": 0.6013, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.25105738639831543, |
|
"learning_rate": 2.4688880267154722e-05, |
|
"loss": 0.6189, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.25772786140441895, |
|
"learning_rate": 2.4648995594122367e-05, |
|
"loss": 0.6039, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.2568160593509674, |
|
"learning_rate": 2.4609111814643988e-05, |
|
"loss": 0.6091, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.2306566685438156, |
|
"learning_rate": 2.456922903025195e-05, |
|
"loss": 0.594, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.256623774766922, |
|
"learning_rate": 2.4529347342476082e-05, |
|
"loss": 0.5884, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.30249103903770447, |
|
"learning_rate": 2.4489466852843444e-05, |
|
"loss": 0.5972, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.31607022881507874, |
|
"learning_rate": 2.4449587662878032e-05, |
|
"loss": 0.572, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.3034932613372803, |
|
"learning_rate": 2.4409709874100524e-05, |
|
"loss": 0.5898, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"eval_loss": 0.6311084032058716, |
|
"eval_runtime": 67.1728, |
|
"eval_samples_per_second": 29.774, |
|
"eval_steps_per_second": 0.938, |
|
"step": 11000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 20690, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"total_flos": 2.378226272582815e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |