|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 97.3913043478261,
  "eval_steps": 500,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 0.14778953790664673,
      "learning_rate": 0.00019999370567547008,
      "loss": 0.5359,
      "step": 10
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 0.15012609958648682,
      "learning_rate": 0.00019997482349425066,
      "loss": 0.5405,
      "step": 20
    },
    {
      "epoch": 1.0434782608695652,
      "grad_norm": 0.14203248918056488,
      "learning_rate": 0.00019994335583335335,
      "loss": 0.5344,
      "step": 30
    },
    {
      "epoch": 1.391304347826087,
      "grad_norm": 0.19145642220973969,
      "learning_rate": 0.00019989930665413147,
      "loss": 0.5738,
      "step": 40
    },
    {
      "epoch": 1.7391304347826086,
      "grad_norm": 0.17617826163768768,
      "learning_rate": 0.00019984268150178167,
      "loss": 0.5102,
      "step": 50
    },
    {
      "epoch": 2.0869565217391304,
      "grad_norm": 0.23997928202152252,
      "learning_rate": 0.0001997734875046456,
      "loss": 0.5288,
      "step": 60
    },
    {
      "epoch": 2.4347826086956523,
      "grad_norm": 0.30887937545776367,
      "learning_rate": 0.0001996917333733128,
      "loss": 0.4797,
      "step": 70
    },
    {
      "epoch": 2.782608695652174,
      "grad_norm": 0.3976322412490845,
      "learning_rate": 0.00019959742939952392,
      "loss": 0.5502,
      "step": 80
    },
    {
      "epoch": 3.130434782608696,
      "grad_norm": 0.32993584871292114,
      "learning_rate": 0.00019949058745487522,
      "loss": 0.494,
      "step": 90
    },
    {
      "epoch": 3.4782608695652173,
      "grad_norm": 0.5071695446968079,
      "learning_rate": 0.00019937122098932428,
      "loss": 0.517,
      "step": 100
    },
    {
      "epoch": 3.8260869565217392,
      "grad_norm": 0.4845665991306305,
      "learning_rate": 0.00019923934502949644,
      "loss": 0.4654,
      "step": 110
    },
    {
      "epoch": 4.173913043478261,
      "grad_norm": 0.6004394888877869,
      "learning_rate": 0.00019909497617679348,
      "loss": 0.4522,
      "step": 120
    },
    {
      "epoch": 4.521739130434782,
      "grad_norm": 0.6237146258354187,
      "learning_rate": 0.00019893813260530368,
      "loss": 0.4527,
      "step": 130
    },
    {
      "epoch": 4.869565217391305,
      "grad_norm": 0.7019842267036438,
      "learning_rate": 0.00019876883405951377,
      "loss": 0.4402,
      "step": 140
    },
    {
      "epoch": 5.217391304347826,
      "grad_norm": 0.7478927373886108,
      "learning_rate": 0.0001985871018518236,
      "loss": 0.4429,
      "step": 150
    },
    {
      "epoch": 5.565217391304348,
      "grad_norm": 1.045157790184021,
      "learning_rate": 0.00019839295885986296,
      "loss": 0.3951,
      "step": 160
    },
    {
      "epoch": 5.913043478260869,
      "grad_norm": 0.8758353590965271,
      "learning_rate": 0.00019818642952361187,
      "loss": 0.4242,
      "step": 170
    },
    {
      "epoch": 6.260869565217392,
      "grad_norm": 0.8154057264328003,
      "learning_rate": 0.00019796753984232358,
      "loss": 0.4026,
      "step": 180
    },
    {
      "epoch": 6.608695652173913,
      "grad_norm": 0.9479079246520996,
      "learning_rate": 0.00019773631737125192,
      "loss": 0.3915,
      "step": 190
    },
    {
      "epoch": 6.956521739130435,
      "grad_norm": 0.9628075957298279,
      "learning_rate": 0.00019749279121818235,
      "loss": 0.3919,
      "step": 200
    },
    {
      "epoch": 7.304347826086957,
      "grad_norm": 0.9826076626777649,
      "learning_rate": 0.00019723699203976766,
      "loss": 0.3807,
      "step": 210
    },
    {
      "epoch": 7.6521739130434785,
      "grad_norm": 0.9666227102279663,
      "learning_rate": 0.0001969689520376687,
      "loss": 0.3632,
      "step": 220
    },
    {
      "epoch": 8.0,
      "grad_norm": 1.1111929416656494,
      "learning_rate": 0.00019668870495450066,
      "loss": 0.3697,
      "step": 230
    },
    {
      "epoch": 8.347826086956522,
      "grad_norm": 1.2887296676635742,
      "learning_rate": 0.00019639628606958533,
      "loss": 0.3355,
      "step": 240
    },
    {
      "epoch": 8.695652173913043,
      "grad_norm": 1.0423779487609863,
      "learning_rate": 0.00019609173219450998,
      "loss": 0.3564,
      "step": 250
    },
    {
      "epoch": 9.043478260869565,
      "grad_norm": 1.165279746055603,
      "learning_rate": 0.00019577508166849304,
      "loss": 0.3651,
      "step": 260
    },
    {
      "epoch": 9.391304347826088,
      "grad_norm": 0.8437550067901611,
      "learning_rate": 0.00019544637435355808,
      "loss": 0.3241,
      "step": 270
    },
    {
      "epoch": 9.73913043478261,
      "grad_norm": 1.1493055820465088,
      "learning_rate": 0.00019510565162951537,
      "loss": 0.3374,
      "step": 280
    },
    {
      "epoch": 10.08695652173913,
      "grad_norm": 1.084230661392212,
      "learning_rate": 0.0001947529563887529,
      "loss": 0.3315,
      "step": 290
    },
    {
      "epoch": 10.434782608695652,
      "grad_norm": 0.9703076481819153,
      "learning_rate": 0.00019438833303083678,
      "loss": 0.2926,
      "step": 300
    },
    {
      "epoch": 10.782608695652174,
      "grad_norm": 1.3537240028381348,
      "learning_rate": 0.0001940118274569219,
      "loss": 0.3258,
      "step": 310
    },
    {
      "epoch": 11.130434782608695,
      "grad_norm": 1.3062912225723267,
      "learning_rate": 0.00019362348706397373,
      "loss": 0.3321,
      "step": 320
    },
    {
      "epoch": 11.478260869565217,
      "grad_norm": 1.2595503330230713,
      "learning_rate": 0.00019322336073880142,
      "loss": 0.3005,
      "step": 330
    },
    {
      "epoch": 11.826086956521738,
      "grad_norm": 0.9087198376655579,
      "learning_rate": 0.0001928114988519039,
      "loss": 0.2949,
      "step": 340
    },
    {
      "epoch": 12.173913043478262,
      "grad_norm": 0.9392287135124207,
      "learning_rate": 0.0001923879532511287,
      "loss": 0.2837,
      "step": 350
    },
    {
      "epoch": 12.521739130434783,
      "grad_norm": 1.3128340244293213,
      "learning_rate": 0.0001919527772551451,
      "loss": 0.3006,
      "step": 360
    },
    {
      "epoch": 12.869565217391305,
      "grad_norm": 1.3989088535308838,
      "learning_rate": 0.00019150602564673198,
      "loss": 0.3049,
      "step": 370
    },
    {
      "epoch": 13.217391304347826,
      "grad_norm": 1.1957728862762451,
      "learning_rate": 0.00019104775466588161,
      "loss": 0.2726,
      "step": 380
    },
    {
      "epoch": 13.565217391304348,
      "grad_norm": 1.3441591262817383,
      "learning_rate": 0.00019057802200271942,
      "loss": 0.2832,
      "step": 390
    },
    {
      "epoch": 13.91304347826087,
      "grad_norm": 1.244665265083313,
      "learning_rate": 0.0001900968867902419,
      "loss": 0.2884,
      "step": 400
    },
    {
      "epoch": 14.26086956521739,
      "grad_norm": 0.9919373989105225,
      "learning_rate": 0.00018960440959687254,
      "loss": 0.2537,
      "step": 410
    },
    {
      "epoch": 14.608695652173914,
      "grad_norm": 1.333085298538208,
      "learning_rate": 0.0001891006524188368,
      "loss": 0.2825,
      "step": 420
    },
    {
      "epoch": 14.956521739130435,
      "grad_norm": 1.2832859754562378,
      "learning_rate": 0.000188585678672358,
      "loss": 0.2792,
      "step": 430
    },
    {
      "epoch": 15.304347826086957,
      "grad_norm": 0.9661863446235657,
      "learning_rate": 0.0001880595531856738,
      "loss": 0.255,
      "step": 440
    },
    {
      "epoch": 15.652173913043478,
      "grad_norm": 1.5152740478515625,
      "learning_rate": 0.00018752234219087538,
      "loss": 0.2693,
      "step": 450
    },
    {
      "epoch": 16.0,
      "grad_norm": 1.2663978338241577,
      "learning_rate": 0.00018697411331556956,
      "loss": 0.2627,
      "step": 460
    },
    {
      "epoch": 16.347826086956523,
      "grad_norm": 1.4080078601837158,
      "learning_rate": 0.0001864149355743655,
      "loss": 0.2587,
      "step": 470
    },
    {
      "epoch": 16.695652173913043,
      "grad_norm": 1.4147775173187256,
      "learning_rate": 0.00018584487936018661,
      "loss": 0.255,
      "step": 480
    },
    {
      "epoch": 17.043478260869566,
      "grad_norm": 1.1311839818954468,
      "learning_rate": 0.00018526401643540922,
      "loss": 0.2579,
      "step": 490
    },
    {
      "epoch": 17.391304347826086,
      "grad_norm": 1.230575680732727,
      "learning_rate": 0.00018467241992282843,
      "loss": 0.241,
      "step": 500
    },
    {
      "epoch": 17.73913043478261,
      "grad_norm": 1.2601960897445679,
      "learning_rate": 0.00018407016429645303,
      "loss": 0.2398,
      "step": 510
    },
    {
      "epoch": 18.08695652173913,
      "grad_norm": 0.9139521718025208,
      "learning_rate": 0.00018345732537213027,
      "loss": 0.251,
      "step": 520
    },
    {
      "epoch": 18.434782608695652,
      "grad_norm": 1.2161707878112793,
      "learning_rate": 0.00018283398029800166,
      "loss": 0.2453,
      "step": 530
    },
    {
      "epoch": 18.782608695652176,
      "grad_norm": 1.2001985311508179,
      "learning_rate": 0.00018220020754479102,
      "loss": 0.2396,
      "step": 540
    },
    {
      "epoch": 19.130434782608695,
      "grad_norm": 1.3439651727676392,
      "learning_rate": 0.00018155608689592604,
      "loss": 0.2415,
      "step": 550
    },
    {
      "epoch": 19.47826086956522,
      "grad_norm": 1.2576191425323486,
      "learning_rate": 0.00018090169943749476,
      "loss": 0.2343,
      "step": 560
    },
    {
      "epoch": 19.82608695652174,
      "grad_norm": 1.5092381238937378,
      "learning_rate": 0.0001802371275480378,
      "loss": 0.2338,
      "step": 570
    },
    {
      "epoch": 20.17391304347826,
      "grad_norm": 1.624345302581787,
      "learning_rate": 0.00017956245488817812,
      "loss": 0.2256,
      "step": 580
    },
    {
      "epoch": 20.52173913043478,
      "grad_norm": 1.2802064418792725,
      "learning_rate": 0.00017887776639008914,
      "loss": 0.2315,
      "step": 590
    },
    {
      "epoch": 20.869565217391305,
      "grad_norm": 1.1998881101608276,
      "learning_rate": 0.000178183148246803,
      "loss": 0.2315,
      "step": 600
    },
    {
      "epoch": 21.217391304347824,
      "grad_norm": 1.1652300357818604,
      "learning_rate": 0.0001774786879013601,
      "loss": 0.2216,
      "step": 610
    },
    {
      "epoch": 21.565217391304348,
      "grad_norm": 1.262691617012024,
      "learning_rate": 0.00017683633177790272,
      "loss": 0.2244,
      "step": 620
    },
    {
      "epoch": 21.91304347826087,
      "grad_norm": 1.546859860420227,
      "learning_rate": 0.00017611341658242395,
      "loss": 0.2223,
      "step": 630
    },
    {
      "epoch": 22.26086956521739,
      "grad_norm": 1.4874248504638672,
      "learning_rate": 0.00017538091973604415,
      "loss": 0.2187,
      "step": 640
    },
    {
      "epoch": 22.608695652173914,
      "grad_norm": 1.5900665521621704,
      "learning_rate": 0.00017463893345022067,
      "loss": 0.2229,
      "step": 650
    },
    {
      "epoch": 22.956521739130434,
      "grad_norm": 1.3051114082336426,
      "learning_rate": 0.0001738875511310031,
      "loss": 0.2242,
      "step": 660
    },
    {
      "epoch": 23.304347826086957,
      "grad_norm": 1.0650899410247803,
      "learning_rate": 0.0001731268673672747,
      "loss": 0.216,
      "step": 670
    },
    {
      "epoch": 23.652173913043477,
      "grad_norm": 1.1210111379623413,
      "learning_rate": 0.00017235697791884494,
      "loss": 0.214,
      "step": 680
    },
    {
      "epoch": 24.0,
      "grad_norm": 1.0874310731887817,
      "learning_rate": 0.00017157797970439464,
      "loss": 0.2244,
      "step": 690
    },
    {
      "epoch": 24.347826086956523,
      "grad_norm": 1.6198880672454834,
      "learning_rate": 0.00017078997078927521,
      "loss": 0.2093,
      "step": 700
    },
    {
      "epoch": 24.695652173913043,
      "grad_norm": 1.6299738883972168,
      "learning_rate": 0.00016999305037316352,
      "loss": 0.2164,
      "step": 710
    },
    {
      "epoch": 25.043478260869566,
      "grad_norm": 1.2034841775894165,
      "learning_rate": 0.00016918731877757406,
      "loss": 0.2078,
      "step": 720
    },
    {
      "epoch": 25.391304347826086,
      "grad_norm": 1.052951693534851,
      "learning_rate": 0.00016837287743322975,
      "loss": 0.2079,
      "step": 730
    },
    {
      "epoch": 25.73913043478261,
      "grad_norm": 1.6947507858276367,
      "learning_rate": 0.00016754982886729318,
      "loss": 0.2054,
      "step": 740
    },
    {
      "epoch": 26.08695652173913,
      "grad_norm": 1.0883299112319946,
      "learning_rate": 0.00016680181159555013,
      "loss": 0.2056,
      "step": 750
    },
    {
      "epoch": 26.434782608695652,
      "grad_norm": 1.4576375484466553,
      "learning_rate": 0.0001659626956410861,
      "loss": 0.2074,
      "step": 760
    },
    {
      "epoch": 26.782608695652176,
      "grad_norm": 1.139978289604187,
      "learning_rate": 0.00016511527587435737,
      "loss": 0.1993,
      "step": 770
    },
    {
      "epoch": 27.130434782608695,
      "grad_norm": 1.2391374111175537,
      "learning_rate": 0.0001642596589740644,
      "loss": 0.2084,
      "step": 780
    },
    {
      "epoch": 27.47826086956522,
      "grad_norm": 1.4511348009109497,
      "learning_rate": 0.00016348268421631797,
      "loss": 0.2087,
      "step": 790
    },
    {
      "epoch": 27.82608695652174,
      "grad_norm": 1.458491325378418,
      "learning_rate": 0.00016261179034692826,
      "loss": 0.1976,
      "step": 800
    },
    {
      "epoch": 28.17391304347826,
      "grad_norm": 1.1205899715423584,
      "learning_rate": 0.00016173301449898166,
      "loss": 0.1933,
      "step": 810
    },
    {
      "epoch": 28.52173913043478,
      "grad_norm": 1.3411377668380737,
      "learning_rate": 0.00016084646729848568,
      "loss": 0.1986,
      "step": 820
    },
    {
      "epoch": 28.869565217391305,
      "grad_norm": 1.4019867181777954,
      "learning_rate": 0.00015995226034975617,
      "loss": 0.206,
      "step": 830
    },
    {
      "epoch": 29.217391304347824,
      "grad_norm": 1.2646626234054565,
      "learning_rate": 0.00015905050622136777,
      "loss": 0.2006,
      "step": 840
    },
    {
      "epoch": 29.565217391304348,
      "grad_norm": 1.4075453281402588,
      "learning_rate": 0.00015814131843198308,
      "loss": 0.195,
      "step": 850
    },
    {
      "epoch": 29.91304347826087,
      "grad_norm": 1.1889333724975586,
      "learning_rate": 0.0001572248114360622,
      "loss": 0.1932,
      "step": 860
    },
    {
      "epoch": 30.26086956521739,
      "grad_norm": 0.9080250859260559,
      "learning_rate": 0.0001563011006094544,
      "loss": 0.1872,
      "step": 870
    },
    {
      "epoch": 30.608695652173914,
      "grad_norm": 1.183297872543335,
      "learning_rate": 0.0001553703022348741,
      "loss": 0.1918,
      "step": 880
    },
    {
      "epoch": 30.956521739130434,
      "grad_norm": 1.1751201152801514,
      "learning_rate": 0.00015443253348726202,
      "loss": 0.1983,
      "step": 890
    },
    {
      "epoch": 31.304347826086957,
      "grad_norm": 0.909827709197998,
      "learning_rate": 0.0001534879124190348,
      "loss": 0.186,
      "step": 900
    },
    {
      "epoch": 31.652173913043477,
      "grad_norm": 1.1720420122146606,
      "learning_rate": 0.00015253655794522368,
      "loss": 0.1853,
      "step": 910
    },
    {
      "epoch": 32.0,
      "grad_norm": 1.4185489416122437,
      "learning_rate": 0.00015157858982850475,
      "loss": 0.1995,
      "step": 920
    },
    {
      "epoch": 32.34782608695652,
      "grad_norm": 1.5267356634140015,
      "learning_rate": 0.00015061412866412225,
      "loss": 0.1829,
      "step": 930
    },
    {
      "epoch": 32.69565217391305,
      "grad_norm": 1.2880375385284424,
      "learning_rate": 0.00014964329586470752,
      "loss": 0.1909,
      "step": 940
    },
    {
      "epoch": 33.04347826086956,
      "grad_norm": 1.1732306480407715,
      "learning_rate": 0.0001486662136449946,
      "loss": 0.1868,
      "step": 950
    },
    {
      "epoch": 33.391304347826086,
      "grad_norm": 1.338199496269226,
      "learning_rate": 0.00014768300500643517,
      "loss": 0.1896,
      "step": 960
    },
    {
      "epoch": 33.73913043478261,
      "grad_norm": 1.0183218717575073,
      "learning_rate": 0.0001466937937217143,
      "loss": 0.184,
      "step": 970
    },
    {
      "epoch": 34.08695652173913,
      "grad_norm": 1.0611428022384644,
      "learning_rate": 0.00014569870431916903,
      "loss": 0.1803,
      "step": 980
    },
    {
      "epoch": 34.43478260869565,
      "grad_norm": 1.1321274042129517,
      "learning_rate": 0.00014469786206711214,
      "loss": 0.1766,
      "step": 990
    },
    {
      "epoch": 34.78260869565217,
      "grad_norm": 1.025775671005249,
      "learning_rate": 0.0001436913929580623,
      "loss": 0.186,
      "step": 1000
    },
    {
      "epoch": 35.130434782608695,
      "grad_norm": 1.3901698589324951,
      "learning_rate": 0.00014267942369288364,
      "loss": 0.1872,
      "step": 1010
    },
    {
      "epoch": 35.47826086956522,
      "grad_norm": 1.1713916063308716,
      "learning_rate": 0.00014166208166483548,
      "loss": 0.179,
      "step": 1020
    },
    {
      "epoch": 35.82608695652174,
      "grad_norm": 1.1632108688354492,
      "learning_rate": 0.0001406394949435355,
      "loss": 0.1849,
      "step": 1030
    },
    {
      "epoch": 36.17391304347826,
      "grad_norm": 0.9232921600341797,
      "learning_rate": 0.00013961179225883735,
      "loss": 0.1837,
      "step": 1040
    },
    {
      "epoch": 36.52173913043478,
      "grad_norm": 1.1833399534225464,
      "learning_rate": 0.00013857910298462544,
      "loss": 0.1681,
      "step": 1050
    },
    {
      "epoch": 36.869565217391305,
      "grad_norm": 1.3527874946594238,
      "learning_rate": 0.00013754155712252832,
      "loss": 0.1906,
      "step": 1060
    },
    {
      "epoch": 37.21739130434783,
      "grad_norm": 1.2061622142791748,
      "learning_rate": 0.00013649928528555342,
      "loss": 0.1692,
      "step": 1070
    },
    {
      "epoch": 37.56521739130435,
      "grad_norm": 1.4383926391601562,
      "learning_rate": 0.00013545241868164457,
      "loss": 0.1767,
      "step": 1080
    },
    {
      "epoch": 37.91304347826087,
      "grad_norm": 1.1512194871902466,
      "learning_rate": 0.00013440108909716468,
      "loss": 0.1798,
      "step": 1090
    },
    {
      "epoch": 38.26086956521739,
      "grad_norm": 1.2778269052505493,
      "learning_rate": 0.00013334542888030553,
      "loss": 0.1779,
      "step": 1100
    },
    {
      "epoch": 38.608695652173914,
      "grad_norm": 1.2878704071044922,
      "learning_rate": 0.00013228557092442712,
      "loss": 0.1734,
      "step": 1110
    },
    {
      "epoch": 38.95652173913044,
      "grad_norm": 1.2846708297729492,
      "learning_rate": 0.00013122164865132807,
      "loss": 0.1743,
      "step": 1120
    },
    {
      "epoch": 39.30434782608695,
      "grad_norm": 1.3258745670318604,
      "learning_rate": 0.00013015379599444957,
      "loss": 0.1687,
      "step": 1130
    },
    {
      "epoch": 39.65217391304348,
      "grad_norm": 1.3389649391174316,
      "learning_rate": 0.00012908214738201513,
      "loss": 0.1698,
      "step": 1140
    },
    {
      "epoch": 40.0,
      "grad_norm": 0.9111236929893494,
      "learning_rate": 0.00012800683772010767,
      "loss": 0.1828,
      "step": 1150
    },
    {
      "epoch": 40.34782608695652,
      "grad_norm": 1.401234745979309,
      "learning_rate": 0.00012692800237568686,
      "loss": 0.1676,
      "step": 1160
    },
    {
      "epoch": 40.69565217391305,
      "grad_norm": 1.0602619647979736,
      "learning_rate": 0.00012584577715954814,
      "loss": 0.1737,
      "step": 1170
    },
    {
      "epoch": 41.04347826086956,
      "grad_norm": 1.310141921043396,
      "learning_rate": 0.000124760298309226,
      "loss": 0.1751,
      "step": 1180
    },
    {
      "epoch": 41.391304347826086,
      "grad_norm": 0.8857985138893127,
      "learning_rate": 0.00012367170247184355,
      "loss": 0.1629,
      "step": 1190
    },
    {
      "epoch": 41.73913043478261,
      "grad_norm": 1.1576647758483887,
      "learning_rate": 0.0001225801266869104,
      "loss": 0.1672,
      "step": 1200
    },
    {
      "epoch": 42.08695652173913,
      "grad_norm": 1.0252318382263184,
      "learning_rate": 0.00012148570836907133,
      "loss": 0.1778,
      "step": 1210
    },
    {
      "epoch": 42.43478260869565,
      "grad_norm": 1.6137018203735352,
      "learning_rate": 0.00012038858529080768,
      "loss": 0.1632,
      "step": 1220
    },
    {
      "epoch": 42.78260869565217,
      "grad_norm": 1.087752342224121,
      "learning_rate": 0.0001192888955650935,
      "loss": 0.167,
      "step": 1230
    },
    {
      "epoch": 43.130434782608695,
      "grad_norm": 0.9813684821128845,
      "learning_rate": 0.00011818677762800909,
      "loss": 0.1711,
      "step": 1240
    },
    {
      "epoch": 43.47826086956522,
      "grad_norm": 1.5694411993026733,
      "learning_rate": 0.00011708237022131381,
      "loss": 0.166,
      "step": 1250
    },
    {
      "epoch": 43.82608695652174,
      "grad_norm": 1.595716953277588,
      "learning_rate": 0.0001159758123749802,
      "loss": 0.1676,
      "step": 1260
    },
    {
      "epoch": 44.17391304347826,
      "grad_norm": 1.0079954862594604,
      "learning_rate": 0.00011486724338969232,
      "loss": 0.1673,
      "step": 1270
    },
    {
      "epoch": 44.52173913043478,
      "grad_norm": 1.288737177848816,
      "learning_rate": 0.00011375680281930919,
      "loss": 0.1629,
      "step": 1280
    },
    {
      "epoch": 44.869565217391305,
      "grad_norm": 1.429558277130127,
      "learning_rate": 0.00011264463045329728,
      "loss": 0.1681,
      "step": 1290
    },
    {
      "epoch": 45.21739130434783,
      "grad_norm": 1.1483312845230103,
      "learning_rate": 0.00011153086629913267,
      "loss": 0.1585,
      "step": 1300
    },
    {
      "epoch": 45.56521739130435,
      "grad_norm": 0.9982392191886902,
      "learning_rate": 0.00011041565056467614,
      "loss": 0.1621,
      "step": 1310
    },
    {
      "epoch": 45.91304347826087,
      "grad_norm": 1.022334337234497,
      "learning_rate": 0.00010929912364052268,
      "loss": 0.1671,
      "step": 1320
    },
    {
      "epoch": 46.26086956521739,
      "grad_norm": 1.0012588500976562,
      "learning_rate": 0.00010818142608232849,
      "loss": 0.1617,
      "step": 1330
    },
    {
      "epoch": 46.608695652173914,
      "grad_norm": 1.2156851291656494,
      "learning_rate": 0.00010706269859311669,
      "loss": 0.1576,
      "step": 1340
    },
    {
      "epoch": 46.95652173913044,
      "grad_norm": 1.0152043104171753,
      "learning_rate": 0.00010594308200556483,
      "loss": 0.1701,
      "step": 1350
    },
    {
      "epoch": 47.30434782608695,
      "grad_norm": 1.2514891624450684,
      "learning_rate": 0.00010482271726427598,
      "loss": 0.1551,
      "step": 1360
    },
    {
      "epoch": 47.65217391304348,
      "grad_norm": 1.1592861413955688,
      "learning_rate": 0.00010370174540803556,
      "loss": 0.1649,
      "step": 1370
    },
    {
      "epoch": 48.0,
      "grad_norm": 1.2036534547805786,
      "learning_rate": 0.00010258030755205665,
      "loss": 0.1627,
      "step": 1380
    },
    {
      "epoch": 48.34782608695652,
      "grad_norm": 1.158408284187317,
      "learning_rate": 0.00010145854487021532,
      "loss": 0.1528,
      "step": 1390
    },
    {
      "epoch": 48.69565217391305,
      "grad_norm": 1.2595423460006714,
      "learning_rate": 0.00010033659857727893,
      "loss": 0.1647,
      "step": 1400
    },
    {
      "epoch": 49.04347826086956,
      "grad_norm": 0.9286451935768127,
      "learning_rate": 9.921460991112891e-05,
      "loss": 0.1594,
      "step": 1410
    },
    {
      "epoch": 49.391304347826086,
      "grad_norm": 1.1650205850601196,
      "learning_rate": 9.809272011498085e-05,
      "loss": 0.1571,
      "step": 1420
    },
    {
      "epoch": 49.73913043478261,
      "grad_norm": 1.3685368299484253,
      "learning_rate": 9.697107041960417e-05,
      "loss": 0.1556,
      "step": 1430
    },
    {
      "epoch": 50.08695652173913,
      "grad_norm": 1.0636142492294312,
      "learning_rate": 9.584980202554266e-05,
      "loss": 0.1592,
      "step": 1440
    },
    {
      "epoch": 50.43478260869565,
      "grad_norm": 1.2848167419433594,
      "learning_rate": 9.472905608533941e-05,
      "loss": 0.1525,
      "step": 1450
    },
    {
      "epoch": 50.78260869565217,
      "grad_norm": 1.0434962511062622,
      "learning_rate": 9.360897368576772e-05,
      "loss": 0.1577,
      "step": 1460
    },
    {
      "epoch": 51.130434782608695,
      "grad_norm": 0.8662859797477722,
      "learning_rate": 9.248969583007005e-05,
      "loss": 0.1577,
      "step": 1470
    },
    {
      "epoch": 51.47826086956522,
      "grad_norm": 1.0361707210540771,
      "learning_rate": 9.137136342020768e-05,
      "loss": 0.1545,
      "step": 1480
    },
    {
      "epoch": 51.82608695652174,
      "grad_norm": 1.1432064771652222,
      "learning_rate": 9.025411723912298e-05,
      "loss": 0.1561,
      "step": 1490
    },
    {
      "epoch": 52.17391304347826,
      "grad_norm": 1.0413988828659058,
      "learning_rate": 8.913809793301681e-05,
      "loss": 0.1528,
      "step": 1500
    },
    {
      "epoch": 52.52173913043478,
      "grad_norm": 1.250156044960022,
      "learning_rate": 8.802344599364312e-05,
      "loss": 0.1562,
      "step": 1510
    },
    {
      "epoch": 52.869565217391305,
      "grad_norm": 1.0235179662704468,
      "learning_rate": 8.691030174062273e-05,
      "loss": 0.1522,
      "step": 1520
    },
    {
      "epoch": 53.21739130434783,
      "grad_norm": 1.0572556257247925,
      "learning_rate": 8.579880530377926e-05,
      "loss": 0.151,
      "step": 1530
    },
    {
      "epoch": 53.56521739130435,
      "grad_norm": 1.239871621131897,
      "learning_rate": 8.468909660549837e-05,
      "loss": 0.1532,
      "step": 1540
    },
    {
      "epoch": 53.91304347826087,
      "grad_norm": 1.0449697971343994,
      "learning_rate": 8.358131534311372e-05,
      "loss": 0.1573,
      "step": 1550
    },
    {
      "epoch": 54.26086956521739,
      "grad_norm": 1.0971099138259888,
      "learning_rate": 8.247560097132076e-05,
      "loss": 0.1503,
      "step": 1560
    },
    {
      "epoch": 54.608695652173914,
      "grad_norm": 1.1196969747543335,
      "learning_rate": 8.13720926846214e-05,
      "loss": 0.1518,
      "step": 1570
    },
    {
      "epoch": 54.95652173913044,
      "grad_norm": 1.1394826173782349,
      "learning_rate": 8.027092939980115e-05,
      "loss": 0.1548,
      "step": 1580
    },
    {
      "epoch": 55.30434782608695,
      "grad_norm": 1.3418184518814087,
      "learning_rate": 7.917224973844152e-05,
      "loss": 0.1498,
      "step": 1590
    },
    {
      "epoch": 55.65217391304348,
      "grad_norm": 0.8179703950881958,
      "learning_rate": 7.807619200946942e-05,
      "loss": 0.1506,
      "step": 1600
    },
    {
      "epoch": 56.0,
      "grad_norm": 1.1934970617294312,
      "learning_rate": 7.69828941917458e-05,
      "loss": 0.155,
      "step": 1610
    },
    {
      "epoch": 56.34782608695652,
      "grad_norm": 1.0389273166656494,
      "learning_rate": 7.589249391669616e-05,
      "loss": 0.1493,
      "step": 1620
    },
    {
      "epoch": 56.69565217391305,
      "grad_norm": 0.9254215359687805,
      "learning_rate": 7.48051284509844e-05,
      "loss": 0.151,
      "step": 1630
    },
    {
      "epoch": 57.04347826086956,
      "grad_norm": 0.7930333614349365,
      "learning_rate": 7.372093467923302e-05,
      "loss": 0.1515,
      "step": 1640
    },
    {
      "epoch": 57.391304347826086,
      "grad_norm": 1.052694320678711,
      "learning_rate": 7.264004908679112e-05,
      "loss": 0.1445,
      "step": 1650
    },
    {
      "epoch": 57.73913043478261,
      "grad_norm": 1.1571382284164429,
      "learning_rate": 7.156260774255262e-05,
      "loss": 0.1514,
      "step": 1660
    },
    {
      "epoch": 58.08695652173913,
      "grad_norm": 0.9378436207771301,
      "learning_rate": 7.048874628182722e-05,
      "loss": 0.1529,
      "step": 1670
    },
    {
      "epoch": 58.43478260869565,
      "grad_norm": 1.0736242532730103,
      "learning_rate": 6.94185998892655e-05,
      "loss": 0.148,
      "step": 1680
    },
    {
      "epoch": 58.78260869565217,
      "grad_norm": 1.4897611141204834,
      "learning_rate": 6.835230328184138e-05,
      "loss": 0.1507,
      "step": 1690
    },
    {
      "epoch": 59.130434782608695,
      "grad_norm": 0.9510363936424255,
      "learning_rate": 6.728999069189263e-05,
      "loss": 0.1463,
      "step": 1700
    },
    {
      "epoch": 59.47826086956522,
      "grad_norm": 1.2587374448776245,
      "learning_rate": 6.623179585022316e-05,
      "loss": 0.1439,
      "step": 1710
    },
    {
      "epoch": 59.82608695652174,
      "grad_norm": 1.156571626663208,
      "learning_rate": 6.517785196926792e-05,
      "loss": 0.1499,
      "step": 1720
    },
    {
      "epoch": 60.17391304347826,
      "grad_norm": 0.9936209917068481,
      "learning_rate": 6.412829172632338e-05,
      "loss": 0.1508,
      "step": 1730
    },
    {
      "epoch": 60.52173913043478,
      "grad_norm": 1.0916430950164795,
      "learning_rate": 6.308324724684518e-05,
      "loss": 0.1467,
      "step": 1740
    },
    {
      "epoch": 60.869565217391305,
      "grad_norm": 1.1473065614700317,
      "learning_rate": 6.20428500878154e-05,
      "loss": 0.1482,
      "step": 1750
    },
    {
      "epoch": 61.21739130434783,
      "grad_norm": 0.9215027093887329,
      "learning_rate": 6.1007231221181206e-05,
      "loss": 0.144,
      "step": 1760
    },
    {
      "epoch": 61.56521739130435,
      "grad_norm": 1.0244927406311035,
      "learning_rate": 5.997652101736726e-05,
      "loss": 0.1437,
      "step": 1770
    },
    {
      "epoch": 61.91304347826087,
      "grad_norm": 1.461363673210144,
      "learning_rate": 5.8950849228864025e-05,
      "loss": 0.1536,
      "step": 1780
    },
    {
      "epoch": 62.26086956521739,
      "grad_norm": 1.039952039718628,
      "learning_rate": 5.793034497389345e-05,
      "loss": 0.1428,
      "step": 1790
    },
    {
      "epoch": 62.608695652173914,
      "grad_norm": 1.1120175123214722,
      "learning_rate": 5.69151367201548e-05,
      "loss": 0.149,
      "step": 1800
    },
    {
      "epoch": 62.95652173913044,
      "grad_norm": 0.9631905555725098,
      "learning_rate": 5.590535226865238e-05,
      "loss": 0.15,
      "step": 1810
    },
    {
      "epoch": 63.30434782608695,
      "grad_norm": 0.892376184463501,
      "learning_rate": 5.490111873760705e-05,
      "loss": 0.1456,
      "step": 1820
    },
    {
      "epoch": 63.65217391304348,
      "grad_norm": 0.7706930637359619,
      "learning_rate": 5.390256254645378e-05,
      "loss": 0.1459,
      "step": 1830
    },
    {
      "epoch": 64.0,
      "grad_norm": 1.2269357442855835,
      "learning_rate": 5.290980939992713e-05,
      "loss": 0.146,
      "step": 1840
    },
    {
      "epoch": 64.34782608695652,
      "grad_norm": 0.9604114294052124,
      "learning_rate": 5.192298427223676e-05,
      "loss": 0.1433,
      "step": 1850
    },
    {
      "epoch": 64.69565217391305,
      "grad_norm": 1.1727708578109741,
      "learning_rate": 5.094221139133482e-05,
      "loss": 0.1445,
      "step": 1860
    },
    {
      "epoch": 65.04347826086956,
      "grad_norm": 0.9599679112434387,
      "learning_rate": 4.996761422327737e-05,
      "loss": 0.1465,
      "step": 1870
    },
    {
      "epoch": 65.3913043478261,
      "grad_norm": 0.9806995391845703,
      "learning_rate": 4.899931545668164e-05,
      "loss": 0.1436,
      "step": 1880
    },
    {
      "epoch": 65.73913043478261,
      "grad_norm": 0.948320746421814,
      "learning_rate": 4.8037436987281194e-05,
      "loss": 0.1441,
      "step": 1890
    },
    {
      "epoch": 66.08695652173913,
      "grad_norm": 0.8844676613807678,
      "learning_rate": 4.708209990258095e-05,
      "loss": 0.1431,
      "step": 1900
    },
    {
      "epoch": 66.43478260869566,
      "grad_norm": 1.0380223989486694,
      "learning_rate": 4.613342446661383e-05,
      "loss": 0.1406,
      "step": 1910
    },
    {
      "epoch": 66.78260869565217,
      "grad_norm": 1.1254124641418457,
      "learning_rate": 4.519153010480118e-05,
      "loss": 0.15,
      "step": 1920
    },
    {
      "epoch": 67.1304347826087,
      "grad_norm": 1.0098830461502075,
      "learning_rate": 4.425653538891874e-05,
      "loss": 0.1448,
      "step": 1930
    },
    {
      "epoch": 67.47826086956522,
      "grad_norm": 0.9620192646980286,
      "learning_rate": 4.332855802217e-05,
      "loss": 0.142,
      "step": 1940
    },
    {
      "epoch": 67.82608695652173,
      "grad_norm": 0.9209885001182556,
      "learning_rate": 4.2407714824369024e-05,
      "loss": 0.1447,
      "step": 1950
    },
    {
      "epoch": 68.17391304347827,
      "grad_norm": 1.016014814376831,
      "learning_rate": 4.149412171723438e-05,
      "loss": 0.1433,
      "step": 1960
    },
    {
      "epoch": 68.52173913043478,
      "grad_norm": 0.9538384079933167,
      "learning_rate": 4.058789370979615e-05,
      "loss": 0.1408,
      "step": 1970
    },
    {
      "epoch": 68.8695652173913,
      "grad_norm": 1.0899274349212646,
      "learning_rate": 3.96891448839179e-05,
      "loss": 0.1432,
      "step": 1980
    },
    {
      "epoch": 69.21739130434783,
      "grad_norm": 1.1336071491241455,
      "learning_rate": 3.879798837993521e-05,
      "loss": 0.1447,
      "step": 1990
    },
    {
      "epoch": 69.56521739130434,
      "grad_norm": 1.2166575193405151,
      "learning_rate": 3.7914536382412956e-05,
      "loss": 0.1404,
      "step": 2000
    },
    {
      "epoch": 69.91304347826087,
      "grad_norm": 1.0494683980941772,
      "learning_rate": 3.703890010602274e-05,
      "loss": 0.1444,
      "step": 2010
    },
    {
      "epoch": 70.26086956521739,
      "grad_norm": 1.330414891242981,
      "learning_rate": 3.61711897815424e-05,
      "loss": 0.1403,
      "step": 2020
    },
    {
      "epoch": 70.6086956521739,
      "grad_norm": 0.8312104344367981,
      "learning_rate": 3.531151464197957e-05,
      "loss": 0.1419,
      "step": 2030
    },
    {
      "epoch": 70.95652173913044,
      "grad_norm": 0.9882224798202515,
      "learning_rate": 3.445998290882062e-05,
      "loss": 0.145,
      "step": 2040
    },
    {
      "epoch": 71.30434782608695,
      "grad_norm": 0.822264552116394,
      "learning_rate": 3.3616701778407065e-05,
      "loss": 0.1416,
      "step": 2050
    },
    {
      "epoch": 71.65217391304348,
      "grad_norm": 1.1315103769302368,
      "learning_rate": 3.2781777408441e-05,
      "loss": 0.1412,
      "step": 2060
    },
    {
      "epoch": 72.0,
      "grad_norm": 1.1338590383529663,
      "learning_rate": 3.195531490462128e-05,
      "loss": 0.1426,
      "step": 2070
    },
    {
      "epoch": 72.34782608695652,
      "grad_norm": 1.0739569664001465,
      "learning_rate": 3.1137418307412116e-05,
      "loss": 0.1378,
      "step": 2080
    },
    {
      "epoch": 72.69565217391305,
      "grad_norm": 0.9419929385185242,
      "learning_rate": 3.0328190578945815e-05,
      "loss": 0.1399,
      "step": 2090
    },
    {
      "epoch": 73.04347826086956,
      "grad_norm": 1.0321906805038452,
      "learning_rate": 2.952773359006119e-05,
      "loss": 0.1457,
      "step": 2100
    },
    {
      "epoch": 73.3913043478261,
      "grad_norm": 1.0655653476715088,
      "learning_rate": 2.8736148107479467e-05,
      "loss": 0.1362,
      "step": 2110
    },
    {
      "epoch": 73.73913043478261,
      "grad_norm": 1.0423299074172974,
      "learning_rate": 2.7953533781119055e-05,
      "loss": 0.1429,
      "step": 2120
    },
    {
      "epoch": 74.08695652173913,
      "grad_norm": 1.0007259845733643,
      "learning_rate": 2.7179989131550977e-05,
      "loss": 0.1415,
      "step": 2130
    },
    {
      "epoch": 74.43478260869566,
      "grad_norm": 1.0571916103363037,
      "learning_rate": 2.6415611537596496e-05,
      "loss": 0.1388,
      "step": 2140
    },
    {
      "epoch": 74.78260869565217,
      "grad_norm": 1.294371247291565,
      "learning_rate": 2.5660497224068415e-05,
      "loss": 0.1404,
      "step": 2150
    },
    {
      "epoch": 75.1304347826087,
      "grad_norm": 0.877185583114624,
      "learning_rate": 2.4914741249657658e-05,
      "loss": 0.14,
      "step": 2160
    },
    {
      "epoch": 75.47826086956522,
      "grad_norm": 0.8726389408111572,
      "learning_rate": 2.4178437494966688e-05,
      "loss": 0.1413,
      "step": 2170
    },
    {
      "epoch": 75.82608695652173,
      "grad_norm": 1.0243898630142212,
      "learning_rate": 2.34516786506912e-05,
      "loss": 0.1385,
      "step": 2180
    },
    {
      "epoch": 76.17391304347827,
      "grad_norm": 0.9572955369949341,
      "learning_rate": 2.2734556205951596e-05,
      "loss": 0.1366,
      "step": 2190
    },
    {
      "epoch": 76.52173913043478,
      "grad_norm": 0.9090381264686584,
      "learning_rate": 2.2027160436775794e-05,
      "loss": 0.1379,
      "step": 2200
    },
    {
      "epoch": 76.8695652173913,
      "grad_norm": 1.1555663347244263,
      "learning_rate": 2.1329580394734638e-05,
      "loss": 0.1418,
      "step": 2210
    },
    {
      "epoch": 77.21739130434783,
      "grad_norm": 0.8747707605361938,
      "learning_rate": 2.064190389573153e-05,
      "loss": 0.1386,
      "step": 2220
    },
    {
      "epoch": 77.56521739130434,
      "grad_norm": 1.1187694072723389,
      "learning_rate": 1.996421750894758e-05,
      "loss": 0.1398,
      "step": 2230
    },
    {
      "epoch": 77.91304347826087,
      "grad_norm": 1.1600162982940674,
      "learning_rate": 1.9296606545943774e-05,
      "loss": 0.1405,
      "step": 2240
    },
    {
      "epoch": 78.26086956521739,
      "grad_norm": 1.069093942642212,
      "learning_rate": 1.863915504992131e-05,
      "loss": 0.1375,
      "step": 2250
    },
    {
      "epoch": 78.6086956521739,
      "grad_norm": 0.9685916304588318,
      "learning_rate": 1.799194578514176e-05,
      "loss": 0.1397,
      "step": 2260
    },
    {
      "epoch": 78.95652173913044,
      "grad_norm": 1.3331691026687622,
      "learning_rate": 1.735506022650817e-05,
      "loss": 0.1387,
      "step": 2270
    },
    {
      "epoch": 79.30434782608695,
      "grad_norm": 1.0143738985061646,
      "learning_rate": 1.6728578549308416e-05,
      "loss": 0.141,
      "step": 2280
    },
    {
      "epoch": 79.65217391304348,
      "grad_norm": 1.0567244291305542,
      "learning_rate": 1.611257961912227e-05,
      "loss": 0.1367,
      "step": 2290
    },
    {
      "epoch": 80.0,
      "grad_norm": 1.1921055316925049,
      "learning_rate": 1.5507140981893253e-05,
      "loss": 0.1393,
      "step": 2300
    },
    {
      "epoch": 80.34782608695652,
      "grad_norm": 1.2248806953430176,
      "learning_rate": 1.4912338854166696e-05,
      "loss": 0.136,
      "step": 2310
    },
    {
      "epoch": 80.69565217391305,
      "grad_norm": 1.2577537298202515,
      "learning_rate": 1.4328248113495047e-05,
      "loss": 0.1396,
      "step": 2320
    },
    {
      "epoch": 81.04347826086956,
      "grad_norm": 0.9396703839302063,
      "learning_rate": 1.375494228901184e-05,
      "loss": 0.1365,
      "step": 2330
    },
    {
      "epoch": 81.3913043478261,
      "grad_norm": 0.9471282958984375,
      "learning_rate": 1.3192493552175366e-05,
      "loss": 0.1387,
      "step": 2340
    },
    {
      "epoch": 81.73913043478261,
      "grad_norm": 1.1310616731643677,
      "learning_rate": 1.2640972707683252e-05,
      "loss": 0.1383,
      "step": 2350
    },
    {
      "epoch": 82.08695652173913,
      "grad_norm": 1.1975988149642944,
      "learning_rate": 1.2100449184559093e-05,
      "loss": 0.1396,
      "step": 2360
    },
    {
      "epoch": 82.43478260869566,
      "grad_norm": 0.9346061944961548,
      "learning_rate": 1.1570991027412304e-05,
      "loss": 0.1362,
      "step": 2370
    },
    {
      "epoch": 82.78260869565217,
      "grad_norm": 1.1160486936569214,
      "learning_rate": 1.105266488787221e-05,
      "loss": 0.1379,
      "step": 2380
    },
    {
      "epoch": 83.1304347826087,
      "grad_norm": 1.1199781894683838,
      "learning_rate": 1.0545536016197499e-05,
      "loss": 0.1349,
      "step": 2390
    },
    {
      "epoch": 83.47826086956522,
      "grad_norm": 1.0365668535232544,
      "learning_rate": 1.0049668253062105e-05,
      "loss": 0.1351,
      "step": 2400
    },
    {
      "epoch": 83.82608695652173,
      "grad_norm": 0.9818322062492371,
      "learning_rate": 9.565124021518545e-06,
      "loss": 0.1404,
      "step": 2410
    },
    {
      "epoch": 84.17391304347827,
      "grad_norm": 0.9654858112335205,
      "learning_rate": 9.091964319139645e-06,
      "loss": 0.1358,
      "step": 2420
    },
    {
      "epoch": 84.52173913043478,
      "grad_norm": 0.9763336777687073,
      "learning_rate": 8.63024871033985e-06,
      "loss": 0.1384,
      "step": 2430
    },
    {
      "epoch": 84.8695652173913,
      "grad_norm": 1.2957910299301147,
      "learning_rate": 8.1800353188768e-06,
      "loss": 0.1383,
      "step": 2440
    },
    {
      "epoch": 85.21739130434783,
      "grad_norm": 1.1846587657928467,
      "learning_rate": 7.74138082053436e-06,
      "loss": 0.1335,
      "step": 2450
    },
    {
      "epoch": 85.56521739130434,
      "grad_norm": 0.9407715797424316,
      "learning_rate": 7.314340435987921e-06,
      "loss": 0.1364,
      "step": 2460
    },
    {
      "epoch": 85.91304347826087,
      "grad_norm": 1.1265534162521362,
      "learning_rate": 6.8989679238528395e-06,
      "loss": 0.1401,
      "step": 2470
    },
    {
      "epoch": 86.26086956521739,
      "grad_norm": 1.0955746173858643,
      "learning_rate": 6.4953155739169495e-06,
      "loss": 0.1365,
      "step": 2480
    },
    {
      "epoch": 86.6086956521739,
      "grad_norm": 1.100103497505188,
      "learning_rate": 6.103434200558011e-06,
      "loss": 0.1385,
      "step": 2490
    },
    {
      "epoch": 86.95652173913044,
      "grad_norm": 1.0393922328948975,
      "learning_rate": 5.723373136346854e-06,
      "loss": 0.1355,
      "step": 2500
    },
    {
      "epoch": 87.30434782608695,
      "grad_norm": 0.9539739489555359,
      "learning_rate": 5.355180225837053e-06,
      "loss": 0.1358,
      "step": 2510
    },
    {
      "epoch": 87.65217391304348,
      "grad_norm": 0.9914501309394836,
      "learning_rate": 4.998901819541979e-06,
      "loss": 0.1368,
      "step": 2520
    },
    {
      "epoch": 88.0,
      "grad_norm": 1.0592278242111206,
      "learning_rate": 4.65458276809988e-06,
      "loss": 0.1353,
      "step": 2530
    },
    {
      "epoch": 88.34782608695652,
      "grad_norm": 1.090806484222412,
      "learning_rate": 4.322266416627785e-06,
      "loss": 0.1346,
      "step": 2540
    },
    {
      "epoch": 88.69565217391305,
      "grad_norm": 1.2616488933563232,
      "learning_rate": 4.001994599264957e-06,
      "loss": 0.1368,
      "step": 2550
    },
    {
      "epoch": 89.04347826086956,
      "grad_norm": 1.0017528533935547,
      "learning_rate": 3.6938076339065207e-06,
      "loss": 0.1376,
      "step": 2560
    },
    {
      "epoch": 89.3913043478261,
      "grad_norm": 1.0998265743255615,
      "learning_rate": 3.3977443171279823e-06,
      "loss": 0.1392,
      "step": 2570
    },
    {
      "epoch": 89.73913043478261,
      "grad_norm": 1.062251329421997,
      "learning_rate": 3.1138419193012857e-06,
      "loss": 0.1327,
      "step": 2580
    },
    {
      "epoch": 90.08695652173913,
      "grad_norm": 1.0146702527999878,
      "learning_rate": 2.842136179902988e-06,
      "loss": 0.1369,
      "step": 2590
    },
    {
      "epoch": 90.43478260869566,
      "grad_norm": 1.108476996421814,
      "learning_rate": 2.582661303015066e-06,
      "loss": 0.1344,
      "step": 2600
    },
    {
      "epoch": 90.78260869565217,
      "grad_norm": 0.9756413102149963,
      "learning_rate": 2.3354499530191975e-06,
      "loss": 0.1389,
      "step": 2610
    },
    {
      "epoch": 91.1304347826087,
      "grad_norm": 0.9541319012641907,
      "learning_rate": 2.1005332504846597e-06,
      "loss": 0.1385,
      "step": 2620
    },
    {
      "epoch": 91.47826086956522,
      "grad_norm": 0.95691978931427,
      "learning_rate": 1.8779407682507077e-06,
      "loss": 0.1359,
      "step": 2630
    },
    {
      "epoch": 91.82608695652173,
      "grad_norm": 0.9363892674446106,
      "learning_rate": 1.667700527703775e-06,
      "loss": 0.1364,
      "step": 2640
    },
    {
      "epoch": 92.17391304347827,
      "grad_norm": 1.0452864170074463,
      "learning_rate": 1.469838995249928e-06,
      "loss": 0.1334,
      "step": 2650
    },
    {
      "epoch": 92.52173913043478,
      "grad_norm": 0.9851593375205994,
      "learning_rate": 1.2843810789831102e-06,
      "loss": 0.1366,
      "step": 2660
    },
    {
      "epoch": 92.8695652173913,
      "grad_norm": 1.0549771785736084,
      "learning_rate": 1.1113501255495485e-06,
      "loss": 0.1374,
      "step": 2670
    },
    {
      "epoch": 93.21739130434783,
      "grad_norm": 1.1178303956985474,
      "learning_rate": 9.50767917208728e-07,
      "loss": 0.1369,
      "step": 2680
    },
    {
      "epoch": 93.56521739130434,
      "grad_norm": 1.08258056640625,
      "learning_rate": 8.026546690913406e-07,
      "loss": 0.1365,
      "step": 2690
    },
    {
      "epoch": 93.91304347826087,
      "grad_norm": 1.0680522918701172,
      "learning_rate": 6.670290266543644e-07,
      "loss": 0.1346,
      "step": 2700
    },
    {
      "epoch": 94.26086956521739,
      "grad_norm": 1.1444164514541626,
      "learning_rate": 5.439080633339866e-07,
      "loss": 0.1363,
      "step": 2710
    },
    {
      "epoch": 94.6086956521739,
      "grad_norm": 0.940956175327301,
      "learning_rate": 4.333072783961889e-07,
      "loss": 0.1343,
      "step": 2720
    },
    {
      "epoch": 94.95652173913044,
      "grad_norm": 1.1029889583587646,
      "learning_rate": 3.3524059498565297e-07,
      "loss": 0.1384,
      "step": 2730
    },
    {
      "epoch": 95.30434782608695,
      "grad_norm": 1.11199951171875,
      "learning_rate": 2.497203583729957e-07,
      "loss": 0.1368,
      "step": 2740
    },
    {
      "epoch": 95.65217391304348,
      "grad_norm": 1.0574318170547485,
      "learning_rate": 1.7675733440066789e-07,
      "loss": 0.1357,
      "step": 2750
    },
    {
      "epoch": 96.0,
      "grad_norm": 1.0447269678115845,
      "learning_rate": 1.1636070812770516e-07,
      "loss": 0.1367,
      "step": 2760
    },
    {
      "epoch": 96.34782608695652,
      "grad_norm": 1.0507004261016846,
      "learning_rate": 6.853808267344164e-08,
      "loss": 0.1338,
      "step": 2770
    },
    {
      "epoch": 96.69565217391305,
      "grad_norm": 1.0259993076324463,
      "learning_rate": 3.3295478260364496e-08,
      "loss": 0.1377,
      "step": 2780
    },
    {
      "epoch": 97.04347826086956,
      "grad_norm": 1.029090166091919,
      "learning_rate": 1.0637331456264666e-08,
      "loss": 0.136,
      "step": 2790
    },
    {
      "epoch": 97.3913043478261,
      "grad_norm": 1.1072170734405518,
      "learning_rate": 5.664946157168949e-10,
      "loss": 0.1359,
      "step": 2800
    }
  ],
  "logging_steps": 10,
  "max_steps": 2800,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 100,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6986530764947456e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}