{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.4344629729245113,
  "eval_steps": 2000,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 5.21875, "learning_rate": 3.0000000000000004e-08, "loss": 1.4288, "step": 10},
    {"epoch": 0.01, "grad_norm": 9.25, "learning_rate": 6.000000000000001e-08, "loss": 1.5985, "step": 20},
    {"epoch": 0.01, "grad_norm": 6.90625, "learning_rate": 9e-08, "loss": 1.3356, "step": 30},
    {"epoch": 0.01, "grad_norm": 5.0625, "learning_rate": 1.2000000000000002e-07, "loss": 1.3789, "step": 40},
    {"epoch": 0.02, "grad_norm": 6.28125, "learning_rate": 1.5000000000000002e-07, "loss": 1.362, "step": 50},
    {"epoch": 0.02, "grad_norm": 7.8125, "learning_rate": 1.8e-07, "loss": 1.3439, "step": 60},
    {"epoch": 0.03, "grad_norm": 6.65625, "learning_rate": 2.1000000000000003e-07, "loss": 1.3946, "step": 70},
    {"epoch": 0.03, "grad_norm": 5.5, "learning_rate": 2.4000000000000003e-07, "loss": 1.3411, "step": 80},
    {"epoch": 0.03, "grad_norm": 6.3125, "learning_rate": 2.7e-07, "loss": 1.3421, "step": 90},
    {"epoch": 0.04, "grad_norm": 5.96875, "learning_rate": 3.0000000000000004e-07, "loss": 1.4355, "step": 100},
    {"epoch": 0.04, "grad_norm": 6.59375, "learning_rate": 3.3e-07, "loss": 1.5107, "step": 110},
    {"epoch": 0.04, "grad_norm": 7.09375, "learning_rate": 3.6e-07, "loss": 1.2753, "step": 120},
    {"epoch": 0.05, "grad_norm": 7.21875, "learning_rate": 3.9e-07, "loss": 1.3473, "step": 130},
    {"epoch": 0.05, "grad_norm": 7.40625, "learning_rate": 4.2000000000000006e-07, "loss": 1.4292, "step": 140},
    {"epoch": 0.05, "grad_norm": 7.875, "learning_rate": 4.5e-07, "loss": 1.4616, "step": 150},
    {"epoch": 0.06, "grad_norm": 5.8125, "learning_rate": 4.800000000000001e-07, "loss": 1.3935, "step": 160},
    {"epoch": 0.06, "grad_norm": 5.625, "learning_rate": 5.100000000000001e-07, "loss": 1.4381, "step": 170},
    {"epoch": 0.06, "grad_norm": 7.21875, "learning_rate": 5.4e-07, "loss": 1.3679, "step": 180},
    {"epoch": 0.07, "grad_norm": 7.375, "learning_rate": 5.7e-07, "loss": 1.4046, "step": 190},
    {"epoch": 0.07, "grad_norm": 7.0, "learning_rate": 6.000000000000001e-07, "loss": 1.3214, "step": 200},
    {"epoch": 0.08, "grad_norm": 6.59375, "learning_rate": 6.3e-07, "loss": 1.328, "step": 210},
    {"epoch": 0.08, "grad_norm": 6.25, "learning_rate": 6.6e-07, "loss": 1.3994, "step": 220},
    {"epoch": 0.08, "grad_norm": 7.375, "learning_rate": 6.900000000000001e-07, "loss": 1.3372, "step": 230},
    {"epoch": 0.09, "grad_norm": 5.65625, "learning_rate": 7.2e-07, "loss": 1.4327, "step": 240},
    {"epoch": 0.09, "grad_norm": 4.46875, "learning_rate": 7.5e-07, "loss": 1.3006, "step": 250},
    {"epoch": 0.09, "grad_norm": 8.5625, "learning_rate": 7.8e-07, "loss": 1.4955, "step": 260},
    {"epoch": 0.1, "grad_norm": 4.84375, "learning_rate": 8.100000000000001e-07, "loss": 1.2202, "step": 270},
    {"epoch": 0.1, "grad_norm": 6.09375, "learning_rate": 8.400000000000001e-07, "loss": 1.3608, "step": 280},
    {"epoch": 0.1, "grad_norm": 6.75, "learning_rate": 8.699999999999999e-07, "loss": 1.3799, "step": 290},
    {"epoch": 0.11, "grad_norm": 5.09375, "learning_rate": 9e-07, "loss": 1.3552, "step": 300},
    {"epoch": 0.11, "grad_norm": 5.90625, "learning_rate": 9.3e-07, "loss": 1.4482, "step": 310},
    {"epoch": 0.11, "grad_norm": 4.53125, "learning_rate": 9.600000000000001e-07, "loss": 1.4208, "step": 320},
    {"epoch": 0.12, "grad_norm": 6.09375, "learning_rate": 9.9e-07, "loss": 1.373, "step": 330},
    {"epoch": 0.12, "grad_norm": 6.40625, "learning_rate": 1.0200000000000002e-06, "loss": 1.3638, "step": 340},
    {"epoch": 0.13, "grad_norm": 4.875, "learning_rate": 1.05e-06, "loss": 1.4984, "step": 350},
    {"epoch": 0.13, "grad_norm": 4.25, "learning_rate": 1.08e-06, "loss": 1.4193, "step": 360},
    {"epoch": 0.13, "grad_norm": 7.5625, "learning_rate": 1.11e-06, "loss": 1.4644, "step": 370},
    {"epoch": 0.14, "grad_norm": 3.625, "learning_rate": 1.14e-06, "loss": 1.2974, "step": 380},
    {"epoch": 0.14, "grad_norm": 7.0, "learning_rate": 1.17e-06, "loss": 1.4013, "step": 390},
    {"epoch": 0.14, "grad_norm": 5.1875, "learning_rate": 1.2000000000000002e-06, "loss": 1.5707, "step": 400},
    {"epoch": 0.15, "grad_norm": 6.5, "learning_rate": 1.2299999999999999e-06, "loss": 1.4111, "step": 410},
    {"epoch": 0.15, "grad_norm": 5.71875, "learning_rate": 1.26e-06, "loss": 1.3019, "step": 420},
    {"epoch": 0.15, "grad_norm": 7.625, "learning_rate": 1.29e-06, "loss": 1.4128, "step": 430},
    {"epoch": 0.16, "grad_norm": 5.15625, "learning_rate": 1.32e-06, "loss": 1.2438, "step": 440},
    {"epoch": 0.16, "grad_norm": 5.25, "learning_rate": 1.35e-06, "loss": 1.3703, "step": 450},
    {"epoch": 0.16, "grad_norm": 6.15625, "learning_rate": 1.3800000000000001e-06, "loss": 1.3877, "step": 460},
    {"epoch": 0.17, "grad_norm": 4.6875, "learning_rate": 1.41e-06, "loss": 1.3418, "step": 470},
    {"epoch": 0.17, "grad_norm": 7.09375, "learning_rate": 1.44e-06, "loss": 1.3734, "step": 480},
    {"epoch": 0.18, "grad_norm": 3.34375, "learning_rate": 1.4700000000000001e-06, "loss": 1.3055, "step": 490},
    {"epoch": 0.18, "grad_norm": 5.65625, "learning_rate": 1.5e-06, "loss": 1.4607, "step": 500},
    {"epoch": 0.18, "grad_norm": 3.90625, "learning_rate": 1.498421052631579e-06, "loss": 1.3326, "step": 510},
    {"epoch": 0.19, "grad_norm": 6.75, "learning_rate": 1.4968421052631579e-06, "loss": 1.3793, "step": 520},
    {"epoch": 0.19, "grad_norm": 7.0625, "learning_rate": 1.4952631578947368e-06, "loss": 1.3474, "step": 530},
    {"epoch": 0.19, "grad_norm": 5.15625, "learning_rate": 1.4936842105263159e-06, "loss": 1.3504, "step": 540},
    {"epoch": 0.2, "grad_norm": 3.953125, "learning_rate": 1.4921052631578948e-06, "loss": 1.29, "step": 550},
    {"epoch": 0.2, "grad_norm": 5.625, "learning_rate": 1.4905263157894737e-06, "loss": 1.3552, "step": 560},
    {"epoch": 0.2, "grad_norm": 5.28125, "learning_rate": 1.4889473684210526e-06, "loss": 1.2646, "step": 570},
    {"epoch": 0.21, "grad_norm": 5.15625, "learning_rate": 1.4873684210526315e-06, "loss": 1.2683, "step": 580},
    {"epoch": 0.21, "grad_norm": 7.03125, "learning_rate": 1.4857894736842106e-06, "loss": 1.3948, "step": 590},
    {"epoch": 0.22, "grad_norm": 7.0, "learning_rate": 1.4842105263157895e-06, "loss": 1.2919, "step": 600},
    {"epoch": 0.22, "grad_norm": 5.15625, "learning_rate": 1.4826315789473684e-06, "loss": 1.3664, "step": 610},
    {"epoch": 0.22, "grad_norm": 5.75, "learning_rate": 1.4810526315789473e-06, "loss": 1.3198, "step": 620},
    {"epoch": 0.23, "grad_norm": 6.0625, "learning_rate": 1.4794736842105265e-06, "loss": 1.3457, "step": 630},
    {"epoch": 0.23, "grad_norm": 6.28125, "learning_rate": 1.4778947368421054e-06, "loss": 1.2258, "step": 640},
    {"epoch": 0.23, "grad_norm": 5.78125, "learning_rate": 1.4763157894736843e-06, "loss": 1.2654, "step": 650},
    {"epoch": 0.24, "grad_norm": 4.5, "learning_rate": 1.4747368421052632e-06, "loss": 1.2623, "step": 660},
    {"epoch": 0.24, "grad_norm": 3.8125, "learning_rate": 1.473157894736842e-06, "loss": 1.3726, "step": 670},
    {"epoch": 0.24, "grad_norm": 4.46875, "learning_rate": 1.4715789473684212e-06, "loss": 1.2427, "step": 680},
    {"epoch": 0.25, "grad_norm": 4.46875, "learning_rate": 1.4700000000000001e-06, "loss": 1.2959, "step": 690},
    {"epoch": 0.25, "grad_norm": 6.09375, "learning_rate": 1.468421052631579e-06, "loss": 1.2209, "step": 700},
    {"epoch": 0.25, "grad_norm": 4.84375, "learning_rate": 1.466842105263158e-06, "loss": 1.297, "step": 710},
    {"epoch": 0.26, "grad_norm": 5.625, "learning_rate": 1.4652631578947368e-06, "loss": 1.3002, "step": 720},
    {"epoch": 0.26, "grad_norm": 7.96875, "learning_rate": 1.463684210526316e-06, "loss": 1.3578, "step": 730},
    {"epoch": 0.27, "grad_norm": 6.4375, "learning_rate": 1.4621052631578946e-06, "loss": 1.3381, "step": 740},
    {"epoch": 0.27, "grad_norm": 6.25, "learning_rate": 1.4605263157894738e-06, "loss": 1.2755, "step": 750},
    {"epoch": 0.27, "grad_norm": 5.28125, "learning_rate": 1.4589473684210527e-06, "loss": 1.3443, "step": 760},
    {"epoch": 0.28, "grad_norm": 5.6875, "learning_rate": 1.4573684210526316e-06, "loss": 1.3165, "step": 770},
    {"epoch": 0.28, "grad_norm": 5.125, "learning_rate": 1.4557894736842105e-06, "loss": 1.3626, "step": 780},
    {"epoch": 0.28, "grad_norm": 6.625, "learning_rate": 1.4542105263157894e-06, "loss": 1.3729, "step": 790},
    {"epoch": 0.29, "grad_norm": 5.125, "learning_rate": 1.4526315789473685e-06, "loss": 1.3209, "step": 800},
    {"epoch": 0.29, "grad_norm": 5.96875, "learning_rate": 1.4510526315789474e-06, "loss": 1.2652, "step": 810},
    {"epoch": 0.29, "grad_norm": 3.796875, "learning_rate": 1.4494736842105263e-06, "loss": 1.2958, "step": 820},
    {"epoch": 0.3, "grad_norm": 6.84375, "learning_rate": 1.4478947368421052e-06, "loss": 1.271, "step": 830},
    {"epoch": 0.3, "grad_norm": 3.5, "learning_rate": 1.4463157894736844e-06, "loss": 1.2985, "step": 840},
    {"epoch": 0.3, "grad_norm": 4.6875, "learning_rate": 1.4447368421052633e-06, "loss": 1.3285, "step": 850},
    {"epoch": 0.31, "grad_norm": 5.3125, "learning_rate": 1.4431578947368422e-06, "loss": 1.2987, "step": 860},
    {"epoch": 0.31, "grad_norm": 4.8125, "learning_rate": 1.441578947368421e-06, "loss": 1.3378, "step": 870},
    {"epoch": 0.32, "grad_norm": 5.875, "learning_rate": 1.44e-06, "loss": 1.2942, "step": 880},
    {"epoch": 0.32, "grad_norm": 4.5625, "learning_rate": 1.438421052631579e-06, "loss": 1.259, "step": 890},
    {"epoch": 0.32, "grad_norm": 4.34375, "learning_rate": 1.436842105263158e-06, "loss": 1.2991, "step": 900},
    {"epoch": 0.33, "grad_norm": 4.9375, "learning_rate": 1.435263157894737e-06, "loss": 1.2565, "step": 910},
    {"epoch": 0.33, "grad_norm": 5.4375, "learning_rate": 1.4336842105263158e-06, "loss": 1.1737, "step": 920},
    {"epoch": 0.33, "grad_norm": 4.875, "learning_rate": 1.4321052631578947e-06, "loss": 1.2779, "step": 930},
    {"epoch": 0.34, "grad_norm": 5.375, "learning_rate": 1.4305263157894738e-06, "loss": 1.3104, "step": 940},
    {"epoch": 0.34, "grad_norm": 5.53125, "learning_rate": 1.4289473684210525e-06, "loss": 1.2818, "step": 950},
    {"epoch": 0.34, "grad_norm": 4.40625, "learning_rate": 1.4273684210526317e-06, "loss": 1.3147, "step": 960},
    {"epoch": 0.35, "grad_norm": 5.59375, "learning_rate": 1.4257894736842106e-06, "loss": 1.2595, "step": 970},
    {"epoch": 0.35, "grad_norm": 6.125, "learning_rate": 1.4242105263157895e-06, "loss": 1.3695, "step": 980},
    {"epoch": 0.36, "grad_norm": 5.96875, "learning_rate": 1.4226315789473684e-06, "loss": 1.1634, "step": 990},
    {"epoch": 0.36, "grad_norm": 4.65625, "learning_rate": 1.4210526315789473e-06, "loss": 1.2587, "step": 1000},
    {"epoch": 0.36, "grad_norm": 7.625, "learning_rate": 1.4194736842105264e-06, "loss": 1.2994, "step": 1010},
    {"epoch": 0.37, "grad_norm": 4.9375, "learning_rate": 1.4178947368421053e-06, "loss": 1.3351, "step": 1020},
    {"epoch": 0.37, "grad_norm": 5.21875, "learning_rate": 1.4163157894736842e-06, "loss": 1.2404, "step": 1030},
    {"epoch": 0.37, "grad_norm": 6.0, "learning_rate": 1.4147368421052631e-06, "loss": 1.3211, "step": 1040},
    {"epoch": 0.38, "grad_norm": 4.09375, "learning_rate": 1.4131578947368422e-06, "loss": 1.2811, "step": 1050},
    {"epoch": 0.38, "grad_norm": 5.6875, "learning_rate": 1.4115789473684211e-06, "loss": 1.2131, "step": 1060},
    {"epoch": 0.38, "grad_norm": 4.03125, "learning_rate": 1.41e-06, "loss": 1.1713, "step": 1070},
    {"epoch": 0.39, "grad_norm": 4.46875, "learning_rate": 1.408421052631579e-06, "loss": 1.2058, "step": 1080},
    {"epoch": 0.39, "grad_norm": 3.6875, "learning_rate": 1.4068421052631579e-06, "loss": 1.3375, "step": 1090},
    {"epoch": 0.39, "grad_norm": 6.21875, "learning_rate": 1.405263157894737e-06, "loss": 1.3032, "step": 1100},
    {"epoch": 0.4, "grad_norm": 3.359375, "learning_rate": 1.4036842105263159e-06, "loss": 1.3644, "step": 1110},
    {"epoch": 0.4, "grad_norm": 5.65625, "learning_rate": 1.4021052631578948e-06, "loss": 1.2786, "step": 1120},
    {"epoch": 0.41, "grad_norm": 5.8125, "learning_rate": 1.4005263157894737e-06, "loss": 1.2067, "step": 1130},
    {"epoch": 0.41, "grad_norm": 3.90625, "learning_rate": 1.3989473684210526e-06, "loss": 1.3127, "step": 1140},
    {"epoch": 0.41, "grad_norm": 4.0, "learning_rate": 1.3973684210526317e-06, "loss": 1.2442, "step": 1150},
    {"epoch": 0.42, "grad_norm": 5.875, "learning_rate": 1.3957894736842104e-06, "loss": 1.2307, "step": 1160},
    {"epoch": 0.42, "grad_norm": 4.4375, "learning_rate": 1.3942105263157895e-06, "loss": 1.2828, "step": 1170},
    {"epoch": 0.42, "grad_norm": 4.28125, "learning_rate": 1.3926315789473685e-06, "loss": 1.2676, "step": 1180},
    {"epoch": 0.43, "grad_norm": 5.1875, "learning_rate": 1.3910526315789476e-06, "loss": 1.1911, "step": 1190},
    {"epoch": 0.43, "grad_norm": 4.40625, "learning_rate": 1.3894736842105263e-06, "loss": 1.3177, "step": 1200},
    {"epoch": 0.43, "grad_norm": 5.40625, "learning_rate": 1.3878947368421052e-06, "loss": 1.2891, "step": 1210},
    {"epoch": 0.44, "grad_norm": 4.90625, "learning_rate": 1.3863157894736843e-06, "loss": 1.303, "step": 1220},
    {"epoch": 0.44, "grad_norm": 4.125, "learning_rate": 1.3847368421052632e-06, "loss": 1.2586, "step": 1230},
    {"epoch": 0.44, "grad_norm": 6.8125, "learning_rate": 1.383157894736842e-06, "loss": 1.3141, "step": 1240},
    {"epoch": 0.45, "grad_norm": 4.3125, "learning_rate": 1.381578947368421e-06, "loss": 1.2174, "step": 1250},
    {"epoch": 0.45, "grad_norm": 4.625, "learning_rate": 1.3800000000000001e-06, "loss": 1.2609, "step": 1260},
    {"epoch": 0.46, "grad_norm": 5.75, "learning_rate": 1.378421052631579e-06, "loss": 1.1323, "step": 1270},
    {"epoch": 0.46, "grad_norm": 4.625, "learning_rate": 1.376842105263158e-06, "loss": 1.2328, "step": 1280},
    {"epoch": 0.46, "grad_norm": 5.0625, "learning_rate": 1.3752631578947368e-06, "loss": 1.3087, "step": 1290},
    {"epoch": 0.47, "grad_norm": 4.90625, "learning_rate": 1.3736842105263158e-06, "loss": 1.2428, "step": 1300},
    {"epoch": 0.47, "grad_norm": 6.46875, "learning_rate": 1.3721052631578949e-06, "loss": 1.235, "step": 1310},
    {"epoch": 0.47, "grad_norm": 5.46875, "learning_rate": 1.3705263157894738e-06, "loss": 1.2108, "step": 1320},
    {"epoch": 0.48, "grad_norm": 4.875, "learning_rate": 1.3689473684210527e-06, "loss": 1.1704, "step": 1330},
    {"epoch": 0.48, "grad_norm": 4.625, "learning_rate": 1.3673684210526316e-06, "loss": 1.3411, "step": 1340},
    {"epoch": 0.48, "grad_norm": 5.4375, "learning_rate": 1.3657894736842107e-06, "loss": 1.239, "step": 1350},
    {"epoch": 0.49, "grad_norm": 4.1875, "learning_rate": 1.3642105263157896e-06, "loss": 1.1661, "step": 1360},
    {"epoch": 0.49, "grad_norm": 4.4375, "learning_rate": 1.3626315789473683e-06, "loss": 1.338, "step": 1370},
    {"epoch": 0.49, "grad_norm": 5.0625, "learning_rate": 1.3610526315789474e-06, "loss": 1.2926, "step": 1380},
    {"epoch": 0.5, "grad_norm": 2.875, "learning_rate": 1.3594736842105263e-06, "loss": 1.2866, "step": 1390},
    {"epoch": 0.5, "grad_norm": 4.96875, "learning_rate": 1.3578947368421055e-06, "loss": 1.3333, "step": 1400},
    {"epoch": 0.51, "grad_norm": 6.53125, "learning_rate": 1.3563157894736842e-06, "loss": 1.2472, "step": 1410},
    {"epoch": 0.51, "grad_norm": 3.390625, "learning_rate": 1.3547368421052633e-06, "loss": 1.1796, "step": 1420},
    {"epoch": 0.51, "grad_norm": 6.125, "learning_rate": 1.3531578947368422e-06, "loss": 1.321, "step": 1430},
    {"epoch": 0.52, "grad_norm": 4.5625, "learning_rate": 1.351578947368421e-06, "loss": 1.2529, "step": 1440},
    {"epoch": 0.52, "grad_norm": 5.40625, "learning_rate": 1.35e-06, "loss": 1.2682, "step": 1450},
    {"epoch": 0.52, "grad_norm": 4.84375, "learning_rate": 1.348421052631579e-06, "loss": 1.358, "step": 1460},
    {"epoch": 0.53, "grad_norm": 5.21875, "learning_rate": 1.346842105263158e-06, "loss": 1.3318, "step": 1470},
    {"epoch": 0.53, "grad_norm": 5.65625, "learning_rate": 1.345263157894737e-06, "loss": 1.2531, "step": 1480},
    {"epoch": 0.53, "grad_norm": 5.46875, "learning_rate": 1.3436842105263158e-06, "loss": 1.2796, "step": 1490},
    {"epoch": 0.54, "grad_norm": 3.203125, "learning_rate": 1.3421052631578947e-06, "loss": 1.2315, "step": 1500},
    {"epoch": 0.54, "grad_norm": 4.71875, "learning_rate": 1.3405263157894736e-06, "loss": 1.2501, "step": 1510},
    {"epoch": 0.55, "grad_norm": 5.03125, "learning_rate": 1.3389473684210528e-06, "loss": 1.3444, "step": 1520},
    {"epoch": 0.55, "grad_norm": 6.3125, "learning_rate": 1.3373684210526317e-06, "loss": 1.2065, "step": 1530},
    {"epoch": 0.55, "grad_norm": 4.59375, "learning_rate": 1.3357894736842106e-06, "loss": 1.3091, "step": 1540},
    {"epoch": 0.56, "grad_norm": 4.625, "learning_rate": 1.3342105263157895e-06, "loss": 1.1932, "step": 1550},
    {"epoch": 0.56, "grad_norm": 3.859375, "learning_rate": 1.3326315789473686e-06, "loss": 1.1837, "step": 1560},
    {"epoch": 0.56, "grad_norm": 5.40625, "learning_rate": 1.3310526315789475e-06, "loss": 1.1756, "step": 1570},
    {"epoch": 0.57, "grad_norm": 4.65625, "learning_rate": 1.3294736842105262e-06, "loss": 1.3163, "step": 1580},
    {"epoch": 0.57, "grad_norm": 3.921875, "learning_rate": 1.3278947368421053e-06, "loss": 1.1937, "step": 1590},
    {"epoch": 0.57, "grad_norm": 4.4375, "learning_rate": 1.3263157894736842e-06, "loss": 1.3058, "step": 1600},
    {"epoch": 0.58, "grad_norm": 3.609375, "learning_rate": 1.3247368421052633e-06, "loss": 1.2718, "step": 1610},
    {"epoch": 0.58, "grad_norm": 4.875, "learning_rate": 1.323157894736842e-06, "loss": 1.2027, "step": 1620},
    {"epoch": 0.58, "grad_norm": 6.15625, "learning_rate": 1.3215789473684212e-06, "loss": 1.2548, "step": 1630},
    {"epoch": 0.59, "grad_norm": 5.875, "learning_rate": 1.32e-06, "loss": 1.3534, "step": 1640},
    {"epoch": 0.59, "grad_norm": 4.125, "learning_rate": 1.318421052631579e-06, "loss": 1.2223, "step": 1650},
    {"epoch": 0.6, "grad_norm": 4.875, "learning_rate": 1.3168421052631579e-06, "loss": 1.2684, "step": 1660},
    {"epoch": 0.6, "grad_norm": 5.5625, "learning_rate": 1.3152631578947368e-06, "loss": 1.2611, "step": 1670},
    {"epoch": 0.6, "grad_norm": 4.71875, "learning_rate": 1.313684210526316e-06, "loss": 1.3116, "step": 1680},
    {"epoch": 0.61, "grad_norm": 5.5, "learning_rate": 1.3121052631578948e-06, "loss": 1.303, "step": 1690},
    {"epoch": 0.61, "grad_norm": 4.46875, "learning_rate": 1.3105263157894737e-06, "loss": 1.2399, "step": 1700},
    {"epoch": 0.61, "grad_norm": 4.03125, "learning_rate": 1.3089473684210526e-06, "loss": 1.2954, "step": 1710},
    {"epoch": 0.62, "grad_norm": 5.125, "learning_rate": 1.3073684210526315e-06, "loss": 1.2689, "step": 1720},
    {"epoch": 0.62, "grad_norm": 5.625, "learning_rate": 1.3057894736842107e-06, "loss": 1.3605, "step": 1730},
    {"epoch": 0.62, "grad_norm": 4.125, "learning_rate": 1.3042105263157896e-06, "loss": 1.2873, "step": 1740},
    {"epoch": 0.63, "grad_norm": 5.5, "learning_rate": 1.3026315789473685e-06, "loss": 1.2686, "step": 1750},
    {"epoch": 0.63, "grad_norm": 5.5625, "learning_rate": 1.3010526315789474e-06, "loss": 1.244, "step": 1760},
    {"epoch": 0.63, "grad_norm": 4.3125, "learning_rate": 1.2994736842105265e-06, "loss": 1.2044, "step": 1770},
    {"epoch": 0.64, "grad_norm": 3.5625, "learning_rate": 1.2978947368421054e-06, "loss": 1.2026, "step": 1780},
    {"epoch": 0.64, "grad_norm": 5.4375, "learning_rate": 1.296315789473684e-06, "loss": 1.306, "step": 1790},
    {"epoch": 0.65, "grad_norm": 4.125, "learning_rate": 1.2947368421052632e-06, "loss": 1.2968, "step": 1800},
    {"epoch": 0.65, "grad_norm": 6.75, "learning_rate": 1.2931578947368421e-06, "loss": 1.2308, "step": 1810},
    {"epoch": 0.65, "grad_norm": 5.9375, "learning_rate": 1.2915789473684212e-06, "loss": 1.3022, "step": 1820},
    {"epoch": 0.66, "grad_norm": 6.34375, "learning_rate": 1.29e-06, "loss": 1.2298, "step": 1830},
    {"epoch": 0.66, "grad_norm": 4.8125, "learning_rate": 1.288421052631579e-06, "loss": 1.1421, "step": 1840},
    {"epoch": 0.66, "grad_norm": 5.0, "learning_rate": 1.286842105263158e-06, "loss": 1.2018, "step": 1850},
    {"epoch": 0.67, "grad_norm": 5.84375, "learning_rate": 1.2852631578947369e-06, "loss": 1.2329, "step": 1860},
    {"epoch": 0.67, "grad_norm": 5.375, "learning_rate": 1.2836842105263158e-06, "loss": 1.2509, "step": 1870},
    {"epoch": 0.67, "grad_norm": 4.34375, "learning_rate": 1.2821052631578947e-06, "loss": 1.2556, "step": 1880},
    {"epoch": 0.68, "grad_norm": 4.90625, "learning_rate": 1.2805263157894738e-06, "loss": 1.3504, "step": 1890},
    {"epoch": 0.68, "grad_norm": 5.1875, "learning_rate": 1.2789473684210527e-06, "loss": 1.1525, "step": 1900},
    {"epoch": 0.68, "grad_norm": 5.71875, "learning_rate": 1.2773684210526316e-06, "loss": 1.2227, "step": 1910},
    {"epoch": 0.69, "grad_norm": 3.65625, "learning_rate": 1.2757894736842105e-06, "loss": 1.2962, "step": 1920},
    {"epoch": 0.69, "grad_norm": 3.765625, "learning_rate": 1.2742105263157894e-06, "loss": 1.1717, "step": 1930},
    {"epoch": 0.7, "grad_norm": 6.65625, "learning_rate": 1.2726315789473685e-06, "loss": 1.2543, "step": 1940},
    {"epoch": 0.7, "grad_norm": 3.984375, "learning_rate": 1.2710526315789474e-06, "loss": 1.2092, "step": 1950},
    {"epoch": 0.7, "grad_norm": 4.9375, "learning_rate": 1.2694736842105264e-06, "loss": 1.1855, "step": 1960},
    {"epoch": 0.71, "grad_norm": 4.3125, "learning_rate": 1.2678947368421053e-06, "loss": 1.2698, "step": 1970},
    {"epoch": 0.71, "grad_norm": 5.625, "learning_rate": 1.2663157894736844e-06, "loss": 1.2615, "step": 1980},
    {"epoch": 0.71, "grad_norm": 4.59375, "learning_rate": 1.2647368421052633e-06, "loss": 1.2156, "step": 1990},
    {"epoch": 0.72, "grad_norm": 6.875, "learning_rate": 1.263157894736842e-06, "loss": 1.2328, "step": 2000},
    {"epoch": 0.72, "eval_loss": 1.3270684480667114, "eval_runtime": 64.5706, "eval_samples_per_second": 15.487, "eval_steps_per_second": 15.487, "step": 2000},
    {"epoch": 0.72, "grad_norm": 5.4375, "learning_rate": 1.261578947368421e-06, "loss": 1.1687, "step": 2010},
    {"epoch": 0.72, "grad_norm": 5.15625, "learning_rate": 1.26e-06, "loss": 1.2967, "step": 2020},
    {"epoch": 0.73, "grad_norm": 5.21875, "learning_rate": 1.2584210526315791e-06, "loss": 1.2625, "step": 2030},
    {"epoch": 0.73, "grad_norm": 4.5625, "learning_rate": 1.2568421052631578e-06, "loss": 1.2231, "step": 2040},
    {"epoch": 0.74, "grad_norm": 6.21875, "learning_rate": 1.255263157894737e-06, "loss": 1.2561, "step": 2050},
    {"epoch": 0.74, "grad_norm": 5.4375, "learning_rate": 1.2536842105263158e-06, "loss": 1.1732, "step": 2060},
    {"epoch": 0.74, "grad_norm": 3.21875, "learning_rate": 1.2521052631578948e-06, "loss": 1.1991, "step": 2070},
    {"epoch": 0.75, "grad_norm": 4.40625, "learning_rate": 1.2505263157894737e-06, "loss": 1.2405, "step": 2080},
    {"epoch": 0.75, "grad_norm": 3.921875, "learning_rate": 1.2489473684210526e-06, "loss": 1.2658, "step": 2090},
    {"epoch": 0.75, "grad_norm": 5.46875, "learning_rate": 1.2473684210526317e-06, "loss": 1.214, "step": 2100},
    {"epoch": 0.76, "grad_norm": 4.375, "learning_rate": 1.2457894736842106e-06, "loss": 1.2522, "step": 2110},
    {"epoch": 0.76, "grad_norm": 3.140625, "learning_rate": 1.2442105263157895e-06, "loss": 1.2873, "step": 2120},
    {"epoch": 0.76, "grad_norm": 4.71875, "learning_rate": 1.2426315789473684e-06, "loss": 1.2513, "step": 2130},
    {"epoch": 0.77, "grad_norm": 5.6875, "learning_rate": 1.2410526315789473e-06, "loss": 1.2378, "step": 2140},
    {"epoch": 0.77, "grad_norm": 3.859375, "learning_rate": 1.2394736842105264e-06, "loss": 1.2217, "step": 2150},
    {"epoch": 0.77, "grad_norm": 5.65625, "learning_rate": 1.2378947368421053e-06, "loss": 1.1462, "step": 2160},
    {"epoch": 0.78, "grad_norm": 5.0625, "learning_rate": 1.2363157894736842e-06, "loss": 1.2306, "step": 2170},
    {"epoch": 0.78, "grad_norm": 4.09375, "learning_rate": 1.2347368421052631e-06, "loss": 1.2744, "step": 2180},
    {"epoch": 0.79, "grad_norm": 5.09375, "learning_rate": 1.2331578947368423e-06, "loss": 1.1246, "step": 2190},
    {"epoch": 0.79, "grad_norm": 5.40625, "learning_rate": 1.2315789473684212e-06, "loss": 1.2965, "step": 2200},
    {"epoch": 0.79, "grad_norm": 5.875, "learning_rate": 1.2299999999999999e-06, "loss": 1.1981, "step": 2210},
    {"epoch": 0.8, "grad_norm": 4.78125, "learning_rate": 1.228421052631579e-06, "loss": 1.1496, "step": 2220},
    {"epoch": 0.8, "grad_norm": 5.03125, "learning_rate": 1.226842105263158e-06, "loss": 1.2732, "step": 2230},
    {"epoch": 0.8, "grad_norm": 3.4375, "learning_rate": 1.225263157894737e-06, "loss": 1.2618, "step": 2240},
    {"epoch": 0.81, "grad_norm": 4.125, "learning_rate": 1.2236842105263157e-06, "loss": 1.1922, "step": 2250},
    {"epoch": 0.81, "grad_norm": 4.9375, "learning_rate": 1.2221052631578948e-06, "loss": 1.1998, "step": 2260},
    {"epoch": 0.81, "grad_norm": 3.953125, "learning_rate": 1.2205263157894737e-06, "loss": 1.237, "step": 2270},
    {"epoch": 0.82, "grad_norm": 5.84375, "learning_rate": 1.2189473684210526e-06, "loss": 1.2583, "step": 2280},
    {"epoch": 0.82, "grad_norm": 6.15625, "learning_rate": 1.2173684210526315e-06, "loss": 1.2544, "step": 2290},
    {"epoch": 0.82, "grad_norm": 4.03125, "learning_rate": 1.2157894736842105e-06, "loss": 1.3154, "step": 2300},
    {"epoch": 0.83, "grad_norm": 4.90625, "learning_rate": 1.2142105263157896e-06, "loss": 1.248, "step": 2310},
    {"epoch": 0.83, "grad_norm": 6.5, "learning_rate": 1.2126315789473685e-06, "loss": 1.2039, "step": 2320},
    {"epoch": 0.84, "grad_norm": 3.796875, "learning_rate": 1.2110526315789474e-06, "loss": 1.328, "step": 2330},
    {"epoch": 0.84, "grad_norm": 6.0625, "learning_rate": 1.2094736842105263e-06, "loss": 1.2407, "step": 2340},
    {"epoch": 0.84, "grad_norm": 5.5625, "learning_rate": 1.2078947368421052e-06, "loss": 1.1751, "step": 2350},
    {"epoch": 0.85, "grad_norm": 3.828125, "learning_rate": 1.2063157894736843e-06, "loss": 1.2525, "step": 2360},
    {"epoch": 0.85, "grad_norm": 4.65625, "learning_rate": 1.2047368421052632e-06, "loss": 1.2554, "step": 2370},
    {"epoch": 0.85, "grad_norm": 6.03125, "learning_rate": 1.2031578947368421e-06, "loss": 1.2803, "step": 2380},
    {"epoch": 0.86, "grad_norm": 3.59375, "learning_rate": 1.201578947368421e-06, "loss": 1.1723, "step": 2390},
    {"epoch": 0.86, "grad_norm": 6.65625, "learning_rate": 1.2000000000000002e-06, "loss": 1.2068, "step": 2400},
    {"epoch": 0.86, "grad_norm": 5.5625, "learning_rate": 1.198421052631579e-06, "loss": 1.2599, "step": 2410},
    {"epoch": 0.87, "grad_norm": 4.4375, "learning_rate": 1.1968421052631578e-06, "loss": 1.327, "step": 2420},
    {"epoch": 0.87, "grad_norm": 4.53125, "learning_rate": 1.1952631578947369e-06, "loss": 1.1712, "step": 2430},
    {"epoch": 0.88, "grad_norm": 4.96875, "learning_rate": 1.1936842105263158e-06, "loss": 1.303, "step": 2440},
    {"epoch": 0.88, "grad_norm": 4.5, "learning_rate": 1.192105263157895e-06, "loss": 1.2574, "step": 2450},
    {"epoch": 0.88, "grad_norm": 6.90625, "learning_rate": 1.1905263157894736e-06, "loss": 1.3576, "step": 2460},
    {"epoch": 0.89, "grad_norm": 6.21875, "learning_rate": 1.1889473684210527e-06, "loss": 1.2322, "step": 2470},
    {"epoch": 0.89, "grad_norm": 4.78125, "learning_rate": 1.1873684210526316e-06, "loss": 1.2198, "step": 2480},
    {"epoch": 0.89, "grad_norm": 4.9375, "learning_rate": 1.1857894736842105e-06, "loss": 1.2068, "step": 2490},
    {"epoch": 0.9, "grad_norm": 5.6875, "learning_rate": 1.1842105263157894e-06, "loss": 1.2278, "step": 2500},
    {"epoch": 0.9, "grad_norm": 4.46875, "learning_rate": 1.1826315789473683e-06, "loss": 1.2847, "step": 2510},
    {"epoch": 0.9, "grad_norm": 5.34375, "learning_rate": 1.1810526315789475e-06, "loss": 1.1646, "step": 2520},
    {"epoch": 0.91, "grad_norm": 4.09375, "learning_rate": 1.1794736842105264e-06, "loss": 1.201, "step": 2530},
    {"epoch": 0.91, "grad_norm": 6.34375, "learning_rate": 1.1778947368421053e-06, "loss": 1.1445, "step": 2540},
    {"epoch": 0.91, "grad_norm": 4.3125, "learning_rate": 1.1763157894736842e-06, "loss": 1.2555, "step": 2550},
    {"epoch": 0.92, "grad_norm": 4.125, "learning_rate": 1.174736842105263e-06, "loss": 1.1944, "step": 2560},
    {"epoch": 0.92, "grad_norm": 4.65625, "learning_rate": 1.1731578947368422e-06, "loss": 1.1288, "step": 2570},
    {"epoch": 0.93, "grad_norm": 4.9375, "learning_rate": 1.1715789473684211e-06, "loss": 1.2126, "step": 2580},
    {"epoch": 0.93, "grad_norm": 4.3125, "learning_rate": 1.17e-06, "loss": 1.3395, "step": 2590},
    {"epoch": 0.93, "grad_norm": 4.875, "learning_rate": 1.168421052631579e-06, "loss": 1.1955, "step": 2600},
    {"epoch": 0.94, "grad_norm": 4.40625, "learning_rate": 1.166842105263158e-06, "loss": 1.1931, "step": 2610},
    {"epoch": 0.94, "grad_norm": 4.96875, "learning_rate": 1.165263157894737e-06, "loss": 1.2561, "step": 2620},
    {"epoch": 0.94, "grad_norm": 4.0625, "learning_rate": 1.1636842105263156e-06, "loss": 1.2339, "step": 2630},
    {"epoch": 0.95, "grad_norm": 5.53125, "learning_rate": 1.1621052631578948e-06, "loss": 1.1738, "step": 2640},
    {"epoch": 0.95, "grad_norm": 4.28125, "learning_rate": 1.1605263157894737e-06, "loss": 1.1535, "step": 2650},
    {"epoch": 0.95, "grad_norm": 3.703125, "learning_rate": 1.1589473684210528e-06, "loss": 1.2578, "step": 2660},
    {"epoch": 0.96, "grad_norm": 5.1875, "learning_rate": 1.1573684210526315e-06, "loss": 1.229, "step": 2670},
    {"epoch": 0.96, "grad_norm": 3.859375, "learning_rate": 1.1557894736842106e-06, "loss": 1.2044, "step": 2680},
    {"epoch": 0.96, "grad_norm": 6.0, "learning_rate": 1.1542105263157895e-06, "loss": 1.1518, "step": 2690},
    {"epoch": 0.97, "grad_norm": 4.625, "learning_rate": 1.1526315789473684e-06, "loss": 1.1951, "step": 2700},
    {"epoch": 0.97, "grad_norm": 4.375, "learning_rate": 1.1510526315789473e-06, "loss": 1.2361, "step": 2710},
    {"epoch": 0.98, "grad_norm": 5.125, "learning_rate": 1.1494736842105262e-06, "loss": 1.2218, "step": 2720},
    {"epoch": 0.98, "grad_norm": 4.4375, "learning_rate": 1.1478947368421054e-06, "loss": 1.1337, "step": 2730},
    {"epoch": 0.98, "grad_norm": 5.21875, "learning_rate": 1.1463157894736843e-06, "loss": 1.2582, "step": 2740},
    {"epoch": 0.99, "grad_norm": 3.34375, "learning_rate": 1.1447368421052632e-06, "loss": 1.2717, "step": 2750},
    {"epoch": 0.99, "grad_norm": 4.28125, "learning_rate": 1.143157894736842e-06, "loss": 1.2379, "step": 2760},
    {"epoch": 0.99, "grad_norm": 3.8125, "learning_rate": 1.141578947368421e-06, "loss": 1.1829, "step": 2770},
    {"epoch": 1.0, "grad_norm": 5.625, "learning_rate": 1.14e-06, "loss": 1.3186, "step": 2780},
    {"epoch": 1.0, "grad_norm": 5.96875, "learning_rate": 1.138421052631579e-06, "loss": 1.1503, "step": 2790},
    {"epoch": 1.0, "grad_norm": 4.03125, "learning_rate": 1.136842105263158e-06, "loss": 1.347, "step": 2800},
    {"epoch": 1.01, "grad_norm": 5.53125, "learning_rate": 1.1352631578947368e-06, "loss": 1.187, "step": 2810},
    {"epoch": 1.01, "grad_norm": 6.09375, "learning_rate": 1.133684210526316e-06, "loss": 1.2084, "step": 2820},
    {"epoch": 1.01, "grad_norm": 5.1875, "learning_rate": 1.1321052631578948e-06, "loss": 1.1274, "step": 2830},
    {"epoch": 1.02, "grad_norm": 4.375, "learning_rate": 1.1305263157894735e-06, "loss": 1.2603, "step": 2840},
    {"epoch": 1.02, "grad_norm": 3.5, "learning_rate": 1.1289473684210527e-06, "loss": 1.2914, "step": 2850},
    {"epoch": 1.03, "grad_norm": 4.03125, "learning_rate": 1.1273684210526316e-06, "loss": 1.2076, "step": 2860},
    {"epoch": 1.03, "grad_norm": 5.96875, "learning_rate": 1.1257894736842107e-06, "loss": 1.3274, "step": 2870},
    {"epoch": 1.03, "grad_norm": 4.40625, "learning_rate": 1.1242105263157894e-06, "loss": 1.3251, "step": 2880},
    {"epoch": 1.04, "grad_norm": 5.59375, "learning_rate": 1.1226315789473685e-06, "loss": 1.16, "step": 2890},
    {"epoch": 1.04, "grad_norm": 5.71875, "learning_rate": 1.1210526315789474e-06, "loss": 1.2715, "step": 2900},
    {"epoch": 1.04, "grad_norm": 4.1875, "learning_rate": 1.1194736842105265e-06, "loss": 1.2301, "step": 2910},
    {"epoch": 1.05, "grad_norm": 4.46875, "learning_rate": 1.1178947368421052e-06, "loss": 1.2531, "step": 2920},
    {"epoch": 1.05, "grad_norm": 7.25, "learning_rate": 1.1163157894736841e-06, "loss": 1.2059, "step": 2930},
    {"epoch": 1.05, "grad_norm": 5.6875, "learning_rate": 1.1147368421052632e-06, "loss": 1.3348, "step": 2940},
    {"epoch": 1.06, "grad_norm": 5.25, "learning_rate": 1.1131578947368421e-06, "loss": 1.2387, "step": 2950},
    {"epoch": 1.06, "grad_norm": 4.0625, "learning_rate": 1.111578947368421e-06, "loss": 1.2128, "step": 2960},
    {"epoch": 1.07, "grad_norm": 4.28125, "learning_rate": 1.11e-06, "loss": 1.1845, "step": 2970},
    {"epoch": 1.07, "grad_norm": 4.75, "learning_rate": 1.108421052631579e-06, "loss": 1.2829, "step": 2980},
    {"epoch": 1.07, "grad_norm": 5.65625, "learning_rate": 1.106842105263158e-06, "loss": 1.185, "step": 2990},
    {"epoch": 1.08, "grad_norm": 4.96875, "learning_rate": 1.1052631578947369e-06, "loss": 1.3094, "step": 3000},
    {"epoch": 1.08, "grad_norm": 5.09375, "learning_rate": 1.1036842105263158e-06, "loss": 1.3127, "step": 3010},
    {"epoch": 1.08, "grad_norm": 5.03125, "learning_rate": 1.1021052631578947e-06, "loss": 1.3655, "step": 3020},
    {"epoch": 1.09, "grad_norm": 6.28125, "learning_rate": 1.1005263157894738e-06, "loss": 1.272, "step": 3030},
    {"epoch": 1.09, "grad_norm": 4.5625, "learning_rate": 1.0989473684210527e-06, "loss": 1.2848, "step": 3040},
    {"epoch": 1.09, "grad_norm": 5.4375, "learning_rate": 1.0973684210526316e-06, "loss": 1.2372, "step": 3050},
    {"epoch": 1.1, "grad_norm": 5.09375, "learning_rate": 1.0957894736842105e-06, "loss": 1.1784, "step": 3060},
    {"epoch": 1.1, "grad_norm": 6.875, "learning_rate": 1.0942105263157895e-06, "loss": 1.245, "step": 3070},
    {"epoch": 1.1, "grad_norm": 3.484375, "learning_rate": 1.0926315789473686e-06, "loss": 1.2199, "step": 3080},
    {"epoch": 1.11, "grad_norm": 4.34375, "learning_rate": 1.0910526315789473e-06, "loss": 1.2262, "step": 3090},
    {"epoch": 1.11, "grad_norm": 4.5625, "learning_rate": 1.0894736842105264e-06, "loss": 1.1917, "step": 3100},
    {"epoch": 1.12, "grad_norm": 3.984375, "learning_rate": 1.0878947368421053e-06, "loss": 1.2455, "step": 3110},
    {"epoch": 1.12, "grad_norm": 5.21875, "learning_rate": 1.0863157894736844e-06, "loss": 1.1966, "step": 3120},
    {"epoch": 1.12, "grad_norm": 6.0, "learning_rate": 1.084736842105263e-06, "loss": 1.396, "step": 3130},
    {"epoch": 1.13, "grad_norm": 5.09375, "learning_rate": 1.083157894736842e-06, "loss": 1.2836, "step": 3140},
    {"epoch": 1.13, "grad_norm": 4.75, "learning_rate": 1.0815789473684211e-06, "loss": 1.2193, "step": 3150},
    {"epoch": 1.13, "grad_norm": 6.3125, "learning_rate": 1.08e-06, "loss": 1.2754, "step": 3160},
    {"epoch": 1.14, "grad_norm": 5.25, "learning_rate": 1.078421052631579e-06, "loss": 1.3408, "step": 3170},
    {"epoch": 1.14, "grad_norm": 5.53125, "learning_rate": 1.0768421052631578e-06, "loss": 1.2585, "step": 3180},
    {"epoch": 1.14, "grad_norm": 4.75, "learning_rate": 1.075263157894737e-06, "loss": 1.2207, "step": 3190},
    {"epoch": 1.15, "grad_norm": 5.25, "learning_rate": 1.0736842105263159e-06, "loss": 1.3081, "step": 3200},
    {"epoch": 1.15, "grad_norm": 5.9375, "learning_rate": 1.0721052631578948e-06, "loss": 1.2992, "step": 3210},
    {"epoch": 1.15, "grad_norm": 4.125, "learning_rate": 1.0705263157894737e-06, "loss": 1.2481, "step": 3220},
    {"epoch": 1.16, "grad_norm": 6.21875, "learning_rate": 1.0689473684210526e-06, "loss": 1.2883, "step": 3230},
    {"epoch": 1.16, "grad_norm": 6.25, "learning_rate": 1.0673684210526317e-06, "loss": 1.2565, "step": 3240},
    {"epoch": 1.17, "grad_norm": 4.6875, "learning_rate": 1.0657894736842106e-06, "loss": 1.1712, "step": 3250},
    {"epoch": 1.17, "grad_norm": 6.03125, "learning_rate": 1.0642105263157895e-06, "loss": 1.1657, "step": 3260},
    {"epoch": 1.17, "grad_norm": 5.90625, "learning_rate": 1.0626315789473684e-06, "loss": 1.3871, "step": 3270},
    {"epoch": 1.18, "grad_norm": 5.0, "learning_rate": 1.0610526315789473e-06, "loss": 1.2952, "step": 3280},
    {"epoch": 1.18, "grad_norm": 5.59375, "learning_rate": 1.0594736842105265e-06, "loss": 1.2694, "step": 3290},
    {"epoch": 1.18, "grad_norm": 6.375, "learning_rate": 1.0578947368421052e-06, "loss": 1.2199, "step": 3300},
    {"epoch": 1.19, "grad_norm": 5.65625, "learning_rate": 1.0563157894736843e-06, "loss": 1.3025, "step": 3310},
    {"epoch": 1.19, "grad_norm": 5.28125, "learning_rate": 1.0547368421052632e-06, "loss": 1.3357, "step": 3320},
    {"epoch": 1.19, "grad_norm": 3.65625, "learning_rate": 1.0531578947368423e-06, "loss": 1.2073, "step": 3330},
    {"epoch": 1.2, "grad_norm": 6.125, "learning_rate": 1.051578947368421e-06, "loss": 1.2984, "step": 3340},
    {"epoch": 1.2, "grad_norm": 5.0, "learning_rate": 1.05e-06, "loss": 1.2861, "step": 3350},
    {"epoch": 1.2, "grad_norm": 4.6875, "learning_rate": 1.048421052631579e-06, "loss": 1.2146, "step": 3360},
    {"epoch": 1.21, "grad_norm": 4.625, "learning_rate": 1.046842105263158e-06, "loss": 1.3299, "step": 3370},
    {"epoch": 1.21, "grad_norm": 6.28125, "learning_rate": 1.0452631578947368e-06, "loss": 1.2183, "step": 3380},
    {"epoch": 1.22, "grad_norm": 5.09375, "learning_rate": 1.0436842105263157e-06, "loss": 1.2435, "step": 3390},
    {"epoch": 1.22, "grad_norm": 4.59375, "learning_rate": 1.0421052631578949e-06, "loss": 1.2189, "step": 3400},
    {"epoch": 1.22, "grad_norm": 4.21875, "learning_rate": 1.0405263157894738e-06, "loss": 1.2964, "step": 3410},
    {"epoch": 1.23, "grad_norm": 6.03125, "learning_rate": 1.0389473684210527e-06, "loss": 1.1613, "step": 3420},
    {"epoch": 1.23, "grad_norm": 5.625, "learning_rate": 1.0373684210526316e-06, "loss": 1.2126, "step": 3430},
    {"epoch": 1.23, "grad_norm": 5.40625, "learning_rate": 1.0357894736842105e-06, "loss": 1.2181, "step": 3440},
    {"epoch": 1.24, "grad_norm": 4.0, "learning_rate": 1.0342105263157896e-06, "loss": 1.2717, "step": 3450},
    {"epoch": 1.24, "grad_norm": 4.96875, "learning_rate": 1.0326315789473685e-06, "loss": 1.161, "step": 3460},
    {"epoch": 1.24, "grad_norm": 4.5, "learning_rate": 1.0310526315789474e-06, "loss": 1.3256, "step": 3470},
    {"epoch": 1.25, "grad_norm": 3.453125, "learning_rate": 1.0294736842105263e-06, "loss": 1.2079, "step": 3480},
    {"epoch": 1.25, "grad_norm": 5.40625, "learning_rate": 1.0278947368421052e-06, "loss": 1.2473, "step": 3490},
    {"epoch": 1.26, "grad_norm": 4.53125, "learning_rate": 1.0263157894736843e-06, "loss": 1.2761, "step": 3500},
    {"epoch": 1.26, "grad_norm": 3.984375, "learning_rate": 1.024736842105263e-06, "loss": 1.1995, "step": 3510},
    {"epoch": 1.26, "grad_norm": 6.1875, "learning_rate": 1.0231578947368422e-06, "loss": 1.2516, "step": 3520},
    {"epoch": 1.27, "grad_norm": 4.9375, "learning_rate": 1.021578947368421e-06, "loss": 1.3408, "step": 3530},
    {"epoch": 1.27, "grad_norm": 6.09375, "learning_rate": 1.0200000000000002e-06, "loss": 1.1742, "step": 3540},
    {"epoch": 1.27, "grad_norm": 5.6875, "learning_rate": 1.0184210526315789e-06, "loss": 1.22, "step": 3550},
    {"epoch": 1.28, "grad_norm": 3.9375, "learning_rate": 1.0168421052631578e-06, "loss": 1.1872, "step": 3560},
    {"epoch": 1.28, "grad_norm": 4.1875, "learning_rate": 1.015263157894737e-06, "loss": 1.255, "step": 3570},
    {"epoch": 1.28, "grad_norm": 5.0, "learning_rate": 1.0136842105263158e-06, "loss": 1.1664, "step": 3580},
    {"epoch": 1.29, "grad_norm": 6.75, "learning_rate": 1.0121052631578947e-06, "loss": 1.1648, "step": 3590},
    {"epoch": 1.29, "grad_norm": 5.5625, "learning_rate": 1.0105263157894736e-06, "loss": 1.3138, "step": 3600},
    {"epoch": 1.29, "grad_norm": 5.625, "learning_rate": 1.0089473684210527e-06, "loss": 1.2173, "step": 3610},
    {"epoch": 1.3, "grad_norm": 5.0625, "learning_rate": 1.0073684210526317e-06, "loss": 1.2128, "step": 3620},
    {"epoch": 1.3, "grad_norm": 4.3125, "learning_rate": 1.0057894736842106e-06, "loss": 1.2562, "step": 3630},
    {"epoch": 1.31, "grad_norm": 5.34375, "learning_rate": 1.0042105263157895e-06, "loss": 1.1745, "step": 3640},
    {"epoch": 1.31, "grad_norm": 6.875, "learning_rate": 1.0026315789473684e-06, "loss": 1.329, "step": 3650},
    {"epoch": 1.31, "grad_norm": 4.5625, "learning_rate": 1.0010526315789475e-06, "loss": 1.2953, "step": 3660},
    {"epoch": 1.32, "grad_norm": 4.5625, "learning_rate": 9.994736842105264e-07, "loss": 1.2442, "step": 3670},
    {"epoch": 1.32, "grad_norm": 5.0, "learning_rate": 9.978947368421053e-07, "loss": 1.1968, "step": 3680},
    {"epoch": 1.32, "grad_norm": 6.375, "learning_rate": 9.963157894736842e-07, "loss": 1.205, "step": 3690},
    {"epoch": 1.33, "grad_norm": 3.203125, "learning_rate": 9.947368421052631e-07, "loss": 1.2721, "step": 3700},
    {"epoch": 1.33, "grad_norm": 5.3125, "learning_rate": 9.931578947368422e-07, "loss": 1.3076, "step": 3710},
    {"epoch": 1.33, "grad_norm": 5.0625, "learning_rate": 9.91578947368421e-07, "loss": 1.1861, "step": 3720},
    {"epoch": 1.34, "grad_norm": 6.5, "learning_rate": 9.9e-07, "loss": 1.2757, "step": 3730},
    {"epoch": 1.34, "grad_norm": 5.03125, "learning_rate": 9.88421052631579e-07, "loss": 1.251, "step": 3740},
    {"epoch": 1.34, "grad_norm": 4.5625, "learning_rate": 9.86842105263158e-07, "loss": 1.3049, "step": 3750},
    {"epoch": 1.35, "grad_norm": 5.875, "learning_rate": 9.852631578947368e-07, "loss": 1.1694, "step": 3760},
    {"epoch": 1.35, "grad_norm": 4.0625, "learning_rate": 9.836842105263157e-07, "loss": 1.2924, "step": 3770},
    {"epoch": 1.36, "grad_norm": 3.765625, "learning_rate": 9.821052631578948e-07, "loss": 1.1178, "step": 3780},
    {"epoch": 1.36, "grad_norm": 8.5, "learning_rate": 9.805263157894737e-07, "loss": 1.4241, "step": 3790},
    {"epoch": 1.36, "grad_norm": 5.375, "learning_rate": 9.789473684210526e-07, "loss": 1.2019, "step": 3800},
    {"epoch": 1.37, "grad_norm": 6.53125, "learning_rate": 9.773684210526315e-07, "loss": 1.3219, "step": 3810},
    {"epoch": 1.37, "grad_norm": 4.40625, "learning_rate": 9.757894736842106e-07, "loss": 1.2338, "step": 3820},
    {"epoch": 1.37, "grad_norm": 4.75, "learning_rate": 9.742105263157895e-07, "loss": 1.2872, "step": 3830},
    {"epoch": 1.38, "grad_norm": 5.09375, "learning_rate": 9.726315789473684e-07, "loss": 1.2748, "step": 3840},
    {"epoch": 1.38, "grad_norm": 5.15625, "learning_rate": 9.710526315789474e-07, "loss": 1.2021, "step": 3850},
    {"epoch": 1.38, "grad_norm": 5.40625, "learning_rate": 9.694736842105263e-07, "loss": 1.2684, "step": 3860},
    {"epoch": 1.39, "grad_norm": 5.71875, "learning_rate": 9.678947368421054e-07, "loss": 1.2757, "step": 3870},
    {"epoch": 1.39, "grad_norm": 4.65625, "learning_rate": 9.663157894736843e-07, "loss": 1.1384, "step": 3880},
    {"epoch": 1.4, "grad_norm": 3.375, "learning_rate": 9.647368421052632e-07, "loss": 1.2843, "step": 3890},
    {"epoch": 1.4, "grad_norm": 5.0, "learning_rate": 9.63157894736842e-07, "loss": 1.3287, "step": 3900},
    {"epoch": 1.4, "grad_norm": 4.15625, "learning_rate": 9.61578947368421e-07, "loss": 1.1495, "step": 3910},
    {"epoch": 1.41, "grad_norm": 5.71875, "learning_rate": 9.600000000000001e-07, "loss": 1.3465, "step": 3920},
    {"epoch": 1.41, "grad_norm": 5.28125, "learning_rate": 9.584210526315788e-07, "loss": 1.2577, "step": 3930},
    {"epoch": 1.41, "grad_norm": 6.125, "learning_rate": 9.56842105263158e-07, "loss": 1.2289, "step": 3940},
    {"epoch": 1.42, "grad_norm": 5.84375, "learning_rate": 9.552631578947368e-07, "loss": 1.279, "step": 3950},
    {"epoch": 1.42, "grad_norm": 4.375, "learning_rate": 9.53684210526316e-07, "loss": 1.1106, "step": 3960},
    {"epoch": 1.42, "grad_norm": 6.09375, "learning_rate": 9.521052631578948e-07, "loss": 1.2996, "step": 3970},
    {"epoch": 1.43, "grad_norm": 4.875, "learning_rate": 9.505263157894737e-07, "loss": 1.1866, "step": 3980},
    {"epoch": 1.43, "grad_norm": 3.78125, "learning_rate": 9.489473684210527e-07, "loss": 1.2004, "step": 3990},
    {"epoch": 1.43, "grad_norm": 5.0625, "learning_rate": 9.473684210526316e-07, "loss": 1.2039, "step": 4000},
    {"epoch": 1.43, "eval_loss": 1.2978060245513916, "eval_runtime": 64.5441, "eval_samples_per_second": 15.493, "eval_steps_per_second": 15.493, "step": 4000}
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 2000,
  "total_flos": 6.455688167424e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}