{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 820,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.012195121951219513, "grad_norm": 460.0, "learning_rate": 2.4390243902439027e-06, "loss": 48.1093, "step": 1 },
    { "epoch": 0.06097560975609756, "grad_norm": 360.0, "learning_rate": 1.2195121951219513e-05, "loss": 42.5766, "step": 5 },
    { "epoch": 0.12195121951219512, "grad_norm": 187.0, "learning_rate": 2.4390243902439026e-05, "loss": 35.1457, "step": 10 },
    { "epoch": 0.18292682926829268, "grad_norm": 31.375, "learning_rate": 3.6585365853658535e-05, "loss": 21.8793, "step": 15 },
    { "epoch": 0.24390243902439024, "grad_norm": 17.375, "learning_rate": 4.878048780487805e-05, "loss": 17.5136, "step": 20 },
    { "epoch": 0.3048780487804878, "grad_norm": 7.59375, "learning_rate": 6.097560975609756e-05, "loss": 16.4183, "step": 25 },
    { "epoch": 0.36585365853658536, "grad_norm": 4.09375, "learning_rate": 7.317073170731707e-05, "loss": 15.0038, "step": 30 },
    { "epoch": 0.4268292682926829, "grad_norm": 8.25, "learning_rate": 8.53658536585366e-05, "loss": 14.2277, "step": 35 },
    { "epoch": 0.4878048780487805, "grad_norm": 16.5, "learning_rate": 9.75609756097561e-05, "loss": 13.0434, "step": 40 },
    { "epoch": 0.5487804878048781, "grad_norm": 35.25, "learning_rate": 0.00010975609756097563, "loss": 8.9174, "step": 45 },
    { "epoch": 0.6097560975609756, "grad_norm": 11.125, "learning_rate": 0.00012195121951219512, "loss": 2.4207, "step": 50 },
    { "epoch": 0.6707317073170732, "grad_norm": 1.5625, "learning_rate": 0.00013414634146341464, "loss": 1.5637, "step": 55 },
    { "epoch": 0.7317073170731707, "grad_norm": 1.9921875, "learning_rate": 0.00014634146341463414, "loss": 1.3611, "step": 60 },
    { "epoch": 0.7926829268292683, "grad_norm": 1.5, "learning_rate": 0.00015853658536585366, "loss": 1.1715, "step": 65 },
    { "epoch": 0.8536585365853658, "grad_norm": 2.15625, "learning_rate": 0.0001707317073170732, "loss": 1.0679, "step": 70 },
    { "epoch": 0.9146341463414634, "grad_norm": 1.15625, "learning_rate": 0.0001829268292682927, "loss": 0.9835, "step": 75 },
    { "epoch": 0.975609756097561, "grad_norm": 2.078125, "learning_rate": 0.0001951219512195122, "loss": 0.987, "step": 80 },
    { "epoch": 1.0, "eval_loss": 1.2807745933532715, "eval_runtime": 1.1183, "eval_samples_per_second": 1.788, "eval_steps_per_second": 0.894, "step": 82 },
    { "epoch": 1.0365853658536586, "grad_norm": 1.421875, "learning_rate": 0.00019999184556954776, "loss": 0.887, "step": 85 },
    { "epoch": 1.0975609756097562, "grad_norm": 1.6328125, "learning_rate": 0.0001999420177550043, "loss": 0.849, "step": 90 },
    { "epoch": 1.1585365853658536, "grad_norm": 4.1875, "learning_rate": 0.00019984691491033906, "loss": 0.8057, "step": 95 },
    { "epoch": 1.2195121951219512, "grad_norm": 3.34375, "learning_rate": 0.00019970658011837404, "loss": 0.8035, "step": 100 },
    { "epoch": 1.2804878048780488, "grad_norm": 1.4375, "learning_rate": 0.00019952107695258992, "loss": 0.7717, "step": 105 },
    { "epoch": 1.3414634146341464, "grad_norm": 1.3046875, "learning_rate": 0.00019929048944832638, "loss": 0.7583, "step": 110 },
    { "epoch": 1.4024390243902438, "grad_norm": 2.265625, "learning_rate": 0.00019901492206471325, "loss": 0.7363, "step": 115 },
    { "epoch": 1.4634146341463414, "grad_norm": 0.5546875, "learning_rate": 0.00019869449963734893, "loss": 0.7397, "step": 120 },
    { "epoch": 1.524390243902439, "grad_norm": 1.5859375, "learning_rate": 0.00019832936732174834, "loss": 0.7268, "step": 125 },
    { "epoch": 1.5853658536585367, "grad_norm": 2.796875, "learning_rate": 0.00019791969052758562, "loss": 0.7103, "step": 130 },
    { "epoch": 1.6463414634146343, "grad_norm": 1.6640625, "learning_rate": 0.00019746565484376132, "loss": 0.7166, "step": 135 },
    { "epoch": 1.7073170731707317, "grad_norm": 1.4296875, "learning_rate": 0.00019696746595432828, "loss": 0.7174, "step": 140 },
    { "epoch": 1.7682926829268293, "grad_norm": 2.296875, "learning_rate": 0.0001964253495453141, "loss": 0.6822, "step": 145 },
    { "epoch": 1.8292682926829267, "grad_norm": 1.8515625, "learning_rate": 0.00019583955120248237, "loss": 0.679, "step": 150 },
    { "epoch": 1.8902439024390243, "grad_norm": 0.42578125, "learning_rate": 0.00019521033630007928, "loss": 0.6708, "step": 155 },
    { "epoch": 1.951219512195122, "grad_norm": 0.490234375, "learning_rate": 0.00019453798988061535, "loss": 0.6859, "step": 160 },
    { "epoch": 2.0, "eval_loss": 1.1719168424606323, "eval_runtime": 1.1192, "eval_samples_per_second": 1.787, "eval_steps_per_second": 0.893, "step": 164 },
    { "epoch": 2.0121951219512195, "grad_norm": 0.5703125, "learning_rate": 0.00019382281652573785, "loss": 0.6606, "step": 165 },
    { "epoch": 2.073170731707317, "grad_norm": 3.296875, "learning_rate": 0.00019306514021825118, "loss": 0.6528, "step": 170 },
    { "epoch": 2.1341463414634148, "grad_norm": 2.609375, "learning_rate": 0.00019226530419534833, "loss": 0.6424, "step": 175 },
    { "epoch": 2.1951219512195124, "grad_norm": 1.5703125, "learning_rate": 0.00019142367079312021, "loss": 0.6061, "step": 180 },
    { "epoch": 2.2560975609756095, "grad_norm": 0.5234375, "learning_rate": 0.00019054062128241264, "loss": 0.6186, "step": 185 },
    { "epoch": 2.317073170731707, "grad_norm": 1.2890625, "learning_rate": 0.00018961655569610557, "loss": 0.6099, "step": 190 },
    { "epoch": 2.3780487804878048, "grad_norm": 0.89453125, "learning_rate": 0.0001886518926478932, "loss": 0.6184, "step": 195 },
    { "epoch": 2.4390243902439024, "grad_norm": 0.6796875, "learning_rate": 0.00018764706914264635, "loss": 0.5949, "step": 200 },
    { "epoch": 2.5, "grad_norm": 0.8046875, "learning_rate": 0.00018660254037844388, "loss": 0.5993, "step": 205 },
    { "epoch": 2.5609756097560976, "grad_norm": 1.3125, "learning_rate": 0.00018551877954036162, "loss": 0.6041, "step": 210 },
    { "epoch": 2.6219512195121952, "grad_norm": 0.9921875, "learning_rate": 0.00018439627758611385, "loss": 0.6005, "step": 215 },
    { "epoch": 2.682926829268293, "grad_norm": 1.0, "learning_rate": 0.00018323554302364272, "loss": 0.5917, "step": 220 },
    { "epoch": 2.7439024390243905, "grad_norm": 1.8515625, "learning_rate": 0.00018203710168075788, "loss": 0.5894, "step": 225 },
    { "epoch": 2.8048780487804876, "grad_norm": 1.15625, "learning_rate": 0.0001808014964669293, "loss": 0.5884, "step": 230 },
    { "epoch": 2.8658536585365852, "grad_norm": 0.83984375, "learning_rate": 0.00017952928712734268, "loss": 0.5967, "step": 235 },
    { "epoch": 2.926829268292683, "grad_norm": 0.3359375, "learning_rate": 0.00017822104998932713, "loss": 0.6064, "step": 240 },
    { "epoch": 2.9878048780487805, "grad_norm": 0.51171875, "learning_rate": 0.00017687737770127185, "loss": 0.5836, "step": 245 },
    { "epoch": 3.0, "eval_loss": 1.1479520797729492, "eval_runtime": 1.1346, "eval_samples_per_second": 1.763, "eval_steps_per_second": 0.881, "step": 246 },
    { "epoch": 3.048780487804878, "grad_norm": 1.078125, "learning_rate": 0.00017549887896414851, "loss": 0.5409, "step": 250 },
    { "epoch": 3.1097560975609757, "grad_norm": 0.361328125, "learning_rate": 0.0001740861782557618, "loss": 0.5316, "step": 255 },
    { "epoch": 3.1707317073170733, "grad_norm": 0.59765625, "learning_rate": 0.0001726399155478529, "loss": 0.5168, "step": 260 },
    { "epoch": 3.231707317073171, "grad_norm": 0.40234375, "learning_rate": 0.00017116074601618417, "loss": 0.5243, "step": 265 },
    { "epoch": 3.292682926829268, "grad_norm": 0.7890625, "learning_rate": 0.0001696493397437357, "loss": 0.5076, "step": 270 },
    { "epoch": 3.3536585365853657, "grad_norm": 0.96875, "learning_rate": 0.00016810638141714934, "loss": 0.5251, "step": 275 },
    { "epoch": 3.4146341463414633, "grad_norm": 0.890625, "learning_rate": 0.00016653257001655652, "loss": 0.5379, "step": 280 },
    { "epoch": 3.475609756097561, "grad_norm": 0.7109375, "learning_rate": 0.0001649286184989315, "loss": 0.5421, "step": 285 },
    { "epoch": 3.5365853658536586, "grad_norm": 0.59765625, "learning_rate": 0.0001632952534751122, "loss": 0.5148, "step": 290 },
    { "epoch": 3.597560975609756, "grad_norm": 0.490234375, "learning_rate": 0.00016163321488063637, "loss": 0.5222, "step": 295 },
    { "epoch": 3.658536585365854, "grad_norm": 0.55078125, "learning_rate": 0.00015994325564054122, "loss": 0.5229, "step": 300 },
    { "epoch": 3.7195121951219514, "grad_norm": 0.515625, "learning_rate": 0.00015822614132827837, "loss": 0.532, "step": 305 },
    { "epoch": 3.7804878048780486, "grad_norm": 0.458984375, "learning_rate": 0.00015648264981889934, "loss": 0.5401, "step": 310 },
    { "epoch": 3.841463414634146, "grad_norm": 0.453125, "learning_rate": 0.00015471357093666804, "loss": 0.5311, "step": 315 },
    { "epoch": 3.902439024390244, "grad_norm": 0.396484375, "learning_rate": 0.00015291970609726007, "loss": 0.525, "step": 320 },
    { "epoch": 3.9634146341463414, "grad_norm": 0.359375, "learning_rate": 0.00015110186794471103, "loss": 0.5178, "step": 325 },
    { "epoch": 4.0, "eval_loss": 1.171655535697937, "eval_runtime": 1.117, "eval_samples_per_second": 1.791, "eval_steps_per_second": 0.895, "step": 328 },
    { "epoch": 4.024390243902439, "grad_norm": 0.8125, "learning_rate": 0.00014926087998327837, "loss": 0.5073, "step": 330 },
    { "epoch": 4.085365853658536, "grad_norm": 0.3671875, "learning_rate": 0.00014739757620438307, "loss": 0.4417, "step": 335 },
    { "epoch": 4.146341463414634, "grad_norm": 0.7265625, "learning_rate": 0.0001455128007088009, "loss": 0.4321, "step": 340 },
    { "epoch": 4.2073170731707314, "grad_norm": 0.4921875, "learning_rate": 0.00014360740732427367, "loss": 0.4481, "step": 345 },
    { "epoch": 4.2682926829268295, "grad_norm": 0.75390625, "learning_rate": 0.00014168225921871433, "loss": 0.4489, "step": 350 },
    { "epoch": 4.329268292682927, "grad_norm": 0.52734375, "learning_rate": 0.00013973822850918055, "loss": 0.4517, "step": 355 },
    { "epoch": 4.390243902439025, "grad_norm": 0.78125, "learning_rate": 0.0001377761958667946, "loss": 0.4441, "step": 360 },
    { "epoch": 4.451219512195122, "grad_norm": 0.59375, "learning_rate": 0.00013579705011778766, "loss": 0.4539, "step": 365 },
    { "epoch": 4.512195121951219, "grad_norm": 0.578125, "learning_rate": 0.00013380168784085027, "loss": 0.4489, "step": 370 },
    { "epoch": 4.573170731707317, "grad_norm": 0.70703125, "learning_rate": 0.00013179101296097035, "loss": 0.4443, "step": 375 },
    { "epoch": 4.634146341463414, "grad_norm": 0.6796875, "learning_rate": 0.00012976593633994346, "loss": 0.4664, "step": 380 },
    { "epoch": 4.695121951219512, "grad_norm": 0.43359375, "learning_rate": 0.0001277273753637408, "loss": 0.4572, "step": 385 },
    { "epoch": 4.7560975609756095, "grad_norm": 0.84765625, "learning_rate": 0.00012567625352692127, "loss": 0.4675, "step": 390 },
    { "epoch": 4.817073170731708, "grad_norm": 0.486328125, "learning_rate": 0.0001236135000142765, "loss": 0.45, "step": 395 },
    { "epoch": 4.878048780487805, "grad_norm": 0.4140625, "learning_rate": 0.00012154004927989815, "loss": 0.4538, "step": 400 },
    { "epoch": 4.939024390243903, "grad_norm": 0.3984375, "learning_rate": 0.00011945684062385803, "loss": 0.4565, "step": 405 },
    { "epoch": 5.0, "grad_norm": 0.4140625, "learning_rate": 0.00011736481776669306, "loss": 0.4668, "step": 410 },
    { "epoch": 5.0, "eval_loss": 1.2044252157211304, "eval_runtime": 1.1171, "eval_samples_per_second": 1.79, "eval_steps_per_second": 0.895, "step": 410 },
    { "epoch": 5.060975609756097, "grad_norm": 0.63671875, "learning_rate": 0.00011526492842188745, "loss": 0.3794, "step": 415 },
    { "epoch": 5.121951219512195, "grad_norm": 0.83984375, "learning_rate": 0.0001131581238665465, "loss": 0.376, "step": 420 },
    { "epoch": 5.182926829268292, "grad_norm": 0.453125, "learning_rate": 0.00011104535851045539, "loss": 0.3721, "step": 425 },
    { "epoch": 5.2439024390243905, "grad_norm": 0.470703125, "learning_rate": 0.00010892758946371944, "loss": 0.3812, "step": 430 },
    { "epoch": 5.304878048780488, "grad_norm": 0.78125, "learning_rate": 0.00010680577610318072, "loss": 0.3748, "step": 435 },
    { "epoch": 5.365853658536586, "grad_norm": 0.7578125, "learning_rate": 0.00010468087963780789, "loss": 0.3698, "step": 440 },
    { "epoch": 5.426829268292683, "grad_norm": 0.48828125, "learning_rate": 0.00010255386267325602, "loss": 0.3849, "step": 445 },
    { "epoch": 5.487804878048781, "grad_norm": 0.45703125, "learning_rate": 0.00010042568877579388, "loss": 0.3831, "step": 450 },
    { "epoch": 5.548780487804878, "grad_norm": 0.470703125, "learning_rate": 9.829732203579584e-05, "loss": 0.3765, "step": 455 },
    { "epoch": 5.609756097560975, "grad_norm": 0.58984375, "learning_rate": 9.616972663099647e-05, "loss": 0.4006, "step": 460 },
    { "epoch": 5.670731707317073, "grad_norm": 0.4609375, "learning_rate": 9.404386638970542e-05, "loss": 0.384, "step": 465 },
    { "epoch": 5.7317073170731705, "grad_norm": 0.41796875, "learning_rate": 9.192070435418079e-05, "loss": 0.3804, "step": 470 },
    { "epoch": 5.7926829268292686, "grad_norm": 0.56640625, "learning_rate": 8.980120234435849e-05, "loss": 0.39, "step": 475 },
    { "epoch": 5.853658536585366, "grad_norm": 0.7265625, "learning_rate": 8.768632052213531e-05, "loss": 0.3881, "step": 480 },
    { "epoch": 5.914634146341464, "grad_norm": 0.44921875, "learning_rate": 8.557701695640321e-05, "loss": 0.3908, "step": 485 },
    { "epoch": 5.975609756097561, "grad_norm": 0.4140625, "learning_rate": 8.347424718903151e-05, "loss": 0.3955, "step": 490 },
    { "epoch": 6.0, "eval_loss": 1.325190782546997, "eval_runtime": 1.1255, "eval_samples_per_second": 1.777, "eval_steps_per_second": 0.888, "step": 492 },
    { "epoch": 6.036585365853658, "grad_norm": 0.3984375, "learning_rate": 8.13789638019942e-05, "loss": 0.3366, "step": 495 },
    { "epoch": 6.097560975609756, "grad_norm": 0.408203125, "learning_rate": 7.929211598583794e-05, "loss": 0.3141, "step": 500 },
    { "epoch": 6.158536585365853, "grad_norm": 0.54296875, "learning_rate": 7.721464910968627e-05, "loss": 0.3234, "step": 505 },
    { "epoch": 6.219512195121951, "grad_norm": 0.423828125, "learning_rate": 7.514750429297528e-05, "loss": 0.3232, "step": 510 },
    { "epoch": 6.280487804878049, "grad_norm": 0.4921875, "learning_rate": 7.309161797911441e-05, "loss": 0.3198, "step": 515 },
    { "epoch": 6.341463414634147, "grad_norm": 0.43359375, "learning_rate": 7.104792151126515e-05, "loss": 0.3236, "step": 520 },
    { "epoch": 6.402439024390244, "grad_norm": 0.447265625, "learning_rate": 6.901734071043071e-05, "loss": 0.3126, "step": 525 },
    { "epoch": 6.463414634146342, "grad_norm": 0.41015625, "learning_rate": 6.700079545604708e-05, "loss": 0.3167, "step": 530 },
    { "epoch": 6.524390243902439, "grad_norm": 0.52734375, "learning_rate": 6.499919926926566e-05, "loss": 0.3412, "step": 535 },
    { "epoch": 6.585365853658536, "grad_norm": 0.5703125, "learning_rate": 6.301345889911637e-05, "loss": 0.3269, "step": 540 },
    { "epoch": 6.646341463414634, "grad_norm": 0.423828125, "learning_rate": 6.104447391173858e-05, "loss": 0.3309, "step": 545 },
    { "epoch": 6.7073170731707314, "grad_norm": 0.482421875, "learning_rate": 5.909313628286601e-05, "loss": 0.3172, "step": 550 },
    { "epoch": 6.7682926829268295, "grad_norm": 0.53125, "learning_rate": 5.716032999375006e-05, "loss": 0.326, "step": 555 },
    { "epoch": 6.829268292682927, "grad_norm": 0.439453125, "learning_rate": 5.524693063070492e-05, "loss": 0.3207, "step": 560 },
    { "epoch": 6.890243902439025, "grad_norm": 0.55859375, "learning_rate": 5.335380498845559e-05, "loss": 0.3295, "step": 565 },
    { "epoch": 6.951219512195122, "grad_norm": 0.47265625, "learning_rate": 5.148181067746862e-05, "loss": 0.3233, "step": 570 },
    { "epoch": 7.0, "eval_loss": 1.4225410223007202, "eval_runtime": 1.1188, "eval_samples_per_second": 1.788, "eval_steps_per_second": 0.894, "step": 574 },
    { "epoch": 7.012195121951219, "grad_norm": 0.421875, "learning_rate": 4.963179573544357e-05, "loss": 0.3129, "step": 575 },
    { "epoch": 7.073170731707317, "grad_norm": 0.57421875, "learning_rate": 4.7804598243140666e-05, "loss": 0.2764, "step": 580 },
    { "epoch": 7.134146341463414, "grad_norm": 0.5, "learning_rate": 4.60010459447196e-05, "loss": 0.2792, "step": 585 },
    { "epoch": 7.195121951219512, "grad_norm": 0.486328125, "learning_rate": 4.422195587276058e-05, "loss": 0.2799, "step": 590 },
    { "epoch": 7.2560975609756095, "grad_norm": 0.47265625, "learning_rate": 4.2468133978137945e-05, "loss": 0.2759, "step": 595 },
    { "epoch": 7.317073170731708, "grad_norm": 0.48046875, "learning_rate": 4.0740374764914136e-05, "loss": 0.2697, "step": 600 },
    { "epoch": 7.378048780487805, "grad_norm": 0.439453125, "learning_rate": 3.903946093041877e-05, "loss": 0.2917, "step": 605 },
    { "epoch": 7.439024390243903, "grad_norm": 0.47265625, "learning_rate": 3.736616301067694e-05, "loss": 0.2748, "step": 610 },
    { "epoch": 7.5, "grad_norm": 0.50390625, "learning_rate": 3.5721239031346066e-05, "loss": 0.2771, "step": 615 },
    { "epoch": 7.560975609756097, "grad_norm": 0.5, "learning_rate": 3.410543416432069e-05, "loss": 0.2788, "step": 620 },
    { "epoch": 7.621951219512195, "grad_norm": 0.44921875, "learning_rate": 3.2519480390159806e-05, "loss": 0.2829, "step": 625 },
    { "epoch": 7.682926829268292, "grad_norm": 0.494140625, "learning_rate": 3.096409616649023e-05, "loss": 0.2708, "step": 630 },
    { "epoch": 7.7439024390243905, "grad_norm": 0.46875, "learning_rate": 2.9439986102536043e-05, "loss": 0.2945, "step": 635 },
    { "epoch": 7.804878048780488, "grad_norm": 0.482421875, "learning_rate": 2.794784063992131e-05, "loss": 0.2833, "step": 640 },
    { "epoch": 7.865853658536586, "grad_norm": 0.4375, "learning_rate": 2.6488335739891178e-05, "loss": 0.2767, "step": 645 },
    { "epoch": 7.926829268292683, "grad_norm": 0.46875, "learning_rate": 2.50621325770927e-05, "loss": 0.2725, "step": 650 },
    { "epoch": 7.987804878048781, "grad_norm": 0.4453125, "learning_rate": 2.366987724005404e-05, "loss": 0.2669, "step": 655 },
    { "epoch": 8.0, "eval_loss": 1.611946702003479, "eval_runtime": 1.1206, "eval_samples_per_second": 1.785, "eval_steps_per_second": 0.892, "step": 656 },
    { "epoch": 8.048780487804878, "grad_norm": 0.435546875, "learning_rate": 2.2312200438498043e-05, "loss": 0.2564, "step": 660 },
    { "epoch": 8.109756097560975, "grad_norm": 0.447265625, "learning_rate": 2.0989717217622652e-05, "loss": 0.2598, "step": 665 },
    { "epoch": 8.170731707317072, "grad_norm": 0.4921875, "learning_rate": 1.9703026679477256e-05, "loss": 0.2507, "step": 670 },
    { "epoch": 8.231707317073171, "grad_norm": 0.3984375, "learning_rate": 1.8452711711561842e-05, "loss": 0.2433, "step": 675 },
    { "epoch": 8.292682926829269, "grad_norm": 0.408203125, "learning_rate": 1.7239338722771327e-05, "loss": 0.251, "step": 680 },
    { "epoch": 8.353658536585366, "grad_norm": 0.40234375, "learning_rate": 1.6063457386805004e-05, "loss": 0.252, "step": 685 },
    { "epoch": 8.414634146341463, "grad_norm": 0.404296875, "learning_rate": 1.4925600393157324e-05, "loss": 0.2471, "step": 690 },
    { "epoch": 8.475609756097562, "grad_norm": 0.494140625, "learning_rate": 1.3826283205802427e-05, "loss": 0.2526, "step": 695 },
    { "epoch": 8.536585365853659, "grad_norm": 0.423828125, "learning_rate": 1.2766003829682505e-05, "loss": 0.2517, "step": 700 },
    { "epoch": 8.597560975609756, "grad_norm": 0.4375, "learning_rate": 1.1745242585104955e-05, "loss": 0.2479, "step": 705 },
    { "epoch": 8.658536585365853, "grad_norm": 0.404296875, "learning_rate": 1.0764461890151112e-05, "loss": 0.2485, "step": 710 },
    { "epoch": 8.71951219512195, "grad_norm": 0.44140625, "learning_rate": 9.824106051194859e-06, "loss": 0.2559, "step": 715 },
    { "epoch": 8.78048780487805, "grad_norm": 0.40234375, "learning_rate": 8.924601061626048e-06, "loss": 0.2588, "step": 720 },
    { "epoch": 8.841463414634147, "grad_norm": 0.40625, "learning_rate": 8.066354408870048e-06, "loss": 0.2518, "step": 725 },
    { "epoch": 8.902439024390244, "grad_norm": 0.42578125, "learning_rate": 7.249754889790539e-06, "loss": 0.2542, "step": 730 },
    { "epoch": 8.963414634146341, "grad_norm": 0.453125, "learning_rate": 6.475172434559573e-06, "loss": 0.2591, "step": 735 },
    { "epoch": 9.0, "eval_loss": 1.73529052734375, "eval_runtime": 1.1218, "eval_samples_per_second": 1.783, "eval_steps_per_second": 0.891, "step": 738 },
    { "epoch": 9.024390243902438, "grad_norm": 0.419921875, "learning_rate": 5.742957939074412e-06, "loss": 0.2467, "step": 740 },
    { "epoch": 9.085365853658537, "grad_norm": 0.384765625, "learning_rate": 5.0534431059970685e-06, "loss": 0.2449, "step": 745 },
    { "epoch": 9.146341463414634, "grad_norm": 0.38671875, "learning_rate": 4.40694029448877e-06, "loss": 0.244, "step": 750 },
    { "epoch": 9.207317073170731, "grad_norm": 0.419921875, "learning_rate": 3.803742378707198e-06, "loss": 0.2612, "step": 755 },
    { "epoch": 9.268292682926829, "grad_norm": 0.384765625, "learning_rate": 3.2441226151306404e-06, "loss": 0.2497, "step": 760 },
    { "epoch": 9.329268292682928, "grad_norm": 0.41796875, "learning_rate": 2.7283345187693264e-06, "loss": 0.2571, "step": 765 },
    { "epoch": 9.390243902439025, "grad_norm": 0.40234375, "learning_rate": 2.256611748319792e-06, "loss": 0.2458, "step": 770 },
    { "epoch": 9.451219512195122, "grad_norm": 0.427734375, "learning_rate": 1.8291680003145073e-06, "loss": 0.2585, "step": 775 },
    { "epoch": 9.512195121951219, "grad_norm": 0.423828125, "learning_rate": 1.4461969123145457e-06, "loss": 0.2516, "step": 780 },
    { "epoch": 9.573170731707316, "grad_norm": 0.39453125, "learning_rate": 1.107871975189234e-06, "loss": 0.2396, "step": 785 },
    { "epoch": 9.634146341463415, "grad_norm": 0.380859375, "learning_rate": 8.143464545226298e-07, "loss": 0.2441, "step": 790 },
    { "epoch": 9.695121951219512, "grad_norm": 0.3984375, "learning_rate": 5.657533211820942e-07, "loss": 0.2417, "step": 795 },
    { "epoch": 9.75609756097561, "grad_norm": 0.400390625, "learning_rate": 3.622051910808666e-07, "loss": 0.2425, "step": 800 },
    { "epoch": 9.817073170731707, "grad_norm": 0.412109375, "learning_rate": 2.037942741615617e-07, "loss": 0.2447, "step": 805 },
    { "epoch": 9.878048780487806, "grad_norm": 0.39453125, "learning_rate": 9.059233262386225e-08, "loss": 0.2512, "step": 810 },
    { "epoch": 9.939024390243903, "grad_norm": 0.39453125, "learning_rate": 2.2650648415334376e-08, "loss": 0.2365, "step": 815 },
    { "epoch": 10.0, "grad_norm": 0.3828125, "learning_rate": 0.0, "loss": 0.2367, "step": 820 },
    { "epoch": 10.0, "eval_loss": 1.750213384628296, "eval_runtime": 1.1197, "eval_samples_per_second": 1.786, "eval_steps_per_second": 0.893, "step": 820 },
    { "epoch": 10.0, "step": 820, "total_flos": 1.257706785410646e+18, "train_loss": 1.5742560074096772, "train_runtime": 5911.531, "train_samples_per_second": 2.214, "train_steps_per_second": 0.139 }
  ],
  "logging_steps": 5,
  "max_steps": 820,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 100,
  "total_flos": 1.257706785410646e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}