{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999954960229883,
  "eval_steps": 100,
  "global_step": 3469,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0008647635862466558,
      "grad_norm": 27.898378372192383,
      "learning_rate": 1.7241379310344828e-07,
      "loss": 2.2211,
      "step": 3
    },
    {
      "epoch": 0.0017295271724933115,
      "grad_norm": 25.436155319213867,
      "learning_rate": 3.4482758620689656e-07,
      "loss": 2.1928,
      "step": 6
    },
    {
      "epoch": 0.0025942907587399675,
      "grad_norm": 25.42555046081543,
      "learning_rate": 5.172413793103449e-07,
      "loss": 2.006,
      "step": 9
    },
    {
      "epoch": 0.003459054344986623,
      "grad_norm": 26.934585571289062,
      "learning_rate": 6.896551724137931e-07,
      "loss": 2.0768,
      "step": 12
    },
    {
      "epoch": 0.004323817931233279,
      "grad_norm": 25.911136627197266,
      "learning_rate": 8.620689655172415e-07,
      "loss": 1.8806,
      "step": 15
    },
    {
      "epoch": 0.005188581517479935,
      "grad_norm": 26.93269157409668,
      "learning_rate": 1.0344827586206898e-06,
      "loss": 2.2425,
      "step": 18
    },
    {
      "epoch": 0.006053345103726591,
      "grad_norm": 29.343605041503906,
      "learning_rate": 1.2068965517241381e-06,
      "loss": 2.1498,
      "step": 21
    },
    {
      "epoch": 0.006918108689973246,
      "grad_norm": 22.887393951416016,
      "learning_rate": 1.3793103448275862e-06,
      "loss": 1.8932,
      "step": 24
    },
    {
      "epoch": 0.0077828722762199026,
      "grad_norm": 24.839616775512695,
      "learning_rate": 1.5517241379310346e-06,
      "loss": 1.8092,
      "step": 27
    },
    {
      "epoch": 0.008647635862466557,
      "grad_norm": 23.600284576416016,
      "learning_rate": 1.724137931034483e-06,
      "loss": 1.6473,
      "step": 30
    },
    {
      "epoch": 0.009512399448713214,
      "grad_norm": 20.898502349853516,
      "learning_rate": 1.896551724137931e-06,
      "loss": 1.5059,
      "step": 33
    },
    {
      "epoch": 0.01037716303495987,
      "grad_norm": 20.16602897644043,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 1.3399,
      "step": 36
    },
    {
      "epoch": 0.011241926621206525,
      "grad_norm": 20.394386291503906,
      "learning_rate": 2.241379310344828e-06,
      "loss": 1.276,
      "step": 39
    },
    {
      "epoch": 0.012106690207453181,
      "grad_norm": 17.962718963623047,
      "learning_rate": 2.4137931034482762e-06,
      "loss": 1.2166,
      "step": 42
    },
    {
      "epoch": 0.012971453793699838,
      "grad_norm": 16.842355728149414,
      "learning_rate": 2.5862068965517246e-06,
      "loss": 1.2867,
      "step": 45
    },
    {
      "epoch": 0.013836217379946492,
      "grad_norm": 16.04344940185547,
      "learning_rate": 2.7586206896551725e-06,
      "loss": 1.2473,
      "step": 48
    },
    {
      "epoch": 0.014700980966193149,
      "grad_norm": 10.40717887878418,
      "learning_rate": 2.931034482758621e-06,
      "loss": 1.1712,
      "step": 51
    },
    {
      "epoch": 0.015565744552439805,
      "grad_norm": 9.352970123291016,
      "learning_rate": 3.103448275862069e-06,
      "loss": 0.8918,
      "step": 54
    },
    {
      "epoch": 0.01643050813868646,
      "grad_norm": 15.546124458312988,
      "learning_rate": 3.2758620689655175e-06,
      "loss": 0.8335,
      "step": 57
    },
    {
      "epoch": 0.017295271724933114,
      "grad_norm": 1.752021074295044,
      "learning_rate": 3.448275862068966e-06,
      "loss": 0.747,
      "step": 60
    },
    {
      "epoch": 0.01816003531117977,
      "grad_norm": 4.960226058959961,
      "learning_rate": 3.620689655172414e-06,
      "loss": 0.7935,
      "step": 63
    },
    {
      "epoch": 0.019024798897426427,
      "grad_norm": 2.807642698287964,
      "learning_rate": 3.793103448275862e-06,
      "loss": 0.6656,
      "step": 66
    },
    {
      "epoch": 0.019889562483673084,
      "grad_norm": 8.289933204650879,
      "learning_rate": 3.96551724137931e-06,
      "loss": 0.6971,
      "step": 69
    },
    {
      "epoch": 0.02075432606991974,
      "grad_norm": 3.8299474716186523,
      "learning_rate": 4.137931034482759e-06,
      "loss": 0.5833,
      "step": 72
    },
    {
      "epoch": 0.021619089656166397,
      "grad_norm": 3.540727138519287,
      "learning_rate": 4.310344827586207e-06,
      "loss": 0.5504,
      "step": 75
    },
    {
      "epoch": 0.02248385324241305,
      "grad_norm": 4.521378517150879,
      "learning_rate": 4.482758620689656e-06,
      "loss": 0.5376,
      "step": 78
    },
    {
      "epoch": 0.023348616828659706,
      "grad_norm": 1.7125041484832764,
      "learning_rate": 4.655172413793104e-06,
      "loss": 0.5059,
      "step": 81
    },
    {
      "epoch": 0.024213380414906362,
      "grad_norm": 4.4612555503845215,
      "learning_rate": 4.8275862068965525e-06,
      "loss": 0.5802,
      "step": 84
    },
    {
      "epoch": 0.02507814400115302,
      "grad_norm": 2.3113410472869873,
      "learning_rate": 5e-06,
      "loss": 0.5996,
      "step": 87
    },
    {
      "epoch": 0.025942907587399675,
      "grad_norm": 2.3540542125701904,
      "learning_rate": 5.172413793103449e-06,
      "loss": 0.6472,
      "step": 90
    },
    {
      "epoch": 0.026807671173646328,
      "grad_norm": 2.9415371417999268,
      "learning_rate": 5.344827586206896e-06,
      "loss": 0.4013,
      "step": 93
    },
    {
      "epoch": 0.027672434759892985,
      "grad_norm": 3.8324429988861084,
      "learning_rate": 5.517241379310345e-06,
      "loss": 0.521,
      "step": 96
    },
    {
      "epoch": 0.02853719834613964,
      "grad_norm": 2.941765785217285,
      "learning_rate": 5.689655172413794e-06,
      "loss": 0.4763,
      "step": 99
    },
    {
      "epoch": 0.028825452874888528,
      "eval_loss": 0.4467589259147644,
      "eval_mse": 0.4467589111328125,
      "eval_runtime": 64.5355,
      "eval_samples_per_second": 15.495,
      "eval_steps_per_second": 3.874,
      "step": 100
    },
    {
      "epoch": 0.029401961932386297,
      "grad_norm": 5.631622314453125,
      "learning_rate": 5.862068965517242e-06,
      "loss": 0.5672,
      "step": 102
    },
    {
      "epoch": 0.030266725518632954,
      "grad_norm": 2.838885545730591,
      "learning_rate": 6.03448275862069e-06,
      "loss": 0.5572,
      "step": 105
    },
    {
      "epoch": 0.03113148910487961,
      "grad_norm": 4.942019462585449,
      "learning_rate": 6.206896551724138e-06,
      "loss": 0.4501,
      "step": 108
    },
    {
      "epoch": 0.03199625269112626,
      "grad_norm": 2.720856189727783,
      "learning_rate": 6.379310344827587e-06,
      "loss": 0.385,
      "step": 111
    },
    {
      "epoch": 0.03286101627737292,
      "grad_norm": 14.2058744430542,
      "learning_rate": 6.551724137931035e-06,
      "loss": 0.4864,
      "step": 114
    },
    {
      "epoch": 0.033725779863619576,
      "grad_norm": 3.2831430435180664,
      "learning_rate": 6.724137931034484e-06,
      "loss": 0.4978,
      "step": 117
    },
    {
      "epoch": 0.03459054344986623,
      "grad_norm": 5.037907600402832,
      "learning_rate": 6.896551724137932e-06,
      "loss": 0.4359,
      "step": 120
    },
    {
      "epoch": 0.03545530703611289,
      "grad_norm": 3.339016914367676,
      "learning_rate": 7.0689655172413796e-06,
      "loss": 0.4092,
      "step": 123
    },
    {
      "epoch": 0.03632007062235954,
      "grad_norm": 1.9857500791549683,
      "learning_rate": 7.241379310344828e-06,
      "loss": 0.4031,
      "step": 126
    },
    {
      "epoch": 0.0371848342086062,
      "grad_norm": 66.43729400634766,
      "learning_rate": 7.413793103448277e-06,
      "loss": 0.3927,
      "step": 129
    },
    {
      "epoch": 0.038049597794852855,
      "grad_norm": 14.006072998046875,
      "learning_rate": 7.586206896551724e-06,
      "loss": 0.4676,
      "step": 132
    },
    {
      "epoch": 0.03891436138109951,
      "grad_norm": 3.6296005249023438,
      "learning_rate": 7.758620689655173e-06,
      "loss": 0.3862,
      "step": 135
    },
    {
      "epoch": 0.03977912496734617,
      "grad_norm": 9.856642723083496,
      "learning_rate": 7.93103448275862e-06,
      "loss": 0.3564,
      "step": 138
    },
    {
      "epoch": 0.04064388855359282,
      "grad_norm": 21.199617385864258,
      "learning_rate": 8.103448275862069e-06,
      "loss": 0.3968,
      "step": 141
    },
    {
      "epoch": 0.04150865213983948,
      "grad_norm": 3.6782989501953125,
      "learning_rate": 8.275862068965518e-06,
      "loss": 0.4045,
      "step": 144
    },
    {
      "epoch": 0.04237341572608613,
      "grad_norm": 8.13273811340332,
      "learning_rate": 8.448275862068966e-06,
      "loss": 0.3674,
      "step": 147
    },
    {
      "epoch": 0.04323817931233279,
      "grad_norm": 5.4698166847229,
      "learning_rate": 8.620689655172414e-06,
      "loss": 0.3479,
      "step": 150
    },
    {
      "epoch": 0.044102942898579446,
      "grad_norm": 3.1589951515197754,
      "learning_rate": 8.793103448275862e-06,
      "loss": 0.3776,
      "step": 153
    },
    {
      "epoch": 0.0449677064848261,
      "grad_norm": 2.283618211746216,
      "learning_rate": 8.965517241379312e-06,
      "loss": 0.3927,
      "step": 156
    },
    {
      "epoch": 0.04583247007107276,
      "grad_norm": 8.170278549194336,
      "learning_rate": 9.13793103448276e-06,
      "loss": 0.3583,
      "step": 159
    },
    {
      "epoch": 0.04669723365731941,
      "grad_norm": 3.574162483215332,
      "learning_rate": 9.310344827586207e-06,
      "loss": 0.3659,
      "step": 162
    },
    {
      "epoch": 0.04756199724356607,
      "grad_norm": 12.687560081481934,
      "learning_rate": 9.482758620689655e-06,
      "loss": 0.3439,
      "step": 165
    },
    {
      "epoch": 0.048426760829812725,
      "grad_norm": 3.187288761138916,
      "learning_rate": 9.655172413793105e-06,
      "loss": 0.3442,
      "step": 168
    },
    {
      "epoch": 0.04929152441605938,
      "grad_norm": 9.385987281799316,
      "learning_rate": 9.827586206896553e-06,
      "loss": 0.3494,
      "step": 171
    },
    {
      "epoch": 0.05015628800230604,
      "grad_norm": 2.9266576766967773,
      "learning_rate": 1e-05,
      "loss": 0.3061,
      "step": 174
    },
    {
      "epoch": 0.05102105158855269,
      "grad_norm": 3.7128305435180664,
      "learning_rate": 9.990895295902884e-06,
      "loss": 0.3664,
      "step": 177
    },
    {
      "epoch": 0.05188581517479935,
      "grad_norm": 5.970412731170654,
      "learning_rate": 9.981790591805767e-06,
      "loss": 0.4051,
      "step": 180
    },
    {
      "epoch": 0.052750578761046,
      "grad_norm": 4.895346164703369,
      "learning_rate": 9.972685887708651e-06,
      "loss": 0.2907,
      "step": 183
    },
    {
      "epoch": 0.053615342347292656,
      "grad_norm": 16.259010314941406,
      "learning_rate": 9.963581183611534e-06,
      "loss": 0.303,
      "step": 186
    },
    {
      "epoch": 0.054480105933539316,
      "grad_norm": 4.193286418914795,
      "learning_rate": 9.954476479514417e-06,
      "loss": 0.3164,
      "step": 189
    },
    {
      "epoch": 0.05534486951978597,
      "grad_norm": 8.485808372497559,
      "learning_rate": 9.9453717754173e-06,
      "loss": 0.3494,
      "step": 192
    },
    {
      "epoch": 0.05620963310603263,
      "grad_norm": 5.708987712860107,
      "learning_rate": 9.936267071320182e-06,
      "loss": 0.3413,
      "step": 195
    },
    {
      "epoch": 0.05707439669227928,
      "grad_norm": 5.146287441253662,
      "learning_rate": 9.927162367223067e-06,
      "loss": 0.3078,
      "step": 198
    },
    {
      "epoch": 0.057650905749777055,
      "eval_loss": 0.31299448013305664,
      "eval_mse": 0.31299447250366214,
      "eval_runtime": 64.6715,
      "eval_samples_per_second": 15.463,
      "eval_steps_per_second": 3.866,
      "step": 200
    },
    {
      "epoch": 0.05793916027852594,
      "grad_norm": 6.595048427581787,
      "learning_rate": 9.91805766312595e-06,
      "loss": 0.3819,
      "step": 201
    },
    {
      "epoch": 0.058803923864772595,
      "grad_norm": 4.586561679840088,
      "learning_rate": 9.908952959028833e-06,
      "loss": 0.3159,
      "step": 204
    },
    {
      "epoch": 0.05966868745101925,
      "grad_norm": 5.183395862579346,
      "learning_rate": 9.899848254931715e-06,
      "loss": 0.3129,
      "step": 207
    },
    {
      "epoch": 0.06053345103726591,
      "grad_norm": 12.04688549041748,
      "learning_rate": 9.890743550834598e-06,
      "loss": 0.3276,
      "step": 210
    },
    {
      "epoch": 0.06139821462351256,
      "grad_norm": 5.607716083526611,
      "learning_rate": 9.881638846737481e-06,
      "loss": 0.3125,
      "step": 213
    },
    {
      "epoch": 0.06226297820975922,
      "grad_norm": 4.468930244445801,
      "learning_rate": 9.872534142640366e-06,
      "loss": 0.2952,
      "step": 216
    },
    {
      "epoch": 0.06312774179600587,
      "grad_norm": 4.9333415031433105,
      "learning_rate": 9.863429438543249e-06,
      "loss": 0.3156,
      "step": 219
    },
    {
      "epoch": 0.06399250538225253,
      "grad_norm": 3.3796207904815674,
      "learning_rate": 9.854324734446131e-06,
      "loss": 0.3484,
      "step": 222
    },
    {
      "epoch": 0.06485726896849918,
      "grad_norm": 2.9608285427093506,
      "learning_rate": 9.845220030349014e-06,
      "loss": 0.3244,
      "step": 225
    },
    {
      "epoch": 0.06572203255474585,
      "grad_norm": 2.9653444290161133,
      "learning_rate": 9.836115326251897e-06,
      "loss": 0.2834,
      "step": 228
    },
    {
      "epoch": 0.0665867961409925,
      "grad_norm": 6.536613464355469,
      "learning_rate": 9.827010622154782e-06,
      "loss": 0.2884,
      "step": 231
    },
    {
      "epoch": 0.06745155972723915,
      "grad_norm": 4.19090461730957,
      "learning_rate": 9.817905918057664e-06,
      "loss": 0.3355,
      "step": 234
    },
    {
      "epoch": 0.0683163233134858,
      "grad_norm": 3.4930806159973145,
      "learning_rate": 9.808801213960547e-06,
      "loss": 0.3038,
      "step": 237
    },
    {
      "epoch": 0.06918108689973246,
      "grad_norm": 13.04383659362793,
      "learning_rate": 9.79969650986343e-06,
      "loss": 0.2971,
      "step": 240
    },
    {
      "epoch": 0.07004585048597912,
      "grad_norm": 6.37193489074707,
      "learning_rate": 9.790591805766313e-06,
      "loss": 0.2927,
      "step": 243
    },
    {
      "epoch": 0.07091061407222578,
      "grad_norm": 3.0384926795959473,
      "learning_rate": 9.781487101669198e-06,
      "loss": 0.3084,
      "step": 246
    },
    {
      "epoch": 0.07177537765847243,
      "grad_norm": 20.000625610351562,
      "learning_rate": 9.77238239757208e-06,
      "loss": 0.2508,
      "step": 249
    },
    {
      "epoch": 0.07264014124471908,
      "grad_norm": 5.248627662658691,
      "learning_rate": 9.763277693474963e-06,
      "loss": 0.2514,
      "step": 252
    },
    {
      "epoch": 0.07350490483096574,
      "grad_norm": 6.290755748748779,
      "learning_rate": 9.754172989377846e-06,
      "loss": 0.2514,
      "step": 255
    },
    {
      "epoch": 0.0743696684172124,
      "grad_norm": 5.56666898727417,
      "learning_rate": 9.745068285280729e-06,
      "loss": 0.3,
      "step": 258
    },
    {
      "epoch": 0.07523443200345906,
      "grad_norm": 3.098717212677002,
      "learning_rate": 9.735963581183613e-06,
      "loss": 0.2927,
      "step": 261
    },
    {
      "epoch": 0.07609919558970571,
      "grad_norm": 3.6719772815704346,
      "learning_rate": 9.726858877086496e-06,
      "loss": 0.3462,
      "step": 264
    },
    {
      "epoch": 0.07696395917595236,
      "grad_norm": 16.087696075439453,
      "learning_rate": 9.717754172989379e-06,
      "loss": 0.2776,
      "step": 267
    },
    {
      "epoch": 0.07782872276219902,
      "grad_norm": 3.008305788040161,
      "learning_rate": 9.708649468892262e-06,
      "loss": 0.3359,
      "step": 270
    },
    {
      "epoch": 0.07869348634844568,
      "grad_norm": 5.651501655578613,
      "learning_rate": 9.699544764795145e-06,
      "loss": 0.3544,
      "step": 273
    },
    {
      "epoch": 0.07955824993469233,
      "grad_norm": 3.0839080810546875,
      "learning_rate": 9.690440060698028e-06,
      "loss": 0.3059,
      "step": 276
    },
    {
      "epoch": 0.08042301352093899,
      "grad_norm": 6.679815292358398,
      "learning_rate": 9.681335356600912e-06,
      "loss": 0.2823,
      "step": 279
    },
    {
      "epoch": 0.08128777710718564,
      "grad_norm": 3.161733627319336,
      "learning_rate": 9.672230652503795e-06,
      "loss": 0.2924,
      "step": 282
    },
    {
      "epoch": 0.0821525406934323,
      "grad_norm": 7.885376930236816,
      "learning_rate": 9.663125948406678e-06,
      "loss": 0.3316,
      "step": 285
    },
    {
      "epoch": 0.08301730427967896,
      "grad_norm": 4.813638687133789,
      "learning_rate": 9.65402124430956e-06,
      "loss": 0.2827,
      "step": 288
    },
    {
      "epoch": 0.08388206786592561,
      "grad_norm": 3.7599964141845703,
      "learning_rate": 9.644916540212444e-06,
      "loss": 0.3653,
      "step": 291
    },
    {
      "epoch": 0.08474683145217227,
      "grad_norm": 3.177720546722412,
      "learning_rate": 9.635811836115328e-06,
      "loss": 0.3014,
      "step": 294
    },
    {
      "epoch": 0.08561159503841892,
      "grad_norm": 3.337841749191284,
      "learning_rate": 9.626707132018211e-06,
      "loss": 0.2627,
      "step": 297
    },
    {
      "epoch": 0.08647635862466559,
      "grad_norm": 6.686293601989746,
      "learning_rate": 9.617602427921094e-06,
      "loss": 0.3088,
      "step": 300
    },
    {
      "epoch": 0.08647635862466559,
      "eval_loss": 0.26950159668922424,
      "eval_mse": 0.2695015795826912,
      "eval_runtime": 64.3244,
      "eval_samples_per_second": 15.546,
      "eval_steps_per_second": 3.887,
      "step": 300
    },
    {
      "epoch": 0.08734112221091224,
      "grad_norm": 4.119994163513184,
      "learning_rate": 9.608497723823977e-06,
      "loss": 0.2565,
      "step": 303
    },
    {
      "epoch": 0.08820588579715889,
      "grad_norm": 2.523799419403076,
      "learning_rate": 9.59939301972686e-06,
      "loss": 0.2987,
      "step": 306
    },
    {
      "epoch": 0.08907064938340555,
      "grad_norm": 4.713862419128418,
      "learning_rate": 9.590288315629744e-06,
      "loss": 0.2668,
      "step": 309
    },
    {
      "epoch": 0.0899354129696522,
      "grad_norm": 9.041470527648926,
      "learning_rate": 9.581183611532627e-06,
      "loss": 0.2889,
      "step": 312
    },
    {
      "epoch": 0.09080017655589886,
      "grad_norm": 2.4231677055358887,
      "learning_rate": 9.57207890743551e-06,
      "loss": 0.3115,
      "step": 315
    },
    {
      "epoch": 0.09166494014214552,
      "grad_norm": 2.21040678024292,
      "learning_rate": 9.562974203338393e-06,
      "loss": 0.2281,
      "step": 318
    },
    {
      "epoch": 0.09252970372839217,
      "grad_norm": 2.1169981956481934,
      "learning_rate": 9.553869499241275e-06,
      "loss": 0.2577,
      "step": 321
    },
    {
      "epoch": 0.09339446731463882,
      "grad_norm": 2.232067823410034,
      "learning_rate": 9.54476479514416e-06,
      "loss": 0.2205,
      "step": 324
    },
    {
      "epoch": 0.09425923090088548,
      "grad_norm": 6.556311130523682,
      "learning_rate": 9.535660091047043e-06,
      "loss": 0.2522,
      "step": 327
    },
    {
      "epoch": 0.09512399448713214,
      "grad_norm": 5.824563980102539,
      "learning_rate": 9.526555386949926e-06,
      "loss": 0.298,
      "step": 330
    },
    {
      "epoch": 0.0959887580733788,
      "grad_norm": 3.462920665740967,
      "learning_rate": 9.517450682852808e-06,
      "loss": 0.2836,
      "step": 333
    },
    {
      "epoch": 0.09685352165962545,
      "grad_norm": 8.646162986755371,
      "learning_rate": 9.508345978755691e-06,
      "loss": 0.2844,
      "step": 336
    },
    {
      "epoch": 0.0977182852458721,
      "grad_norm": 2.7092299461364746,
      "learning_rate": 9.499241274658574e-06,
      "loss": 0.2869,
      "step": 339
    },
    {
      "epoch": 0.09858304883211876,
      "grad_norm": 2.3480119705200195,
      "learning_rate": 9.490136570561459e-06,
      "loss": 0.3123,
      "step": 342
    },
    {
      "epoch": 0.09944781241836542,
      "grad_norm": 3.3885226249694824,
      "learning_rate": 9.481031866464341e-06,
      "loss": 0.2096,
      "step": 345
    },
    {
      "epoch": 0.10031257600461208,
      "grad_norm": 4.4023590087890625,
      "learning_rate": 9.471927162367224e-06,
      "loss": 0.2715,
      "step": 348
    },
    {
      "epoch": 0.10117733959085873,
      "grad_norm": 5.742528438568115,
      "learning_rate": 9.462822458270107e-06,
      "loss": 0.3155,
      "step": 351
    },
    {
      "epoch": 0.10204210317710538,
      "grad_norm": 5.3731913566589355,
      "learning_rate": 9.45371775417299e-06,
      "loss": 0.2759,
      "step": 354
    },
    {
      "epoch": 0.10290686676335203,
      "grad_norm": 2.147970199584961,
      "learning_rate": 9.444613050075875e-06,
      "loss": 0.2481,
      "step": 357
    },
    {
      "epoch": 0.1037716303495987,
      "grad_norm": 2.099414110183716,
      "learning_rate": 9.435508345978757e-06,
      "loss": 0.3029,
      "step": 360
    },
    {
      "epoch": 0.10463639393584535,
      "grad_norm": 2.6507697105407715,
      "learning_rate": 9.42640364188164e-06,
      "loss": 0.2731,
      "step": 363
    },
    {
      "epoch": 0.105501157522092,
      "grad_norm": 2.6286544799804688,
      "learning_rate": 9.417298937784523e-06,
      "loss": 0.2532,
      "step": 366
    },
    {
      "epoch": 0.10636592110833866,
      "grad_norm": 4.705443859100342,
      "learning_rate": 9.408194233687406e-06,
      "loss": 0.245,
      "step": 369
    },
    {
      "epoch": 0.10723068469458531,
      "grad_norm": 2.616685628890991,
      "learning_rate": 9.399089529590289e-06,
      "loss": 0.2953,
      "step": 372
    },
    {
      "epoch": 0.10809544828083198,
      "grad_norm": 2.7139439582824707,
      "learning_rate": 9.389984825493173e-06,
      "loss": 0.2691,
      "step": 375
    },
    {
      "epoch": 0.10896021186707863,
      "grad_norm": 5.848327159881592,
      "learning_rate": 9.380880121396056e-06,
      "loss": 0.2735,
      "step": 378
    },
    {
      "epoch": 0.10982497545332529,
      "grad_norm": 4.9311957359313965,
      "learning_rate": 9.371775417298939e-06,
      "loss": 0.2915,
      "step": 381
    },
    {
      "epoch": 0.11068973903957194,
      "grad_norm": 2.206596612930298,
      "learning_rate": 9.362670713201822e-06,
      "loss": 0.2811,
      "step": 384
    },
    {
      "epoch": 0.11155450262581859,
      "grad_norm": 4.341143608093262,
      "learning_rate": 9.353566009104705e-06,
      "loss": 0.2845,
      "step": 387
    },
    {
      "epoch": 0.11241926621206526,
      "grad_norm": 144.06268310546875,
      "learning_rate": 9.344461305007587e-06,
      "loss": 0.3057,
      "step": 390
    },
    {
      "epoch": 0.11328402979831191,
      "grad_norm": 4.772558689117432,
      "learning_rate": 9.335356600910472e-06,
      "loss": 0.2683,
      "step": 393
    },
    {
      "epoch": 0.11414879338455856,
      "grad_norm": 4.352729320526123,
      "learning_rate": 9.326251896813355e-06,
      "loss": 0.2707,
      "step": 396
    },
    {
      "epoch": 0.11501355697080522,
      "grad_norm": 2.260861396789551,
      "learning_rate": 9.317147192716238e-06,
      "loss": 0.2379,
      "step": 399
    },
    {
      "epoch": 0.11530181149955411,
      "eval_loss": 0.26182544231414795,
      "eval_mse": 0.26182544356794096,
      "eval_runtime": 64.4569,
      "eval_samples_per_second": 15.514,
      "eval_steps_per_second": 3.879,
      "step": 400
    },
    {
      "epoch": 0.11587832055705188,
      "grad_norm": 4.492468357086182,
      "learning_rate": 9.30804248861912e-06,
      "loss": 0.2976,
      "step": 402
    },
    {
      "epoch": 0.11674308414329854,
      "grad_norm": 2.6328372955322266,
      "learning_rate": 9.298937784522003e-06,
      "loss": 0.2224,
      "step": 405
    },
    {
      "epoch": 0.11760784772954519,
      "grad_norm": 4.318969249725342,
      "learning_rate": 9.289833080424886e-06,
      "loss": 0.2396,
      "step": 408
    },
    {
      "epoch": 0.11847261131579184,
      "grad_norm": 3.1604599952697754,
      "learning_rate": 9.28072837632777e-06,
      "loss": 0.2492,
      "step": 411
    },
    {
      "epoch": 0.1193373749020385,
      "grad_norm": 4.404653072357178,
      "learning_rate": 9.271623672230654e-06,
      "loss": 0.3145,
      "step": 414
    },
    {
      "epoch": 0.12020213848828516,
      "grad_norm": 2.884675979614258,
      "learning_rate": 9.262518968133536e-06,
      "loss": 0.2851,
      "step": 417
    },
    {
      "epoch": 0.12106690207453182,
      "grad_norm": 2.1803503036499023,
      "learning_rate": 9.25341426403642e-06,
      "loss": 0.2832,
      "step": 420
    },
    {
      "epoch": 0.12193166566077847,
      "grad_norm": 4.99426794052124,
      "learning_rate": 9.244309559939302e-06,
      "loss": 0.3244,
      "step": 423
    },
    {
      "epoch": 0.12279642924702512,
      "grad_norm": 2.84147310256958,
      "learning_rate": 9.235204855842187e-06,
      "loss": 0.2886,
      "step": 426
    },
    {
      "epoch": 0.12366119283327177,
      "grad_norm": 7.547422885894775,
      "learning_rate": 9.22610015174507e-06,
      "loss": 0.3274,
      "step": 429
    },
    {
      "epoch": 0.12452595641951844,
      "grad_norm": 3.3562684059143066,
      "learning_rate": 9.216995447647952e-06,
      "loss": 0.2726,
      "step": 432
    },
    {
      "epoch": 0.1253907200057651,
      "grad_norm": 5.314599990844727,
      "learning_rate": 9.207890743550835e-06,
      "loss": 0.2757,
      "step": 435
    },
    {
      "epoch": 0.12625548359201175,
      "grad_norm": 2.0773863792419434,
      "learning_rate": 9.198786039453718e-06,
      "loss": 0.2342,
      "step": 438
    },
    {
      "epoch": 0.1271202471782584,
      "grad_norm": 4.290255546569824,
      "learning_rate": 9.189681335356601e-06,
      "loss": 0.3036,
      "step": 441
    },
    {
      "epoch": 0.12798501076450505,
      "grad_norm": 5.141223907470703,
      "learning_rate": 9.180576631259485e-06,
      "loss": 0.2455,
      "step": 444
    },
    {
      "epoch": 0.1288497743507517,
      "grad_norm": 2.8631341457366943,
      "learning_rate": 9.171471927162368e-06,
      "loss": 0.229,
      "step": 447
    },
    {
      "epoch": 0.12971453793699836,
      "grad_norm": 6.279902935028076,
      "learning_rate": 9.162367223065251e-06,
      "loss": 0.2693,
      "step": 450
    },
    {
      "epoch": 0.130579301523245,
      "grad_norm": 1.910251498222351,
      "learning_rate": 9.153262518968134e-06,
      "loss": 0.2384,
      "step": 453
    },
    {
      "epoch": 0.1314440651094917,
      "grad_norm": 3.9534478187561035,
      "learning_rate": 9.144157814871017e-06,
      "loss": 0.3125,
      "step": 456
    },
    {
      "epoch": 0.13230882869573835,
      "grad_norm": 4.347681522369385,
      "learning_rate": 9.1350531107739e-06,
      "loss": 0.3019,
      "step": 459
    },
    {
      "epoch": 0.133173592281985,
      "grad_norm": 2.742349624633789,
      "learning_rate": 9.125948406676784e-06,
      "loss": 0.2647,
      "step": 462
    },
    {
      "epoch": 0.13403835586823165,
      "grad_norm": 3.5790295600891113,
      "learning_rate": 9.116843702579667e-06,
      "loss": 0.3299,
      "step": 465
    },
    {
      "epoch": 0.1349031194544783,
      "grad_norm": 2.90129017829895,
      "learning_rate": 9.10773899848255e-06,
      "loss": 0.2666,
      "step": 468
    },
    {
      "epoch": 0.13576788304072496,
      "grad_norm": 1.934597134590149,
      "learning_rate": 9.098634294385433e-06,
      "loss": 0.2665,
      "step": 471
    },
    {
      "epoch": 0.1366326466269716,
      "grad_norm": 2.528752088546753,
      "learning_rate": 9.089529590288316e-06,
      "loss": 0.2386,
      "step": 474
    },
    {
      "epoch": 0.13749741021321826,
      "grad_norm": 3.6109018325805664,
      "learning_rate": 9.080424886191198e-06,
      "loss": 0.2733,
      "step": 477
    },
    {
      "epoch": 0.13836217379946492,
      "grad_norm": 2.2715227603912354,
      "learning_rate": 9.071320182094083e-06,
      "loss": 0.2703,
      "step": 480
    },
    {
      "epoch": 0.1392269373857116,
      "grad_norm": 6.626646995544434,
      "learning_rate": 9.062215477996966e-06,
      "loss": 0.2605,
      "step": 483
    },
    {
      "epoch": 0.14009170097195825,
      "grad_norm": 2.91410756111145,
      "learning_rate": 9.053110773899849e-06,
      "loss": 0.2382,
      "step": 486
    },
    {
      "epoch": 0.1409564645582049,
      "grad_norm": 4.639560699462891,
      "learning_rate": 9.044006069802731e-06,
      "loss": 0.2977,
      "step": 489
    },
    {
      "epoch": 0.14182122814445156,
      "grad_norm": 3.643523693084717,
      "learning_rate": 9.034901365705614e-06,
      "loss": 0.2775,
      "step": 492
    },
    {
      "epoch": 0.1426859917306982,
      "grad_norm": 4.396177291870117,
      "learning_rate": 9.025796661608497e-06,
      "loss": 0.23,
      "step": 495
    },
    {
      "epoch": 0.14355075531694486,
      "grad_norm": 2.5602617263793945,
      "learning_rate": 9.016691957511382e-06,
      "loss": 0.289,
      "step": 498
    },
    {
      "epoch": 0.14412726437444262,
      "eval_loss": 0.25831595063209534,
      "eval_mse": 0.2583159562349319,
      "eval_runtime": 64.3088,
      "eval_samples_per_second": 15.55,
      "eval_steps_per_second": 3.887,
      "step": 500
    },
    {
      "epoch": 0.14441551890319151,
      "grad_norm": 3.0293095111846924,
      "learning_rate": 9.007587253414265e-06,
      "loss": 0.2419,
      "step": 501
    },
    {
      "epoch": 0.14528028248943817,
      "grad_norm": 2.9694740772247314,
      "learning_rate": 8.998482549317147e-06,
      "loss": 0.2689,
      "step": 504
    },
    {
      "epoch": 0.14614504607568482,
      "grad_norm": 4.87199068069458,
      "learning_rate": 8.98937784522003e-06,
      "loss": 0.2353,
      "step": 507
    },
    {
      "epoch": 0.14700980966193147,
      "grad_norm": 2.5802090167999268,
      "learning_rate": 8.980273141122913e-06,
      "loss": 0.2865,
      "step": 510
    },
    {
      "epoch": 0.14787457324817815,
      "grad_norm": 2.9250080585479736,
      "learning_rate": 8.971168437025798e-06,
      "loss": 0.24,
      "step": 513
    },
    {
      "epoch": 0.1487393368344248,
      "grad_norm": 5.678205966949463,
      "learning_rate": 8.96206373292868e-06,
      "loss": 0.2357,
      "step": 516
    },
    {
      "epoch": 0.14960410042067146,
      "grad_norm": 4.411201477050781,
      "learning_rate": 8.952959028831563e-06,
      "loss": 0.2605,
      "step": 519
    },
    {
      "epoch": 0.1504688640069181,
      "grad_norm": 3.1570451259613037,
      "learning_rate": 8.943854324734446e-06,
      "loss": 0.2841,
      "step": 522
    },
    {
      "epoch": 0.15133362759316477,
      "grad_norm": 4.235450744628906,
      "learning_rate": 8.934749620637329e-06,
      "loss": 0.2675,
      "step": 525
    },
    {
      "epoch": 0.15219839117941142,
      "grad_norm": 5.139553070068359,
      "learning_rate": 8.925644916540213e-06,
      "loss": 0.2944,
      "step": 528
    },
    {
      "epoch": 0.15306315476565807,
      "grad_norm": 2.4079232215881348,
      "learning_rate": 8.916540212443096e-06,
      "loss": 0.2169,
      "step": 531
    },
    {
      "epoch": 0.15392791835190472,
      "grad_norm": 5.065981388092041,
      "learning_rate": 8.90743550834598e-06,
      "loss": 0.306,
      "step": 534
    },
    {
      "epoch": 0.15479268193815138,
      "grad_norm": 2.997144937515259,
      "learning_rate": 8.898330804248862e-06,
      "loss": 0.2389,
      "step": 537
    },
    {
      "epoch": 0.15565744552439803,
      "grad_norm": 6.104944705963135,
      "learning_rate": 8.889226100151745e-06,
      "loss": 0.258,
      "step": 540
    },
    {
      "epoch": 0.1565222091106447,
      "grad_norm": 2.971264362335205,
      "learning_rate": 8.880121396054628e-06,
      "loss": 0.2914,
      "step": 543
    },
    {
      "epoch": 0.15738697269689136,
      "grad_norm": 14.179126739501953,
      "learning_rate": 8.871016691957512e-06,
      "loss": 0.2632,
      "step": 546
    },
    {
      "epoch": 0.15825173628313802,
      "grad_norm": 1.9280515909194946,
      "learning_rate": 8.861911987860395e-06,
      "loss": 0.2743,
      "step": 549
    },
    {
      "epoch": 0.15911649986938467,
      "grad_norm": 9.689038276672363,
      "learning_rate": 8.852807283763278e-06,
      "loss": 0.2148,
      "step": 552
    },
    {
      "epoch": 0.15998126345563132,
      "grad_norm": 2.9501137733459473,
      "learning_rate": 8.84370257966616e-06,
      "loss": 0.251,
      "step": 555
    },
    {
      "epoch": 0.16084602704187798,
      "grad_norm": 6.09636116027832,
      "learning_rate": 8.834597875569044e-06,
      "loss": 0.3178,
      "step": 558
    },
    {
      "epoch": 0.16171079062812463,
      "grad_norm": 3.230820894241333,
      "learning_rate": 8.825493171471928e-06,
      "loss": 0.2574,
      "step": 561
    },
    {
      "epoch": 0.16257555421437128,
      "grad_norm": 8.147329330444336,
      "learning_rate": 8.816388467374811e-06,
      "loss": 0.3363,
      "step": 564
    },
    {
      "epoch": 0.16344031780061793,
      "grad_norm": 6.0211100578308105,
      "learning_rate": 8.807283763277694e-06,
      "loss": 0.2568,
      "step": 567
    },
    {
      "epoch": 0.1643050813868646,
      "grad_norm": 3.558443307876587,
      "learning_rate": 8.798179059180577e-06,
      "loss": 0.2346,
      "step": 570
    },
    {
      "epoch": 0.16516984497311127,
      "grad_norm": 6.004063129425049,
      "learning_rate": 8.78907435508346e-06,
      "loss": 0.28,
      "step": 573
    },
    {
      "epoch": 0.16603460855935792,
      "grad_norm": 3.2006189823150635,
      "learning_rate": 8.779969650986344e-06,
      "loss": 0.2356,
      "step": 576
    },
    {
      "epoch": 0.16689937214560457,
      "grad_norm": 3.382519245147705,
      "learning_rate": 8.770864946889227e-06,
      "loss": 0.2565,
      "step": 579
    },
    {
      "epoch": 0.16776413573185123,
      "grad_norm": 3.465315580368042,
      "learning_rate": 8.76176024279211e-06,
      "loss": 0.2757,
      "step": 582
    },
    {
      "epoch": 0.16862889931809788,
      "grad_norm": 3.368386745452881,
      "learning_rate": 8.752655538694993e-06,
      "loss": 0.2663,
      "step": 585
    },
    {
      "epoch": 0.16949366290434453,
      "grad_norm": 5.74733304977417,
      "learning_rate": 8.743550834597875e-06,
      "loss": 0.2539,
      "step": 588
    },
    {
      "epoch": 0.17035842649059119,
      "grad_norm": 2.7959225177764893,
      "learning_rate": 8.73444613050076e-06,
      "loss": 0.232,
      "step": 591
    },
    {
      "epoch": 0.17122319007683784,
      "grad_norm": 6.77617073059082,
      "learning_rate": 8.725341426403643e-06,
      "loss": 0.2858,
      "step": 594
    },
    {
      "epoch": 0.1720879536630845,
      "grad_norm": 1.6699326038360596,
      "learning_rate": 8.716236722306526e-06,
      "loss": 0.2202,
      "step": 597
    },
    {
      "epoch": 0.17295271724933117,
      "grad_norm": 7.734217166900635,
      "learning_rate": 8.707132018209408e-06,
      "loss": 0.3049,
      "step": 600
    },
    {
      "epoch": 0.17295271724933117,
      "eval_loss": 0.27226680517196655,
      "eval_mse": 0.27226682132482527,
      "eval_runtime": 64.8839,
      "eval_samples_per_second": 15.412,
      "eval_steps_per_second": 3.853,
      "step": 600
    },
    {
      "epoch": 0.17381748083557783,
      "grad_norm": 2.729203224182129,
      "learning_rate": 8.698027314112291e-06,
      "loss": 0.2705,
      "step": 603
    },
    {
      "epoch": 0.17468224442182448,
      "grad_norm": 3.3243207931518555,
      "learning_rate": 8.688922610015174e-06,
      "loss": 0.2685,
      "step": 606
    },
    {
      "epoch": 0.17554700800807113,
      "grad_norm": 3.1003451347351074,
      "learning_rate": 8.679817905918059e-06,
      "loss": 0.2483,
      "step": 609
    },
    {
      "epoch": 0.17641177159431778,
      "grad_norm": 5.373556613922119,
      "learning_rate": 8.670713201820942e-06,
      "loss": 0.2593,
      "step": 612
    },
    {
      "epoch": 0.17727653518056444,
      "grad_norm": 5.082779407501221,
      "learning_rate": 8.661608497723824e-06,
      "loss": 0.2755,
      "step": 615
    },
    {
      "epoch": 0.1781412987668111,
      "grad_norm": 5.2915802001953125,
      "learning_rate": 8.652503793626707e-06,
      "loss": 0.2428,
      "step": 618
    },
    {
      "epoch": 0.17900606235305774,
      "grad_norm": 4.1451568603515625,
      "learning_rate": 8.64339908952959e-06,
      "loss": 0.3072,
      "step": 621
    },
    {
      "epoch": 0.1798708259393044,
      "grad_norm": 5.081033229827881,
      "learning_rate": 8.634294385432475e-06,
      "loss": 0.2515,
      "step": 624
    },
    {
      "epoch": 0.18073558952555105,
      "grad_norm": 3.615787982940674,
      "learning_rate": 8.625189681335357e-06,
      "loss": 0.2421,
      "step": 627
    },
    {
      "epoch": 0.18160035311179773,
      "grad_norm": 3.4306159019470215,
      "learning_rate": 8.61608497723824e-06,
      "loss": 0.2378,
      "step": 630
    },
    {
      "epoch": 0.18246511669804438,
      "grad_norm": 6.089697360992432,
      "learning_rate": 8.606980273141123e-06,
      "loss": 0.2537,
      "step": 633
    },
    {
      "epoch": 0.18332988028429104,
      "grad_norm": 3.5059478282928467,
      "learning_rate": 8.597875569044006e-06,
      "loss": 0.2637,
      "step": 636
    },
    {
      "epoch": 0.1841946438705377,
      "grad_norm": 4.7960710525512695,
      "learning_rate": 8.58877086494689e-06,
      "loss": 0.2519,
      "step": 639
    },
    {
      "epoch": 0.18505940745678434,
      "grad_norm": 2.7048394680023193,
      "learning_rate": 8.579666160849773e-06,
      "loss": 0.2757,
      "step": 642
    },
    {
      "epoch": 0.185924171043031,
      "grad_norm": 4.179544925689697,
      "learning_rate": 8.570561456752656e-06,
      "loss": 0.2829,
      "step": 645
    },
    {
      "epoch": 0.18678893462927765,
      "grad_norm": 2.122403383255005,
      "learning_rate": 8.561456752655539e-06,
      "loss": 0.2636,
      "step": 648
    },
    {
      "epoch": 0.1876536982155243,
      "grad_norm": 7.5383830070495605,
      "learning_rate": 8.552352048558422e-06,
      "loss": 0.3059,
      "step": 651
    },
    {
      "epoch": 0.18851846180177095,
      "grad_norm": 3.6806488037109375,
      "learning_rate": 8.543247344461306e-06,
      "loss": 0.2266,
      "step": 654
    },
    {
      "epoch": 0.1893832253880176,
      "grad_norm": 4.183900833129883,
      "learning_rate": 8.53414264036419e-06,
      "loss": 0.2696,
      "step": 657
    },
    {
      "epoch": 0.1902479889742643,
      "grad_norm": 7.027878761291504,
      "learning_rate": 8.525037936267072e-06,
      "loss": 0.2292,
      "step": 660
    },
    {
      "epoch": 0.19111275256051094,
      "grad_norm": 6.008081436157227,
      "learning_rate": 8.515933232169955e-06,
      "loss": 0.2628,
      "step": 663
    },
    {
      "epoch": 0.1919775161467576,
      "grad_norm": 1.9789228439331055,
      "learning_rate": 8.506828528072838e-06,
      "loss": 0.2328,
      "step": 666
    },
    {
      "epoch": 0.19284227973300425,
      "grad_norm": 4.350831985473633,
      "learning_rate": 8.49772382397572e-06,
      "loss": 0.2743,
      "step": 669
    },
    {
      "epoch": 0.1937070433192509,
      "grad_norm": 2.4189023971557617,
      "learning_rate": 8.488619119878605e-06,
      "loss": 0.2549,
      "step": 672
    },
    {
      "epoch": 0.19457180690549755,
      "grad_norm": 2.49760103225708,
      "learning_rate": 8.479514415781488e-06,
      "loss": 0.2287,
      "step": 675
    },
    {
      "epoch": 0.1954365704917442,
      "grad_norm": 4.735384941101074,
      "learning_rate": 8.470409711684371e-06,
      "loss": 0.2168,
      "step": 678
    },
    {
      "epoch": 0.19630133407799086,
      "grad_norm": 1.813238263130188,
      "learning_rate": 8.461305007587254e-06,
      "loss": 0.2536,
      "step": 681
    },
    {
      "epoch": 0.1971660976642375,
      "grad_norm": 2.416673421859741,
      "learning_rate": 8.452200303490137e-06,
      "loss": 0.2538,
      "step": 684
    },
    {
      "epoch": 0.19803086125048416,
      "grad_norm": 2.3501391410827637,
      "learning_rate": 8.443095599393021e-06,
      "loss": 0.2682,
      "step": 687
    },
    {
      "epoch": 0.19889562483673084,
      "grad_norm": 4.72876501083374,
      "learning_rate": 8.433990895295904e-06,
      "loss": 0.2192,
      "step": 690
    },
    {
      "epoch": 0.1997603884229775,
      "grad_norm": 2.0732574462890625,
      "learning_rate": 8.424886191198787e-06,
      "loss": 0.2612,
      "step": 693
    },
    {
      "epoch": 0.20062515200922415,
      "grad_norm": 3.1769752502441406,
      "learning_rate": 8.41578148710167e-06,
      "loss": 0.2434,
      "step": 696
    },
    {
      "epoch": 0.2014899155954708,
      "grad_norm": 4.40995454788208,
      "learning_rate": 8.406676783004552e-06,
      "loss": 0.2292,
      "step": 699
    },
    {
      "epoch": 0.2017781701242197,
      "eval_loss": 0.2476717233657837,
      "eval_mse": 0.24767172618419864,
      "eval_runtime": 64.3865,
      "eval_samples_per_second": 15.531,
      "eval_steps_per_second": 3.883,
      "step": 700
    },
    {
      "epoch": 0.20235467918171746,
      "grad_norm": 1.8437422513961792,
      "learning_rate": 8.397572078907437e-06,
      "loss": 0.2751,
      "step": 702
    },
    {
      "epoch": 0.2032194427679641,
      "grad_norm": 1.9612832069396973,
      "learning_rate": 8.38846737481032e-06,
      "loss": 0.2322,
      "step": 705
    },
    {
      "epoch": 0.20408420635421076,
      "grad_norm": 1.9508056640625,
      "learning_rate": 8.379362670713203e-06,
      "loss": 0.2206,
      "step": 708
    },
    {
      "epoch": 0.20494896994045742,
      "grad_norm": 12.081295013427734,
      "learning_rate": 8.370257966616086e-06,
      "loss": 0.2845,
      "step": 711
    },
    {
      "epoch": 0.20581373352670407,
      "grad_norm": 7.8822479248046875,
      "learning_rate": 8.361153262518968e-06,
      "loss": 0.2439,
      "step": 714
    },
    {
      "epoch": 0.20667849711295075,
      "grad_norm": 7.18080997467041,
      "learning_rate": 8.352048558421853e-06,
      "loss": 0.2813,
      "step": 717
    },
    {
      "epoch": 0.2075432606991974,
      "grad_norm": 2.941767692565918,
      "learning_rate": 8.342943854324736e-06,
      "loss": 0.2384,
      "step": 720
    },
    {
      "epoch": 0.20840802428544405,
      "grad_norm": 3.640935182571411,
      "learning_rate": 8.333839150227619e-06,
      "loss": 0.2535,
      "step": 723
    },
    {
      "epoch": 0.2092727878716907,
      "grad_norm": 3.2023556232452393,
      "learning_rate": 8.324734446130501e-06,
      "loss": 0.2822,
      "step": 726
    },
    {
      "epoch": 0.21013755145793736,
      "grad_norm": 4.521758556365967,
      "learning_rate": 8.315629742033384e-06,
      "loss": 0.2604,
      "step": 729
    },
    {
      "epoch": 0.211002315044184,
      "grad_norm": 2.9931998252868652,
      "learning_rate": 8.306525037936269e-06,
      "loss": 0.239,
      "step": 732
    },
    {
      "epoch": 0.21186707863043067,
      "grad_norm": 3.8456552028656006,
      "learning_rate": 8.297420333839152e-06,
      "loss": 0.2256,
      "step": 735
    },
    {
      "epoch": 0.21273184221667732,
      "grad_norm": 3.0459201335906982,
      "learning_rate": 8.288315629742034e-06,
      "loss": 0.3055,
      "step": 738
    },
    {
      "epoch": 0.21359660580292397,
      "grad_norm": 6.939425468444824,
      "learning_rate": 8.279210925644917e-06,
      "loss": 0.2945,
      "step": 741
    },
    {
      "epoch": 0.21446136938917063,
      "grad_norm": 4.698008060455322,
      "learning_rate": 8.2701062215478e-06,
      "loss": 0.2558,
      "step": 744
    },
    {
      "epoch": 0.2153261329754173,
      "grad_norm": 2.6725728511810303,
      "learning_rate": 8.261001517450683e-06,
      "loss": 0.2487,
      "step": 747
    },
    {
      "epoch": 0.21619089656166396,
      "grad_norm": 3.1390607357025146,
      "learning_rate": 8.251896813353568e-06,
      "loss": 0.2371,
      "step": 750
    },
    {
      "epoch": 0.2170556601479106,
      "grad_norm": 4.460456371307373,
      "learning_rate": 8.24279210925645e-06,
      "loss": 0.2595,
      "step": 753
    },
    {
      "epoch": 0.21792042373415726,
      "grad_norm": 7.146320343017578,
      "learning_rate": 8.233687405159333e-06,
      "loss": 0.2687,
      "step": 756
    },
    {
      "epoch": 0.21878518732040392,
      "grad_norm": 5.645358562469482,
      "learning_rate": 8.224582701062216e-06,
      "loss": 0.2402,
      "step": 759
    },
    {
      "epoch": 0.21964995090665057,
      "grad_norm": 2.5356500148773193,
      "learning_rate": 8.215477996965099e-06,
      "loss": 0.2567,
      "step": 762
    },
    {
      "epoch": 0.22051471449289722,
      "grad_norm": 5.814233303070068,
      "learning_rate": 8.206373292867983e-06,
      "loss": 0.2535,
      "step": 765
    },
    {
      "epoch": 0.22137947807914388,
      "grad_norm": 2.0408260822296143,
      "learning_rate": 8.197268588770866e-06,
      "loss": 0.254,
      "step": 768
    },
    {
      "epoch": 0.22224424166539053,
      "grad_norm": 2.689192056655884,
      "learning_rate": 8.188163884673749e-06,
      "loss": 0.2397,
      "step": 771
    },
    {
      "epoch": 0.22310900525163718,
      "grad_norm": 4.501987934112549,
      "learning_rate": 8.179059180576632e-06,
      "loss": 0.2507,
      "step": 774
    },
    {
      "epoch": 0.22397376883788386,
      "grad_norm": 3.8230247497558594,
      "learning_rate": 8.169954476479515e-06,
      "loss": 0.253,
      "step": 777
    },
    {
      "epoch": 0.22483853242413052,
      "grad_norm": 4.210086345672607,
      "learning_rate": 8.1608497723824e-06,
      "loss": 0.231,
      "step": 780
    },
    {
      "epoch": 0.22570329601037717,
      "grad_norm": 3.3682661056518555,
      "learning_rate": 8.151745068285282e-06,
      "loss": 0.2632,
      "step": 783
    },
    {
      "epoch": 0.22656805959662382,
      "grad_norm": 3.226896286010742,
      "learning_rate": 8.142640364188165e-06,
      "loss": 0.2293,
      "step": 786
    },
    {
      "epoch": 0.22743282318287047,
      "grad_norm": 1.880353569984436,
      "learning_rate": 8.133535660091048e-06,
      "loss": 0.2529,
      "step": 789
    },
    {
      "epoch": 0.22829758676911713,
      "grad_norm": 5.078037738800049,
      "learning_rate": 8.12443095599393e-06,
      "loss": 0.2533,
      "step": 792
    },
    {
      "epoch": 0.22916235035536378,
      "grad_norm": 5.995652198791504,
      "learning_rate": 8.115326251896815e-06,
      "loss": 0.2685,
      "step": 795
    },
    {
      "epoch": 0.23002711394161043,
      "grad_norm": 4.134960174560547,
      "learning_rate": 8.106221547799698e-06,
      "loss": 0.2677,
      "step": 798
    },
    {
      "epoch": 0.23060362299910822,
      "eval_loss": 0.23694634437561035,
      "eval_mse": 0.23694635881483556,
      "eval_runtime": 64.1432,
      "eval_samples_per_second": 15.59,
      "eval_steps_per_second": 3.898,
      "step": 800
    },
    {
      "epoch": 0.2308918775278571,
      "grad_norm": 2.2709124088287354,
      "learning_rate": 8.097116843702581e-06,
      "loss": 0.2338,
      "step": 801
    },
    {
      "epoch": 0.23175664111410377,
      "grad_norm": 3.1088995933532715,
      "learning_rate": 8.088012139605464e-06,
      "loss": 0.2342,
      "step": 804
    },
    {
      "epoch": 0.23262140470035042,
      "grad_norm": 2.4137418270111084,
      "learning_rate": 8.078907435508347e-06,
      "loss": 0.2467,
      "step": 807
    },
    {
      "epoch": 0.23348616828659707,
      "grad_norm": 14.154044151306152,
      "learning_rate": 8.06980273141123e-06,
      "loss": 0.2386,
      "step": 810
    },
    {
      "epoch": 0.23435093187284373,
      "grad_norm": 6.077353477478027,
      "learning_rate": 8.060698027314114e-06,
      "loss": 0.2243,
      "step": 813
    },
    {
      "epoch": 0.23521569545909038,
      "grad_norm": 2.9805984497070312,
      "learning_rate": 8.051593323216997e-06,
      "loss": 0.2451,
      "step": 816
    },
    {
      "epoch": 0.23608045904533703,
      "grad_norm": 2.8187265396118164,
      "learning_rate": 8.04248861911988e-06,
      "loss": 0.2922,
      "step": 819
    },
    {
      "epoch": 0.23694522263158369,
      "grad_norm": 2.3969638347625732,
      "learning_rate": 8.033383915022763e-06,
      "loss": 0.2579,
      "step": 822
    },
    {
      "epoch": 0.23780998621783034,
      "grad_norm": 2.1792314052581787,
      "learning_rate": 8.024279210925645e-06,
      "loss": 0.2282,
      "step": 825
    },
    {
      "epoch": 0.238674749804077,
      "grad_norm": 2.370007038116455,
      "learning_rate": 8.01517450682853e-06,
      "loss": 0.291,
      "step": 828
    },
    {
      "epoch": 0.23953951339032364,
      "grad_norm": 1.9614750146865845,
      "learning_rate": 8.006069802731413e-06,
      "loss": 0.2391,
      "step": 831
    },
    {
      "epoch": 0.24040427697657032,
      "grad_norm": 2.416917324066162,
      "learning_rate": 7.996965098634296e-06,
      "loss": 0.2546,
      "step": 834
    },
    {
      "epoch": 0.24126904056281698,
      "grad_norm": 2.136915683746338,
      "learning_rate": 7.987860394537178e-06,
      "loss": 0.2664,
      "step": 837
    },
    {
      "epoch": 0.24213380414906363,
      "grad_norm": 2.0941572189331055,
      "learning_rate": 7.978755690440061e-06,
      "loss": 0.2597,
      "step": 840
    },
    {
      "epoch": 0.24299856773531028,
      "grad_norm": 3.3543283939361572,
      "learning_rate": 7.969650986342944e-06,
      "loss": 0.233,
      "step": 843
    },
    {
      "epoch": 0.24386333132155694,
      "grad_norm": 2.2875921726226807,
      "learning_rate": 7.960546282245829e-06,
      "loss": 0.2302,
      "step": 846
    },
    {
      "epoch": 0.2447280949078036,
      "grad_norm": 2.283102035522461,
      "learning_rate": 7.951441578148712e-06,
      "loss": 0.2825,
      "step": 849
    },
    {
      "epoch": 0.24559285849405024,
      "grad_norm": 5.802175998687744,
      "learning_rate": 7.942336874051594e-06,
      "loss": 0.2515,
      "step": 852
    },
    {
      "epoch": 0.2464576220802969,
      "grad_norm": 2.8898258209228516,
      "learning_rate": 7.933232169954477e-06,
      "loss": 0.2419,
      "step": 855
    },
    {
      "epoch": 0.24732238566654355,
      "grad_norm": 1.9422801733016968,
      "learning_rate": 7.92412746585736e-06,
      "loss": 0.2361,
      "step": 858
    },
    {
      "epoch": 0.2481871492527902,
      "grad_norm": 3.0359907150268555,
      "learning_rate": 7.915022761760245e-06,
      "loss": 0.1944,
      "step": 861
    },
    {
      "epoch": 0.24905191283903688,
      "grad_norm": 3.544053554534912,
      "learning_rate": 7.905918057663127e-06,
      "loss": 0.2938,
      "step": 864
    },
    {
      "epoch": 0.24991667642528353,
      "grad_norm": 2.4866981506347656,
      "learning_rate": 7.89681335356601e-06,
      "loss": 0.2339,
      "step": 867
    },
    {
      "epoch": 0.2507814400115302,
      "grad_norm": 9.933099746704102,
      "learning_rate": 7.887708649468893e-06,
      "loss": 0.2824,
      "step": 870
    },
    {
      "epoch": 0.25164620359777684,
      "grad_norm": 1.8425731658935547,
      "learning_rate": 7.878603945371776e-06,
      "loss": 0.2065,
      "step": 873
    },
    {
      "epoch": 0.2525109671840235,
      "grad_norm": 14.634519577026367,
      "learning_rate": 7.869499241274659e-06,
      "loss": 0.2422,
      "step": 876
    },
    {
      "epoch": 0.25337573077027015,
      "grad_norm": 3.1440680027008057,
      "learning_rate": 7.860394537177543e-06,
      "loss": 0.2418,
      "step": 879
    },
    {
      "epoch": 0.2542404943565168,
      "grad_norm": 6.234373092651367,
      "learning_rate": 7.851289833080426e-06,
      "loss": 0.233,
      "step": 882
    },
    {
      "epoch": 0.25510525794276345,
      "grad_norm": 2.140277147293091,
      "learning_rate": 7.842185128983309e-06,
      "loss": 0.2527,
      "step": 885
    },
    {
      "epoch": 0.2559700215290101,
      "grad_norm": 3.195327043533325,
      "learning_rate": 7.833080424886192e-06,
      "loss": 0.2549,
      "step": 888
    },
    {
      "epoch": 0.25683478511525676,
      "grad_norm": 2.8949568271636963,
      "learning_rate": 7.823975720789075e-06,
      "loss": 0.2318,
      "step": 891
    },
    {
      "epoch": 0.2576995487015034,
      "grad_norm": 3.3351497650146484,
      "learning_rate": 7.814871016691958e-06,
      "loss": 0.2349,
      "step": 894
    },
    {
      "epoch": 0.25856431228775006,
      "grad_norm": 7.218577861785889,
      "learning_rate": 7.805766312594842e-06,
      "loss": 0.2856,
      "step": 897
    },
    {
      "epoch": 0.2594290758739967,
      "grad_norm": 3.1537606716156006,
      "learning_rate": 7.796661608497725e-06,
      "loss": 0.3181,
      "step": 900
    },
    {
      "epoch": 0.2594290758739967,
      "eval_loss": 0.23068155348300934,
      "eval_mse": 0.2306815572567284,
      "eval_runtime": 64.5975,
      "eval_samples_per_second": 15.48,
      "eval_steps_per_second": 3.87,
      "step": 900
    },
    {
      "epoch": 0.26029383946024337,
      "grad_norm": 4.1888933181762695,
      "learning_rate": 7.787556904400608e-06,
      "loss": 0.2682,
      "step": 903
    },
    {
      "epoch": 0.26115860304649,
      "grad_norm": 2.9940059185028076,
      "learning_rate": 7.77845220030349e-06,
      "loss": 0.2381,
      "step": 906
    },
    {
      "epoch": 0.26202336663273673,
      "grad_norm": 2.2147626876831055,
      "learning_rate": 7.769347496206373e-06,
      "loss": 0.2803,
      "step": 909
    },
    {
      "epoch": 0.2628881302189834,
      "grad_norm": 2.8790485858917236,
      "learning_rate": 7.760242792109256e-06,
      "loss": 0.2372,
      "step": 912
    },
    {
      "epoch": 0.26375289380523004,
      "grad_norm": 4.44177770614624,
      "learning_rate": 7.75113808801214e-06,
      "loss": 0.2345,
      "step": 915
    },
    {
      "epoch": 0.2646176573914767,
      "grad_norm": 1.7946621179580688,
      "learning_rate": 7.742033383915024e-06,
      "loss": 0.2215,
      "step": 918
    },
    {
      "epoch": 0.26548242097772334,
      "grad_norm": 2.3959579467773438,
      "learning_rate": 7.732928679817907e-06,
      "loss": 0.2286,
      "step": 921
    },
    {
      "epoch": 0.26634718456397,
      "grad_norm": 6.54136848449707,
      "learning_rate": 7.72382397572079e-06,
      "loss": 0.2423,
      "step": 924
    },
    {
      "epoch": 0.26721194815021665,
      "grad_norm": 7.17802095413208,
      "learning_rate": 7.714719271623672e-06,
      "loss": 0.2535,
      "step": 927
    },
    {
      "epoch": 0.2680767117364633,
      "grad_norm": 2.7691524028778076,
      "learning_rate": 7.705614567526557e-06,
      "loss": 0.2141,
      "step": 930
    },
    {
      "epoch": 0.26894147532270996,
      "grad_norm": 4.189211368560791,
      "learning_rate": 7.69650986342944e-06,
      "loss": 0.2577,
      "step": 933
    },
    {
      "epoch": 0.2698062389089566,
      "grad_norm": 3.442680835723877,
      "learning_rate": 7.687405159332322e-06,
      "loss": 0.2438,
      "step": 936
    },
    {
      "epoch": 0.27067100249520326,
      "grad_norm": 2.050264596939087,
      "learning_rate": 7.678300455235205e-06,
      "loss": 0.262,
      "step": 939
    },
    {
      "epoch": 0.2715357660814499,
      "grad_norm": 6.750906944274902,
      "learning_rate": 7.669195751138088e-06,
      "loss": 0.2789,
      "step": 942
    },
    {
      "epoch": 0.27240052966769657,
      "grad_norm": 14.716294288635254,
      "learning_rate": 7.660091047040971e-06,
      "loss": 0.294,
      "step": 945
    },
    {
      "epoch": 0.2732652932539432,
      "grad_norm": 4.65310001373291,
      "learning_rate": 7.650986342943855e-06,
      "loss": 0.2938,
      "step": 948
    },
    {
      "epoch": 0.2741300568401899,
      "grad_norm": 4.3455424308776855,
      "learning_rate": 7.641881638846738e-06,
      "loss": 0.2654,
      "step": 951
    },
    {
      "epoch": 0.2749948204264365,
      "grad_norm": 18.063007354736328,
      "learning_rate": 7.632776934749621e-06,
      "loss": 0.2812,
      "step": 954
    },
    {
      "epoch": 0.2758595840126832,
      "grad_norm": 2.212507963180542,
      "learning_rate": 7.623672230652505e-06,
      "loss": 0.2134,
      "step": 957
    },
    {
      "epoch": 0.27672434759892983,
      "grad_norm": 2.462282180786133,
      "learning_rate": 7.614567526555388e-06,
      "loss": 0.235,
      "step": 960
    },
    {
      "epoch": 0.2775891111851765,
      "grad_norm": 9.775514602661133,
      "learning_rate": 7.6054628224582705e-06,
      "loss": 0.258,
      "step": 963
    },
    {
      "epoch": 0.2784538747714232,
      "grad_norm": 2.7479944229125977,
      "learning_rate": 7.596358118361153e-06,
      "loss": 0.2653,
      "step": 966
    },
    {
      "epoch": 0.27931863835766985,
      "grad_norm": 3.1024341583251953,
      "learning_rate": 7.587253414264037e-06,
      "loss": 0.2611,
      "step": 969
    },
    {
      "epoch": 0.2801834019439165,
      "grad_norm": 4.7744317054748535,
      "learning_rate": 7.578148710166921e-06,
      "loss": 0.2676,
      "step": 972
    },
    {
      "epoch": 0.28104816553016315,
      "grad_norm": 2.3438374996185303,
      "learning_rate": 7.569044006069804e-06,
      "loss": 0.2521,
      "step": 975
    },
    {
      "epoch": 0.2819129291164098,
      "grad_norm": 5.55867338180542,
      "learning_rate": 7.5599393019726864e-06,
      "loss": 0.2592,
      "step": 978
    },
    {
      "epoch": 0.28277769270265646,
      "grad_norm": 3.6212918758392334,
      "learning_rate": 7.550834597875569e-06,
      "loss": 0.2717,
      "step": 981
    },
    {
      "epoch": 0.2836424562889031,
      "grad_norm": 2.0010621547698975,
      "learning_rate": 7.541729893778453e-06,
      "loss": 0.2303,
      "step": 984
    },
    {
      "epoch": 0.28450721987514976,
      "grad_norm": 2.170227289199829,
      "learning_rate": 7.532625189681337e-06,
      "loss": 0.2436,
      "step": 987
    },
    {
      "epoch": 0.2853719834613964,
      "grad_norm": 3.4833590984344482,
      "learning_rate": 7.5235204855842195e-06,
      "loss": 0.2327,
      "step": 990
    },
    {
      "epoch": 0.28623674704764307,
      "grad_norm": 2.1279664039611816,
      "learning_rate": 7.514415781487102e-06,
      "loss": 0.2706,
      "step": 993
    },
    {
      "epoch": 0.2871015106338897,
      "grad_norm": 3.14772891998291,
      "learning_rate": 7.505311077389985e-06,
      "loss": 0.2325,
      "step": 996
    },
    {
      "epoch": 0.2879662742201364,
      "grad_norm": 3.4754092693328857,
      "learning_rate": 7.496206373292868e-06,
      "loss": 0.2551,
      "step": 999
    },
    {
      "epoch": 0.28825452874888524,
      "eval_loss": 0.24107317626476288,
      "eval_mse": 0.24107318028528243,
      "eval_runtime": 64.3256,
      "eval_samples_per_second": 15.546,
      "eval_steps_per_second": 3.886,
      "step": 1000
    },
    {
      "epoch": 0.28883103780638303,
      "grad_norm": 2.1394267082214355,
      "learning_rate": 7.487101669195752e-06,
      "loss": 0.2094,
      "step": 1002
    },
    {
      "epoch": 0.2896958013926297,
      "grad_norm": 1.6514432430267334,
      "learning_rate": 7.477996965098635e-06,
      "loss": 0.2475,
      "step": 1005
    },
    {
      "epoch": 0.29056056497887633,
      "grad_norm": 5.082777500152588,
      "learning_rate": 7.468892261001518e-06,
      "loss": 0.2829,
      "step": 1008
    },
    {
      "epoch": 0.291425328565123,
      "grad_norm": 3.7276644706726074,
      "learning_rate": 7.459787556904401e-06,
      "loss": 0.2955,
      "step": 1011
    },
    {
      "epoch": 0.29229009215136964,
      "grad_norm": 3.5418541431427,
      "learning_rate": 7.450682852807284e-06,
      "loss": 0.2359,
      "step": 1014
    },
    {
      "epoch": 0.2931548557376163,
      "grad_norm": 1.7081390619277954,
      "learning_rate": 7.441578148710168e-06,
      "loss": 0.2377,
      "step": 1017
    },
    {
      "epoch": 0.29401961932386295,
      "grad_norm": 3.4884068965911865,
      "learning_rate": 7.4324734446130505e-06,
      "loss": 0.1981,
      "step": 1020
    },
    {
      "epoch": 0.2948843829101096,
      "grad_norm": 2.5671768188476562,
      "learning_rate": 7.423368740515934e-06,
      "loss": 0.2657,
      "step": 1023
    },
    {
      "epoch": 0.2957491464963563,
      "grad_norm": 3.0763845443725586,
      "learning_rate": 7.414264036418817e-06,
      "loss": 0.2391,
      "step": 1026
    },
    {
      "epoch": 0.29661391008260296,
      "grad_norm": 2.09977388381958,
      "learning_rate": 7.4051593323217e-06,
      "loss": 0.2426,
      "step": 1029
    },
    {
      "epoch": 0.2974786736688496,
      "grad_norm": 1.816253900527954,
      "learning_rate": 7.3960546282245835e-06,
      "loss": 0.2309,
      "step": 1032
    },
    {
      "epoch": 0.29834343725509627,
      "grad_norm": 2.3119256496429443,
      "learning_rate": 7.386949924127466e-06,
      "loss": 0.2379,
      "step": 1035
    },
    {
      "epoch": 0.2992082008413429,
      "grad_norm": 4.264431953430176,
      "learning_rate": 7.377845220030349e-06,
      "loss": 0.2603,
      "step": 1038
    },
    {
      "epoch": 0.3000729644275896,
      "grad_norm": 2.759507656097412,
      "learning_rate": 7.368740515933233e-06,
      "loss": 0.2634,
      "step": 1041
    },
    {
      "epoch": 0.3009377280138362,
      "grad_norm": 2.629415988922119,
      "learning_rate": 7.359635811836116e-06,
      "loss": 0.2091,
      "step": 1044
    },
    {
      "epoch": 0.3018024916000829,
      "grad_norm": 1.7376865148544312,
      "learning_rate": 7.3505311077389994e-06,
      "loss": 0.227,
      "step": 1047
    },
    {
      "epoch": 0.30266725518632953,
      "grad_norm": 2.700201988220215,
      "learning_rate": 7.341426403641882e-06,
      "loss": 0.2336,
      "step": 1050
    },
    {
      "epoch": 0.3035320187725762,
      "grad_norm": 17.49102020263672,
      "learning_rate": 7.332321699544765e-06,
      "loss": 0.2714,
      "step": 1053
    },
    {
      "epoch": 0.30439678235882284,
      "grad_norm": 3.3329529762268066,
      "learning_rate": 7.323216995447649e-06,
      "loss": 0.2199,
      "step": 1056
    },
    {
      "epoch": 0.3052615459450695,
      "grad_norm": 2.0356404781341553,
      "learning_rate": 7.314112291350532e-06,
      "loss": 0.2329,
      "step": 1059
    },
    {
      "epoch": 0.30612630953131614,
      "grad_norm": 1.992146611213684,
      "learning_rate": 7.305007587253415e-06,
      "loss": 0.2305,
      "step": 1062
    },
    {
      "epoch": 0.3069910731175628,
      "grad_norm": 3.694671630859375,
      "learning_rate": 7.295902883156298e-06,
|
"loss": 0.2149, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.30785583670380945, |
|
"grad_norm": 2.258823871612549, |
|
"learning_rate": 7.286798179059181e-06, |
|
"loss": 0.2347, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.3087206002900561, |
|
"grad_norm": 2.762303590774536, |
|
"learning_rate": 7.277693474962064e-06, |
|
"loss": 0.2355, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.30958536387630275, |
|
"grad_norm": 2.68644118309021, |
|
"learning_rate": 7.2685887708649476e-06, |
|
"loss": 0.2308, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.3104501274625494, |
|
"grad_norm": 5.270909786224365, |
|
"learning_rate": 7.25948406676783e-06, |
|
"loss": 0.2781, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.31131489104879606, |
|
"grad_norm": 1.9733515977859497, |
|
"learning_rate": 7.250379362670714e-06, |
|
"loss": 0.2465, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.31217965463504277, |
|
"grad_norm": 4.668141841888428, |
|
"learning_rate": 7.241274658573597e-06, |
|
"loss": 0.2743, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.3130444182212894, |
|
"grad_norm": 5.158870220184326, |
|
"learning_rate": 7.23216995447648e-06, |
|
"loss": 0.2702, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.3139091818075361, |
|
"grad_norm": 20.275535583496094, |
|
"learning_rate": 7.223065250379363e-06, |
|
"loss": 0.2609, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.31477394539378273, |
|
"grad_norm": 3.8496415615081787, |
|
"learning_rate": 7.213960546282246e-06, |
|
"loss": 0.2641, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.3156387089800294, |
|
"grad_norm": 4.2407073974609375, |
|
"learning_rate": 7.20485584218513e-06, |
|
"loss": 0.2255, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.31650347256627603, |
|
"grad_norm": 6.420517444610596, |
|
"learning_rate": 7.195751138088013e-06, |
|
"loss": 0.2743, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.31707998162377377, |
|
"eval_loss": 0.2349877655506134, |
|
"eval_mse": 0.2349877648057918, |
|
"eval_runtime": 64.4388, |
|
"eval_samples_per_second": 15.519, |
|
"eval_steps_per_second": 3.88, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.3173682361525227, |
|
"grad_norm": 3.0954813957214355, |
|
"learning_rate": 7.186646433990896e-06, |
|
"loss": 0.2681, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.31823299973876934, |
|
"grad_norm": 3.8299410343170166, |
|
"learning_rate": 7.1775417298937785e-06, |
|
"loss": 0.2378, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.319097763325016, |
|
"grad_norm": 2.5736284255981445, |
|
"learning_rate": 7.168437025796661e-06, |
|
"loss": 0.2441, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.31996252691126265, |
|
"grad_norm": 2.7379114627838135, |
|
"learning_rate": 7.159332321699546e-06, |
|
"loss": 0.2216, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.3208272904975093, |
|
"grad_norm": 11.14561653137207, |
|
"learning_rate": 7.150227617602429e-06, |
|
"loss": 0.2574, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.32169205408375595, |
|
"grad_norm": 3.286914348602295, |
|
"learning_rate": 7.141122913505312e-06, |
|
"loss": 0.2575, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.3225568176700026, |
|
"grad_norm": 5.4453020095825195, |
|
"learning_rate": 7.1320182094081944e-06, |
|
"loss": 0.2307, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.32342158125624926, |
|
"grad_norm": 3.9706311225891113, |
|
"learning_rate": 7.122913505311077e-06, |
|
"loss": 0.2622, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.3242863448424959, |
|
"grad_norm": 5.497633934020996, |
|
"learning_rate": 7.113808801213962e-06, |
|
"loss": 0.2433, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.32515110842874256, |
|
"grad_norm": 5.734874725341797, |
|
"learning_rate": 7.104704097116845e-06, |
|
"loss": 0.265, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.3260158720149892, |
|
"grad_norm": 1.9510211944580078, |
|
"learning_rate": 7.0955993930197275e-06, |
|
"loss": 0.26, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.32688063560123587, |
|
"grad_norm": 11.36854076385498, |
|
"learning_rate": 7.08649468892261e-06, |
|
"loss": 0.2378, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.3277453991874825, |
|
"grad_norm": 1.991607666015625, |
|
"learning_rate": 7.077389984825493e-06, |
|
"loss": 0.2399, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.3286101627737292, |
|
"grad_norm": 4.413910388946533, |
|
"learning_rate": 7.068285280728376e-06, |
|
"loss": 0.2411, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.3294749263599759, |
|
"grad_norm": 4.106217861175537, |
|
"learning_rate": 7.0591805766312606e-06, |
|
"loss": 0.2713, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.33033968994622254, |
|
"grad_norm": 2.9739654064178467, |
|
"learning_rate": 7.050075872534143e-06, |
|
"loss": 0.2463, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.3312044535324692, |
|
"grad_norm": 6.642212390899658, |
|
"learning_rate": 7.040971168437026e-06, |
|
"loss": 0.2773, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.33206921711871584, |
|
"grad_norm": 4.020822048187256, |
|
"learning_rate": 7.031866464339909e-06, |
|
"loss": 0.2493, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.3329339807049625, |
|
"grad_norm": 4.196906566619873, |
|
"learning_rate": 7.022761760242792e-06, |
|
"loss": 0.227, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.33379874429120915, |
|
"grad_norm": 3.0572118759155273, |
|
"learning_rate": 7.0136570561456765e-06, |
|
"loss": 0.2594, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.3346635078774558, |
|
"grad_norm": 2.7601091861724854, |
|
"learning_rate": 7.004552352048559e-06, |
|
"loss": 0.2299, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.33552827146370245, |
|
"grad_norm": 2.025371551513672, |
|
"learning_rate": 6.995447647951442e-06, |
|
"loss": 0.2484, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.3363930350499491, |
|
"grad_norm": 4.210418701171875, |
|
"learning_rate": 6.986342943854325e-06, |
|
"loss": 0.2247, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.33725779863619576, |
|
"grad_norm": 5.4730544090271, |
|
"learning_rate": 6.977238239757208e-06, |
|
"loss": 0.2616, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.3381225622224424, |
|
"grad_norm": 3.7742884159088135, |
|
"learning_rate": 6.968133535660092e-06, |
|
"loss": 0.2428, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.33898732580868907, |
|
"grad_norm": 3.491050958633423, |
|
"learning_rate": 6.959028831562975e-06, |
|
"loss": 0.2296, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.3398520893949357, |
|
"grad_norm": 2.1583101749420166, |
|
"learning_rate": 6.949924127465858e-06, |
|
"loss": 0.2649, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.34071685298118237, |
|
"grad_norm": 2.2460827827453613, |
|
"learning_rate": 6.940819423368741e-06, |
|
"loss": 0.2308, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.341581616567429, |
|
"grad_norm": 4.751939296722412, |
|
"learning_rate": 6.931714719271624e-06, |
|
"loss": 0.2491, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.3424463801536757, |
|
"grad_norm": 2.2470085620880127, |
|
"learning_rate": 6.922610015174508e-06, |
|
"loss": 0.2135, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.34331114373992233, |
|
"grad_norm": 3.2550578117370605, |
|
"learning_rate": 6.913505311077391e-06, |
|
"loss": 0.2345, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.344175907326169, |
|
"grad_norm": 2.159435749053955, |
|
"learning_rate": 6.904400606980274e-06, |
|
"loss": 0.2413, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.34504067091241564, |
|
"grad_norm": 23.409839630126953, |
|
"learning_rate": 6.895295902883157e-06, |
|
"loss": 0.2101, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.34590543449866235, |
|
"grad_norm": 3.5459580421447754, |
|
"learning_rate": 6.88619119878604e-06, |
|
"loss": 0.2383, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.34590543449866235, |
|
"eval_loss": 0.2423837035894394, |
|
"eval_mse": 0.2423837145415746, |
|
"eval_runtime": 64.1907, |
|
"eval_samples_per_second": 15.579, |
|
"eval_steps_per_second": 3.895, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.346770198084909, |
|
"grad_norm": 4.81601095199585, |
|
"learning_rate": 6.8770864946889225e-06, |
|
"loss": 0.2548, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.34763496167115565, |
|
"grad_norm": 1.821042776107788, |
|
"learning_rate": 6.867981790591807e-06, |
|
"loss": 0.2054, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.3484997252574023, |
|
"grad_norm": 2.1432528495788574, |
|
"learning_rate": 6.85887708649469e-06, |
|
"loss": 0.2324, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.34936448884364896, |
|
"grad_norm": 2.9332826137542725, |
|
"learning_rate": 6.849772382397573e-06, |
|
"loss": 0.2336, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.3502292524298956, |
|
"grad_norm": 6.8339619636535645, |
|
"learning_rate": 6.8406676783004556e-06, |
|
"loss": 0.2132, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.35109401601614226, |
|
"grad_norm": 2.1698455810546875, |
|
"learning_rate": 6.831562974203338e-06, |
|
"loss": 0.235, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.3519587796023889, |
|
"grad_norm": 4.076608657836914, |
|
"learning_rate": 6.822458270106223e-06, |
|
"loss": 0.2444, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.35282354318863557, |
|
"grad_norm": 2.6955060958862305, |
|
"learning_rate": 6.813353566009106e-06, |
|
"loss": 0.228, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.3536883067748822, |
|
"grad_norm": 2.3251149654388428, |
|
"learning_rate": 6.804248861911989e-06, |
|
"loss": 0.2501, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.3545530703611289, |
|
"grad_norm": 3.7349984645843506, |
|
"learning_rate": 6.7951441578148715e-06, |
|
"loss": 0.2332, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.3554178339473755, |
|
"grad_norm": 2.2955520153045654, |
|
"learning_rate": 6.786039453717754e-06, |
|
"loss": 0.2196, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.3562825975336222, |
|
"grad_norm": 2.053947687149048, |
|
"learning_rate": 6.776934749620638e-06, |
|
"loss": 0.2281, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.35714736111986883, |
|
"grad_norm": 1.7476141452789307, |
|
"learning_rate": 6.767830045523522e-06, |
|
"loss": 0.2403, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.3580121247061155, |
|
"grad_norm": 3.03908109664917, |
|
"learning_rate": 6.7587253414264045e-06, |
|
"loss": 0.266, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.35887688829236214, |
|
"grad_norm": 3.2814714908599854, |
|
"learning_rate": 6.749620637329287e-06, |
|
"loss": 0.2137, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.3597416518786088, |
|
"grad_norm": 2.523711681365967, |
|
"learning_rate": 6.74051593323217e-06, |
|
"loss": 0.2396, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.36060641546485545, |
|
"grad_norm": 2.1850292682647705, |
|
"learning_rate": 6.731411229135054e-06, |
|
"loss": 0.2297, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.3614711790511021, |
|
"grad_norm": 2.3782458305358887, |
|
"learning_rate": 6.722306525037937e-06, |
|
"loss": 0.2159, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.36233594263734875, |
|
"grad_norm": 4.745922088623047, |
|
"learning_rate": 6.7132018209408204e-06, |
|
"loss": 0.2111, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.36320070622359546, |
|
"grad_norm": 2.0191493034362793, |
|
"learning_rate": 6.704097116843703e-06, |
|
"loss": 0.2648, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.3640654698098421, |
|
"grad_norm": 61.380306243896484, |
|
"learning_rate": 6.694992412746586e-06, |
|
"loss": 0.2484, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.36493023339608877, |
|
"grad_norm": 3.0049686431884766, |
|
"learning_rate": 6.685887708649469e-06, |
|
"loss": 0.2456, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.3657949969823354, |
|
"grad_norm": 3.6466712951660156, |
|
"learning_rate": 6.676783004552353e-06, |
|
"loss": 0.3093, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.36665976056858207, |
|
"grad_norm": 2.6709084510803223, |
|
"learning_rate": 6.6676783004552355e-06, |
|
"loss": 0.2129, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.3675245241548287, |
|
"grad_norm": 3.721860408782959, |
|
"learning_rate": 6.658573596358119e-06, |
|
"loss": 0.2476, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.3683892877410754, |
|
"grad_norm": 3.2325708866119385, |
|
"learning_rate": 6.649468892261002e-06, |
|
"loss": 0.2281, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.36925405132732203, |
|
"grad_norm": 3.1280910968780518, |
|
"learning_rate": 6.640364188163885e-06, |
|
"loss": 0.2791, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.3701188149135687, |
|
"grad_norm": 2.1942126750946045, |
|
"learning_rate": 6.6312594840667686e-06, |
|
"loss": 0.2323, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.37098357849981534, |
|
"grad_norm": 4.829765796661377, |
|
"learning_rate": 6.622154779969651e-06, |
|
"loss": 0.204, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.371848342086062, |
|
"grad_norm": 2.7358474731445312, |
|
"learning_rate": 6.613050075872534e-06, |
|
"loss": 0.2484, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.37271310567230864, |
|
"grad_norm": 3.0888988971710205, |
|
"learning_rate": 6.603945371775418e-06, |
|
"loss": 0.251, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.3735778692585553, |
|
"grad_norm": 2.374920129776001, |
|
"learning_rate": 6.594840667678301e-06, |
|
"loss": 0.2513, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.37444263284480195, |
|
"grad_norm": 4.422918319702148, |
|
"learning_rate": 6.5857359635811845e-06, |
|
"loss": 0.2191, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.37473088737355087, |
|
"eval_loss": 0.22791720926761627, |
|
"eval_mse": 0.2279172136860434, |
|
"eval_runtime": 64.4369, |
|
"eval_samples_per_second": 15.519, |
|
"eval_steps_per_second": 3.88, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.3753073964310486, |
|
"grad_norm": 3.1236884593963623, |
|
"learning_rate": 6.576631259484067e-06, |
|
"loss": 0.2676, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.37617216001729525, |
|
"grad_norm": 2.3095321655273438, |
|
"learning_rate": 6.56752655538695e-06, |
|
"loss": 0.2373, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.3770369236035419, |
|
"grad_norm": 2.6782171726226807, |
|
"learning_rate": 6.558421851289834e-06, |
|
"loss": 0.2147, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.37790168718978856, |
|
"grad_norm": 2.2795357704162598, |
|
"learning_rate": 6.549317147192717e-06, |
|
"loss": 0.2458, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.3787664507760352, |
|
"grad_norm": 2.5536789894104004, |
|
"learning_rate": 6.5402124430956e-06, |
|
"loss": 0.2643, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.3796312143622819, |
|
"grad_norm": 2.8398616313934326, |
|
"learning_rate": 6.531107738998483e-06, |
|
"loss": 0.2647, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.3804959779485286, |
|
"grad_norm": 4.880849838256836, |
|
"learning_rate": 6.522003034901366e-06, |
|
"loss": 0.2151, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.3813607415347752, |
|
"grad_norm": 3.158966302871704, |
|
"learning_rate": 6.512898330804249e-06, |
|
"loss": 0.2884, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.3822255051210219, |
|
"grad_norm": 1.8538583517074585, |
|
"learning_rate": 6.503793626707133e-06, |
|
"loss": 0.2238, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.38309026870726853, |
|
"grad_norm": 3.2521581649780273, |
|
"learning_rate": 6.4946889226100154e-06, |
|
"loss": 0.2328, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.3839550322935152, |
|
"grad_norm": 2.3196299076080322, |
|
"learning_rate": 6.485584218512899e-06, |
|
"loss": 0.2885, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.38481979587976184, |
|
"grad_norm": 3.506424903869629, |
|
"learning_rate": 6.476479514415782e-06, |
|
"loss": 0.2208, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.3856845594660085, |
|
"grad_norm": 2.378120183944702, |
|
"learning_rate": 6.467374810318665e-06, |
|
"loss": 0.2187, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.38654932305225514, |
|
"grad_norm": 2.1336095333099365, |
|
"learning_rate": 6.458270106221548e-06, |
|
"loss": 0.2626, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.3874140866385018, |
|
"grad_norm": 1.8846355676651, |
|
"learning_rate": 6.449165402124431e-06, |
|
"loss": 0.2468, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.38827885022474845, |
|
"grad_norm": 3.833599805831909, |
|
"learning_rate": 6.440060698027315e-06, |
|
"loss": 0.2432, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.3891436138109951, |
|
"grad_norm": 2.8712267875671387, |
|
"learning_rate": 6.430955993930198e-06, |
|
"loss": 0.2383, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.39000837739724176, |
|
"grad_norm": 2.344010829925537, |
|
"learning_rate": 6.421851289833081e-06, |
|
"loss": 0.233, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.3908731409834884, |
|
"grad_norm": 4.56198263168335, |
|
"learning_rate": 6.4127465857359636e-06, |
|
"loss": 0.2583, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.39173790456973506, |
|
"grad_norm": 3.3095390796661377, |
|
"learning_rate": 6.403641881638846e-06, |
|
"loss": 0.2228, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.3926026681559817, |
|
"grad_norm": 5.623388290405273, |
|
"learning_rate": 6.394537177541731e-06, |
|
"loss": 0.2425, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.39346743174222837, |
|
"grad_norm": 4.270349502563477, |
|
"learning_rate": 6.385432473444614e-06, |
|
"loss": 0.2283, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.394332195328475, |
|
"grad_norm": 4.91237211227417, |
|
"learning_rate": 6.376327769347497e-06, |
|
"loss": 0.2472, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.3951969589147217, |
|
"grad_norm": 3.120549440383911, |
|
"learning_rate": 6.3672230652503795e-06, |
|
"loss": 0.2724, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.3960617225009683, |
|
"grad_norm": 2.615607500076294, |
|
"learning_rate": 6.358118361153262e-06, |
|
"loss": 0.2779, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.39692648608721504, |
|
"grad_norm": 3.6636528968811035, |
|
"learning_rate": 6.349013657056147e-06, |
|
"loss": 0.2561, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.3977912496734617, |
|
"grad_norm": 3.095202684402466, |
|
"learning_rate": 6.33990895295903e-06, |
|
"loss": 0.2556, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.39865601325970834, |
|
"grad_norm": 1.8077003955841064, |
|
"learning_rate": 6.3308042488619125e-06, |
|
"loss": 0.2337, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.399520776845955, |
|
"grad_norm": 1.9146672487258911, |
|
"learning_rate": 6.321699544764795e-06, |
|
"loss": 0.206, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.40038554043220165, |
|
"grad_norm": 1.8629751205444336, |
|
"learning_rate": 6.312594840667678e-06, |
|
"loss": 0.2303, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.4012503040184483, |
|
"grad_norm": 3.0480451583862305, |
|
"learning_rate": 6.303490136570563e-06, |
|
"loss": 0.2745, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.40211506760469495, |
|
"grad_norm": 2.0094082355499268, |
|
"learning_rate": 6.294385432473446e-06, |
|
"loss": 0.2122, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.4029798311909416, |
|
"grad_norm": 3.787971019744873, |
|
"learning_rate": 6.2852807283763284e-06, |
|
"loss": 0.2431, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.4035563402484394, |
|
"eval_loss": 0.22321856021881104, |
|
"eval_mse": 0.2232185562737286, |
|
"eval_runtime": 64.7184, |
|
"eval_samples_per_second": 15.452, |
|
"eval_steps_per_second": 3.863, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.40384459477718826, |
|
"grad_norm": 2.886836290359497, |
|
"learning_rate": 6.276176024279211e-06, |
|
"loss": 0.1921, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.4047093583634349, |
|
"grad_norm": 3.5917375087738037, |
|
"learning_rate": 6.267071320182094e-06, |
|
"loss": 0.2708, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.40557412194968157, |
|
"grad_norm": 1.947647213935852, |
|
"learning_rate": 6.257966616084977e-06, |
|
"loss": 0.2386, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.4064388855359282, |
|
"grad_norm": 21.491071701049805, |
|
"learning_rate": 6.2488619119878615e-06, |
|
"loss": 0.2368, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.40730364912217487, |
|
"grad_norm": 2.2164909839630127, |
|
"learning_rate": 6.239757207890744e-06, |
|
"loss": 0.2206, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.4081684127084215, |
|
"grad_norm": 4.416579246520996, |
|
"learning_rate": 6.230652503793627e-06, |
|
"loss": 0.2655, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.4090331762946682, |
|
"grad_norm": 2.469964027404785, |
|
"learning_rate": 6.22154779969651e-06, |
|
"loss": 0.2633, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.40989793988091483, |
|
"grad_norm": 2.6230666637420654, |
|
"learning_rate": 6.212443095599393e-06, |
|
"loss": 0.2043, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.4107627034671615, |
|
"grad_norm": 2.4938716888427734, |
|
"learning_rate": 6.203338391502277e-06, |
|
"loss": 0.2508, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.41162746705340814, |
|
"grad_norm": 2.508373975753784, |
|
"learning_rate": 6.19423368740516e-06, |
|
"loss": 0.2663, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.4124922306396548, |
|
"grad_norm": 2.955127716064453, |
|
"learning_rate": 6.185128983308043e-06, |
|
"loss": 0.2933, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.4133569942259015, |
|
"grad_norm": 2.3575446605682373, |
|
"learning_rate": 6.176024279210926e-06, |
|
"loss": 0.2369, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.41422175781214815, |
|
"grad_norm": 2.6992990970611572, |
|
"learning_rate": 6.166919575113809e-06, |
|
"loss": 0.2584, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.4150865213983948, |
|
"grad_norm": 3.9453766345977783, |
|
"learning_rate": 6.157814871016693e-06, |
|
"loss": 0.27, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.41595128498464146, |
|
"grad_norm": 2.111201286315918, |
|
"learning_rate": 6.148710166919576e-06, |
|
"loss": 0.2312, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.4168160485708881, |
|
"grad_norm": 2.37361741065979, |
|
"learning_rate": 6.139605462822459e-06, |
|
"loss": 0.2638, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.41768081215713476, |
|
"grad_norm": 5.3990631103515625, |
|
"learning_rate": 6.130500758725342e-06, |
|
"loss": 0.242, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.4185455757433814, |
|
"grad_norm": 3.4745547771453857, |
|
"learning_rate": 6.121396054628225e-06, |
|
"loss": 0.2404, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.41941033932962807, |
|
"grad_norm": 2.619647979736328, |
|
"learning_rate": 6.112291350531108e-06, |
|
"loss": 0.211, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.4202751029158747, |
|
"grad_norm": 5.034475326538086, |
|
"learning_rate": 6.103186646433992e-06, |
|
"loss": 0.2398, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.4211398665021214, |
|
"grad_norm": 3.0307469367980957, |
|
"learning_rate": 6.094081942336875e-06, |
|
"loss": 0.2708, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.422004630088368, |
|
"grad_norm": 7.210414409637451, |
|
"learning_rate": 6.084977238239758e-06, |
|
"loss": 0.2348, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.4228693936746147, |
|
"grad_norm": 2.5068700313568115, |
|
"learning_rate": 6.075872534142641e-06, |
|
"loss": 0.2168, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.42373415726086133, |
|
"grad_norm": 1.664629578590393, |
|
"learning_rate": 6.0667678300455234e-06, |
|
"loss": 0.2166, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.424598920847108, |
|
"grad_norm": 2.4851441383361816, |
|
"learning_rate": 6.057663125948408e-06, |
|
"loss": 0.2202, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.42546368443335464, |
|
"grad_norm": 3.4534077644348145, |
|
"learning_rate": 6.048558421851291e-06, |
|
"loss": 0.2236, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.4263284480196013, |
|
"grad_norm": 1.7571723461151123, |
|
"learning_rate": 6.039453717754174e-06, |
|
"loss": 0.2499, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.42719321160584794, |
|
"grad_norm": 6.117722034454346, |
|
"learning_rate": 6.0303490136570565e-06, |
|
"loss": 0.2408, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.4280579751920946, |
|
"grad_norm": 6.158169746398926, |
|
"learning_rate": 6.021244309559939e-06, |
|
"loss": 0.2673, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.42892273877834125, |
|
"grad_norm": 2.713707208633423, |
|
"learning_rate": 6.012139605462823e-06, |
|
"loss": 0.2544, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.42978750236458796, |
|
"grad_norm": 2.2053639888763428, |
|
"learning_rate": 6.003034901365707e-06, |
|
"loss": 0.2218, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.4306522659508346, |
|
"grad_norm": 2.291422128677368, |
|
"learning_rate": 5.9939301972685896e-06, |
|
"loss": 0.1924, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.43151702953708126, |
|
"grad_norm": 4.670581817626953, |
|
"learning_rate": 5.984825493171472e-06, |
|
"loss": 0.209, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.4323817931233279, |
|
"grad_norm": 2.0881216526031494, |
|
"learning_rate": 5.975720789074355e-06, |
|
"loss": 0.2161, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.4323817931233279, |
|
"eval_loss": 0.23065043985843658, |
|
"eval_mse": 0.23065044428501277, |
|
"eval_runtime": 64.6792, |
|
"eval_samples_per_second": 15.461, |
|
"eval_steps_per_second": 3.865, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.43324655670957457, |
|
"grad_norm": 4.005910396575928, |
|
"learning_rate": 5.966616084977239e-06, |
|
"loss": 0.2585, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.4341113202958212, |
|
"grad_norm": 3.212456703186035, |
|
"learning_rate": 5.957511380880122e-06, |
|
"loss": 0.2328, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.4349760838820679, |
|
"grad_norm": 17.725706100463867, |
|
"learning_rate": 5.9484066767830055e-06, |
|
"loss": 0.2658, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.43584084746831453, |
|
"grad_norm": 5.476046562194824, |
|
"learning_rate": 5.939301972685888e-06, |
|
"loss": 0.2512, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.4367056110545612, |
|
"grad_norm": 6.102067470550537, |
|
"learning_rate": 5.930197268588771e-06, |
|
"loss": 0.2248, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.43757037464080784, |
|
"grad_norm": 1.9437217712402344, |
|
"learning_rate": 5.921092564491655e-06, |
|
"loss": 0.2424, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.4384351382270545, |
|
"grad_norm": 4.161287307739258, |
|
"learning_rate": 5.911987860394538e-06, |
|
"loss": 0.238, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.43929990181330114, |
|
"grad_norm": 4.9400458335876465, |
|
"learning_rate": 5.9028831562974205e-06, |
|
"loss": 0.2587, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.4401646653995478, |
|
"grad_norm": 3.344257354736328, |
|
"learning_rate": 5.893778452200304e-06, |
|
"loss": 0.259, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.44102942898579445, |
|
"grad_norm": 3.5931825637817383, |
|
"learning_rate": 5.884673748103187e-06, |
|
"loss": 0.2444, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.4418941925720411, |
|
"grad_norm": 2.649014949798584, |
|
"learning_rate": 5.87556904400607e-06, |
|
"loss": 0.2303, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.44275895615828775, |
|
"grad_norm": 2.2219338417053223, |
|
"learning_rate": 5.866464339908954e-06, |
|
"loss": 0.2177, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.4436237197445344, |
|
"grad_norm": 1.748891830444336, |
|
"learning_rate": 5.8573596358118364e-06, |
|
"loss": 0.2048, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.44448848333078106, |
|
"grad_norm": 1.8763848543167114, |
|
"learning_rate": 5.848254931714719e-06, |
|
"loss": 0.2451, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.4453532469170277, |
|
"grad_norm": 3.2289862632751465, |
|
"learning_rate": 5.839150227617603e-06, |
|
"loss": 0.2773, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.44621801050327436, |
|
"grad_norm": 2.1212282180786133, |
|
"learning_rate": 5.830045523520486e-06, |
|
"loss": 0.2212, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.4470827740895211, |
|
"grad_norm": 3.2190654277801514, |
|
"learning_rate": 5.8209408194233695e-06, |
|
"loss": 0.2574, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.4479475376757677, |
|
"grad_norm": 2.9537158012390137, |
|
"learning_rate": 5.811836115326252e-06, |
|
"loss": 0.2216, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.4488123012620144, |
|
"grad_norm": 4.764918327331543, |
|
"learning_rate": 5.802731411229135e-06, |
|
"loss": 0.2479, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.44967706484826103, |
|
"grad_norm": 2.5907225608825684, |
|
"learning_rate": 5.793626707132019e-06, |
|
"loss": 0.2266, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.4505418284345077, |
|
"grad_norm": 6.063370704650879, |
|
"learning_rate": 5.784522003034902e-06, |
|
"loss": 0.2378, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.45140659202075434, |
|
"grad_norm": 2.465559720993042, |
|
"learning_rate": 5.775417298937785e-06, |
|
"loss": 0.2762, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.452271355607001, |
|
"grad_norm": 2.4620933532714844, |
|
"learning_rate": 5.766312594840668e-06, |
|
"loss": 0.2412, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.45313611919324764, |
|
"grad_norm": 1.7580320835113525, |
|
"learning_rate": 5.757207890743551e-06, |
|
"loss": 0.2045, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.4540008827794943, |
|
"grad_norm": 3.2482638359069824, |
|
"learning_rate": 5.748103186646434e-06, |
|
"loss": 0.2528, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.45486564636574095, |
|
"grad_norm": 1.7835358381271362, |
|
"learning_rate": 5.738998482549318e-06, |
|
"loss": 0.2245, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.4557304099519876, |
|
"grad_norm": 2.004756450653076, |
|
"learning_rate": 5.729893778452201e-06, |
|
"loss": 0.246, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.45659517353823426, |
|
"grad_norm": 2.1401569843292236, |
|
"learning_rate": 5.720789074355084e-06, |
|
"loss": 0.221, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.4574599371244809, |
|
"grad_norm": 2.868241786956787, |
|
"learning_rate": 5.711684370257967e-06, |
|
"loss": 0.2211, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.45832470071072756, |
|
"grad_norm": 2.1547744274139404, |
|
"learning_rate": 5.70257966616085e-06, |
|
"loss": 0.2259, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.4591894642969742, |
|
"grad_norm": 3.7882297039031982, |
|
"learning_rate": 5.693474962063733e-06, |
|
"loss": 0.2486, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.46005422788322087, |
|
"grad_norm": 2.8157854080200195, |
|
"learning_rate": 5.684370257966616e-06, |
|
"loss": 0.2299, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.4609189914694675, |
|
"grad_norm": 2.409140110015869, |
|
"learning_rate": 5.6752655538695e-06, |
|
"loss": 0.2459, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.46120724599821644, |
|
"eval_loss": 0.2246265709400177, |
|
"eval_mse": 0.22462656778097154, |
|
"eval_runtime": 64.6238, |
|
"eval_samples_per_second": 15.474, |
|
"eval_steps_per_second": 3.869, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.4617837550557142, |
|
"grad_norm": 4.253114223480225, |
|
"learning_rate": 5.666160849772383e-06, |
|
"loss": 0.2543, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.4626485186419608, |
|
"grad_norm": 1.6027945280075073, |
|
"learning_rate": 5.657056145675266e-06, |
|
"loss": 0.224, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.46351328222820753, |
|
"grad_norm": 4.508523941040039, |
|
"learning_rate": 5.647951441578149e-06, |
|
"loss": 0.2278, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.4643780458144542, |
|
"grad_norm": 2.6588051319122314, |
|
"learning_rate": 5.6388467374810314e-06, |
|
"loss": 0.2368, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.46524280940070084, |
|
"grad_norm": 2.123340129852295, |
|
"learning_rate": 5.629742033383916e-06, |
|
"loss": 0.2308, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.4661075729869475, |
|
"grad_norm": 4.028223991394043, |
|
"learning_rate": 5.620637329286799e-06, |
|
"loss": 0.2405, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.46697233657319415, |
|
"grad_norm": 1.7611653804779053, |
|
"learning_rate": 5.611532625189682e-06, |
|
"loss": 0.2131, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.4678371001594408, |
|
"grad_norm": 2.331615924835205, |
|
"learning_rate": 5.6024279210925645e-06, |
|
"loss": 0.2435, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.46870186374568745, |
|
"grad_norm": 1.896458625793457, |
|
"learning_rate": 5.593323216995447e-06, |
|
"loss": 0.2447, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.4695666273319341, |
|
"grad_norm": 3.185960292816162, |
|
"learning_rate": 5.584218512898332e-06, |
|
"loss": 0.2412, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.47043139091818076, |
|
"grad_norm": 2.1739768981933594, |
|
"learning_rate": 5.575113808801215e-06, |
|
"loss": 0.2396, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.4712961545044274, |
|
"grad_norm": 1.8949024677276611, |
|
"learning_rate": 5.5660091047040976e-06, |
|
"loss": 0.2152, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.47216091809067406, |
|
"grad_norm": 2.536170482635498, |
|
"learning_rate": 5.55690440060698e-06, |
|
"loss": 0.2447, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.4730256816769207, |
|
"grad_norm": 2.3221828937530518, |
|
"learning_rate": 5.547799696509863e-06, |
|
"loss": 0.2497, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.47389044526316737, |
|
"grad_norm": 2.3624842166900635, |
|
"learning_rate": 5.538694992412748e-06, |
|
"loss": 0.2058, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.474755208849414, |
|
"grad_norm": 2.1354258060455322, |
|
"learning_rate": 5.529590288315631e-06, |
|
"loss": 0.2133, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.4756199724356607, |
|
"grad_norm": 2.2622838020324707, |
|
"learning_rate": 5.5204855842185135e-06, |
|
"loss": 0.247, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.47648473602190733, |
|
"grad_norm": 3.781003952026367, |
|
"learning_rate": 5.511380880121396e-06, |
|
"loss": 0.2231, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.477349499608154, |
|
"grad_norm": 4.45161247253418, |
|
"learning_rate": 5.502276176024279e-06, |
|
"loss": 0.26, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.47821426319440064, |
|
"grad_norm": 2.7861571311950684, |
|
"learning_rate": 5.493171471927162e-06, |
|
"loss": 0.21, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.4790790267806473, |
|
"grad_norm": 2.1872150897979736, |
|
"learning_rate": 5.4840667678300465e-06, |
|
"loss": 0.2048, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.47994379036689394, |
|
"grad_norm": 5.789909839630127, |
|
"learning_rate": 5.474962063732929e-06, |
|
"loss": 0.2213, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.48080855395314065, |
|
"grad_norm": 2.418182849884033, |
|
"learning_rate": 5.465857359635812e-06, |
|
"loss": 0.27, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.4816733175393873, |
|
"grad_norm": 2.442600727081299, |
|
"learning_rate": 5.456752655538695e-06, |
|
"loss": 0.2234, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.48253808112563396, |
|
"grad_norm": 5.272119522094727, |
|
"learning_rate": 5.447647951441578e-06, |
|
"loss": 0.2374, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.4834028447118806, |
|
"grad_norm": 2.1721715927124023, |
|
"learning_rate": 5.4385432473444624e-06, |
|
"loss": 0.2081, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.48426760829812726, |
|
"grad_norm": 2.432861328125, |
|
"learning_rate": 5.429438543247345e-06, |
|
"loss": 0.2488, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.4851323718843739, |
|
"grad_norm": 2.196579933166504, |
|
"learning_rate": 5.420333839150228e-06, |
|
"loss": 0.231, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.48599713547062057, |
|
"grad_norm": 2.4169063568115234, |
|
"learning_rate": 5.411229135053111e-06, |
|
"loss": 0.2145, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.4868618990568672, |
|
"grad_norm": 3.8229868412017822, |
|
"learning_rate": 5.402124430955994e-06, |
|
"loss": 0.2518, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.4877266626431139, |
|
"grad_norm": 2.484478235244751, |
|
"learning_rate": 5.393019726858878e-06, |
|
"loss": 0.2145, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.4885914262293605, |
|
"grad_norm": 2.7220754623413086, |
|
"learning_rate": 5.383915022761761e-06, |
|
"loss": 0.2584, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.4894561898156072, |
|
"grad_norm": 2.954333543777466, |
|
"learning_rate": 5.374810318664644e-06, |
|
"loss": 0.2403, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.49003269887310497, |
|
"eval_loss": 0.22324734926223755, |
|
"eval_mse": 0.22324735438660717, |
|
"eval_runtime": 64.5172, |
|
"eval_samples_per_second": 15.5, |
|
"eval_steps_per_second": 3.875, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.49032095340185383, |
|
"grad_norm": 3.1045334339141846, |
|
"learning_rate": 5.365705614567527e-06, |
|
"loss": 0.2655, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.4911857169881005, |
|
"grad_norm": 2.5739636421203613, |
|
"learning_rate": 5.35660091047041e-06, |
|
"loss": 0.245, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.49205048057434714, |
|
"grad_norm": 2.3004565238952637, |
|
"learning_rate": 5.347496206373293e-06, |
|
"loss": 0.2261, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.4929152441605938, |
|
"grad_norm": 2.6713101863861084, |
|
"learning_rate": 5.338391502276177e-06, |
|
"loss": 0.2152, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.49378000774684044, |
|
"grad_norm": 2.055208683013916, |
|
"learning_rate": 5.32928679817906e-06, |
|
"loss": 0.2247, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.4946447713330871, |
|
"grad_norm": 2.4551613330841064, |
|
"learning_rate": 5.320182094081943e-06, |
|
"loss": 0.2494, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.49550953491933375, |
|
"grad_norm": 2.6361169815063477, |
|
"learning_rate": 5.311077389984826e-06, |
|
"loss": 0.261, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.4963742985055804, |
|
"grad_norm": 2.158512830734253, |
|
"learning_rate": 5.301972685887709e-06, |
|
"loss": 0.2106, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.4972390620918271, |
|
"grad_norm": 2.550480365753174, |
|
"learning_rate": 5.292867981790593e-06, |
|
"loss": 0.1984, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.49810382567807376, |
|
"grad_norm": 5.54836893081665, |
|
"learning_rate": 5.283763277693476e-06, |
|
"loss": 0.2339, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.4989685892643204, |
|
"grad_norm": 1.3806259632110596, |
|
"learning_rate": 5.274658573596359e-06, |
|
"loss": 0.2622, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.49983335285056707, |
|
"grad_norm": 2.9417316913604736, |
|
"learning_rate": 5.2655538694992415e-06, |
|
"loss": 0.2401, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.5006981164368137, |
|
"grad_norm": 3.094886541366577, |
|
"learning_rate": 5.256449165402124e-06, |
|
"loss": 0.2189, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.5015628800230604, |
|
"grad_norm": 2.4346110820770264, |
|
"learning_rate": 5.247344461305008e-06, |
|
"loss": 0.2029, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.502427643609307, |
|
"grad_norm": 2.5410044193267822, |
|
"learning_rate": 5.238239757207892e-06, |
|
"loss": 0.2417, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.5032924071955537, |
|
"grad_norm": 2.1265604496002197, |
|
"learning_rate": 5.229135053110775e-06, |
|
"loss": 0.2476, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.5041571707818003, |
|
"grad_norm": 2.449608087539673, |
|
"learning_rate": 5.2200303490136574e-06, |
|
"loss": 0.2413, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.505021934368047, |
|
"grad_norm": 5.359190464019775, |
|
"learning_rate": 5.21092564491654e-06, |
|
"loss": 0.2459, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.5058866979542936, |
|
"grad_norm": 6.479160785675049, |
|
"learning_rate": 5.201820940819424e-06, |
|
"loss": 0.2257, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.5067514615405403, |
|
"grad_norm": 3.1292057037353516, |
|
"learning_rate": 5.192716236722307e-06, |
|
"loss": 0.2508, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.507616225126787, |
|
"grad_norm": 5.284337997436523, |
|
"learning_rate": 5.1836115326251905e-06, |
|
"loss": 0.2817, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.5084809887130336, |
|
"grad_norm": 2.8322136402130127, |
|
"learning_rate": 5.174506828528073e-06, |
|
"loss": 0.2358, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.5093457522992803, |
|
"grad_norm": 2.5609347820281982, |
|
"learning_rate": 5.165402124430956e-06, |
|
"loss": 0.2321, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.5102105158855269, |
|
"grad_norm": 3.2140045166015625, |
|
"learning_rate": 5.15629742033384e-06, |
|
"loss": 0.2335, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.5110752794717736, |
|
"grad_norm": 2.619800567626953, |
|
"learning_rate": 5.147192716236723e-06, |
|
"loss": 0.2544, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.5119400430580202, |
|
"grad_norm": 3.5388903617858887, |
|
"learning_rate": 5.1380880121396055e-06, |
|
"loss": 0.2797, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 0.5128048066442669, |
|
"grad_norm": 4.132437705993652, |
|
"learning_rate": 5.128983308042489e-06, |
|
"loss": 0.2722, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 0.5136695702305135, |
|
"grad_norm": 1.7023959159851074, |
|
"learning_rate": 5.119878603945372e-06, |
|
"loss": 0.2563, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 0.5145343338167602, |
|
"grad_norm": 3.2654476165771484, |
|
"learning_rate": 5.110773899848256e-06, |
|
"loss": 0.2547, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.5153990974030068, |
|
"grad_norm": 2.9560678005218506, |
|
"learning_rate": 5.101669195751139e-06, |
|
"loss": 0.2019, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 0.5162638609892535, |
|
"grad_norm": 3.0013792514801025, |
|
"learning_rate": 5.0925644916540215e-06, |
|
"loss": 0.2237, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 0.5171286245755001, |
|
"grad_norm": 5.049393653869629, |
|
"learning_rate": 5.083459787556905e-06, |
|
"loss": 0.2326, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 0.5179933881617468, |
|
"grad_norm": 2.8161003589630127, |
|
"learning_rate": 5.074355083459788e-06, |
|
"loss": 0.255, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 0.5188581517479934, |
|
"grad_norm": 3.8299570083618164, |
|
"learning_rate": 5.065250379362671e-06, |
|
"loss": 0.251, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.5188581517479934, |
|
"eval_loss": 0.24214114248752594, |
|
"eval_mse": 0.2421411427827552, |
|
"eval_runtime": 64.4904, |
|
"eval_samples_per_second": 15.506, |
|
"eval_steps_per_second": 3.877, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.5197229153342401, |
|
"grad_norm": 6.20158576965332, |
|
"learning_rate": 5.0561456752655545e-06, |
|
"loss": 0.2574, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 0.5205876789204867, |
|
"grad_norm": 3.9144914150238037, |
|
"learning_rate": 5.047040971168437e-06, |
|
"loss": 0.2249, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 0.5214524425067334, |
|
"grad_norm": 4.534792423248291, |
|
"learning_rate": 5.03793626707132e-06, |
|
"loss": 0.2357, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 0.52231720609298, |
|
"grad_norm": 2.006342649459839, |
|
"learning_rate": 5.028831562974204e-06, |
|
"loss": 0.2434, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 0.5231819696792268, |
|
"grad_norm": 2.1051185131073, |
|
"learning_rate": 5.019726858877087e-06, |
|
"loss": 0.2159, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.5240467332654735, |
|
"grad_norm": 2.2918484210968018, |
|
"learning_rate": 5.0106221547799704e-06, |
|
"loss": 0.2254, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 0.5249114968517201, |
|
"grad_norm": 2.324146270751953, |
|
"learning_rate": 5.001517450682853e-06, |
|
"loss": 0.2593, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 0.5257762604379668, |
|
"grad_norm": 2.0753884315490723, |
|
"learning_rate": 4.992412746585736e-06, |
|
"loss": 0.2344, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 0.5266410240242134, |
|
"grad_norm": 2.431459426879883, |
|
"learning_rate": 4.983308042488619e-06, |
|
"loss": 0.2544, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 0.5275057876104601, |
|
"grad_norm": 3.231693983078003, |
|
"learning_rate": 4.974203338391503e-06, |
|
"loss": 0.2232, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.5283705511967067, |
|
"grad_norm": 4.636984825134277, |
|
"learning_rate": 4.9650986342943855e-06, |
|
"loss": 0.2505, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 0.5292353147829534, |
|
"grad_norm": 1.6613258123397827, |
|
"learning_rate": 4.955993930197269e-06, |
|
"loss": 0.2259, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 0.5301000783692, |
|
"grad_norm": 2.0384411811828613, |
|
"learning_rate": 4.946889226100152e-06, |
|
"loss": 0.2459, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 0.5309648419554467, |
|
"grad_norm": 3.353332281112671, |
|
"learning_rate": 4.937784522003035e-06, |
|
"loss": 0.2132, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 0.5318296055416933, |
|
"grad_norm": 2.3782637119293213, |
|
"learning_rate": 4.9286798179059185e-06, |
|
"loss": 0.2531, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.53269436912794, |
|
"grad_norm": 2.1125853061676025, |
|
"learning_rate": 4.919575113808801e-06, |
|
"loss": 0.247, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 0.5335591327141866, |
|
"grad_norm": 2.698594808578491, |
|
"learning_rate": 4.910470409711684e-06, |
|
"loss": 0.2599, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 0.5344238963004333, |
|
"grad_norm": 3.285856008529663, |
|
"learning_rate": 4.901365705614568e-06, |
|
"loss": 0.2242, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 0.53528865988668, |
|
"grad_norm": 2.558708429336548, |
|
"learning_rate": 4.892261001517451e-06, |
|
"loss": 0.2629, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 0.5361534234729266, |
|
"grad_norm": 1.9437116384506226, |
|
"learning_rate": 4.8831562974203345e-06, |
|
"loss": 0.2363, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.5370181870591733, |
|
"grad_norm": 1.8736016750335693, |
|
"learning_rate": 4.874051593323217e-06, |
|
"loss": 0.231, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 0.5378829506454199, |
|
"grad_norm": 3.5459728240966797, |
|
"learning_rate": 4.8649468892261e-06, |
|
"loss": 0.2617, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 0.5387477142316666, |
|
"grad_norm": 2.2355449199676514, |
|
"learning_rate": 4.855842185128984e-06, |
|
"loss": 0.221, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 0.5396124778179132, |
|
"grad_norm": 3.19442081451416, |
|
"learning_rate": 4.846737481031867e-06, |
|
"loss": 0.2517, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 0.5404772414041599, |
|
"grad_norm": 3.5516788959503174, |
|
"learning_rate": 4.8376327769347495e-06, |
|
"loss": 0.228, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.5413420049904065, |
|
"grad_norm": 3.464395523071289, |
|
"learning_rate": 4.828528072837633e-06, |
|
"loss": 0.2469, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 0.5422067685766532, |
|
"grad_norm": 2.241856813430786, |
|
"learning_rate": 4.819423368740516e-06, |
|
"loss": 0.239, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 0.5430715321628998, |
|
"grad_norm": 2.296264886856079, |
|
"learning_rate": 4.8103186646434e-06, |
|
"loss": 0.204, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 0.5439362957491465, |
|
"grad_norm": 1.9935134649276733, |
|
"learning_rate": 4.801213960546283e-06, |
|
"loss": 0.237, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 0.5448010593353931, |
|
"grad_norm": 4.519099235534668, |
|
"learning_rate": 4.792109256449165e-06, |
|
"loss": 0.2981, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.5456658229216398, |
|
"grad_norm": 2.157167673110962, |
|
"learning_rate": 4.783004552352049e-06, |
|
"loss": 0.221, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 0.5465305865078864, |
|
"grad_norm": 1.8143630027770996, |
|
"learning_rate": 4.773899848254932e-06, |
|
"loss": 0.2221, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 0.5473953500941331, |
|
"grad_norm": 3.750279188156128, |
|
"learning_rate": 4.764795144157816e-06, |
|
"loss": 0.2565, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 0.547683604622882, |
|
"eval_loss": 0.2207367867231369, |
|
"eval_mse": 0.2207367917916272, |
|
"eval_runtime": 64.5517, |
|
"eval_samples_per_second": 15.491, |
|
"eval_steps_per_second": 3.873, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.5482601136803797, |
|
"grad_norm": 2.6302292346954346, |
|
"learning_rate": 4.7556904400606985e-06, |
|
"loss": 0.2202, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 0.5491248772666264, |
|
"grad_norm": 13.511824607849121, |
|
"learning_rate": 4.746585735963581e-06, |
|
"loss": 0.2335, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.549989640852873, |
|
"grad_norm": 2.213571548461914, |
|
"learning_rate": 4.737481031866465e-06, |
|
"loss": 0.2219, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 0.5508544044391197, |
|
"grad_norm": 2.6418211460113525, |
|
"learning_rate": 4.728376327769348e-06, |
|
"loss": 0.2317, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 0.5517191680253664, |
|
"grad_norm": 2.2119081020355225, |
|
"learning_rate": 4.719271623672231e-06, |
|
"loss": 0.2468, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 0.552583931611613, |
|
"grad_norm": 2.070915460586548, |
|
"learning_rate": 4.710166919575114e-06, |
|
"loss": 0.2503, |
|
"step": 1917 |
|
}, |
|
{ |
|
"epoch": 0.5534486951978597, |
|
"grad_norm": 3.188049077987671, |
|
"learning_rate": 4.701062215477997e-06, |
|
"loss": 0.2212, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.5543134587841063, |
|
"grad_norm": 2.545793294906616, |
|
"learning_rate": 4.691957511380881e-06, |
|
"loss": 0.2107, |
|
"step": 1923 |
|
}, |
|
{ |
|
"epoch": 0.555178222370353, |
|
"grad_norm": 1.9591479301452637, |
|
"learning_rate": 4.682852807283764e-06, |
|
"loss": 0.2208, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 0.5560429859565996, |
|
"grad_norm": 4.691300392150879, |
|
"learning_rate": 4.673748103186647e-06, |
|
"loss": 0.2373, |
|
"step": 1929 |
|
}, |
|
{ |
|
"epoch": 0.5569077495428464, |
|
"grad_norm": 2.1649136543273926, |
|
"learning_rate": 4.66464339908953e-06, |
|
"loss": 0.2126, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 0.557772513129093, |
|
"grad_norm": 1.9450371265411377, |
|
"learning_rate": 4.655538694992413e-06, |
|
"loss": 0.2137, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.5586372767153397, |
|
"grad_norm": 2.2597336769104004, |
|
"learning_rate": 4.646433990895296e-06, |
|
"loss": 0.2073, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 0.5595020403015863, |
|
"grad_norm": 1.523226022720337, |
|
"learning_rate": 4.63732928679818e-06, |
|
"loss": 0.2259, |
|
"step": 1941 |
|
}, |
|
{ |
|
"epoch": 0.560366803887833, |
|
"grad_norm": 2.4895904064178467, |
|
"learning_rate": 4.6282245827010625e-06, |
|
"loss": 0.2412, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 0.5612315674740797, |
|
"grad_norm": 1.6229183673858643, |
|
"learning_rate": 4.619119878603946e-06, |
|
"loss": 0.213, |
|
"step": 1947 |
|
}, |
|
{ |
|
"epoch": 0.5620963310603263, |
|
"grad_norm": 3.478339672088623, |
|
"learning_rate": 4.610015174506829e-06, |
|
"loss": 0.2821, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.562961094646573, |
|
"grad_norm": 1.5027942657470703, |
|
"learning_rate": 4.600910470409712e-06, |
|
"loss": 0.2346, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.5638258582328196, |
|
"grad_norm": 38.24848556518555, |
|
"learning_rate": 4.591805766312596e-06, |
|
"loss": 0.2452, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 0.5646906218190663, |
|
"grad_norm": 2.314491033554077, |
|
"learning_rate": 4.582701062215478e-06, |
|
"loss": 0.2634, |
|
"step": 1959 |
|
}, |
|
{ |
|
"epoch": 0.5655553854053129, |
|
"grad_norm": 2.920741081237793, |
|
"learning_rate": 4.573596358118362e-06, |
|
"loss": 0.209, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 0.5664201489915596, |
|
"grad_norm": 3.8211729526519775, |
|
"learning_rate": 4.564491654021245e-06, |
|
"loss": 0.2145, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.5672849125778062, |
|
"grad_norm": 3.976433753967285, |
|
"learning_rate": 4.555386949924128e-06, |
|
"loss": 0.2308, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 0.5681496761640529, |
|
"grad_norm": 2.2235753536224365, |
|
"learning_rate": 4.5462822458270115e-06, |
|
"loss": 0.1959, |
|
"step": 1971 |
|
}, |
|
{ |
|
"epoch": 0.5690144397502995, |
|
"grad_norm": 2.7206735610961914, |
|
"learning_rate": 4.537177541729894e-06, |
|
"loss": 0.221, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 0.5698792033365462, |
|
"grad_norm": 1.9556628465652466, |
|
"learning_rate": 4.528072837632777e-06, |
|
"loss": 0.212, |
|
"step": 1977 |
|
}, |
|
{ |
|
"epoch": 0.5707439669227928, |
|
"grad_norm": 2.0342814922332764, |
|
"learning_rate": 4.518968133535661e-06, |
|
"loss": 0.2388, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.5716087305090395, |
|
"grad_norm": 1.7735626697540283, |
|
"learning_rate": 4.509863429438544e-06, |
|
"loss": 0.2227, |
|
"step": 1983 |
|
}, |
|
{ |
|
"epoch": 0.5724734940952861, |
|
"grad_norm": 1.7684601545333862, |
|
"learning_rate": 4.500758725341427e-06, |
|
"loss": 0.2195, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 0.5733382576815328, |
|
"grad_norm": 1.8815613985061646, |
|
"learning_rate": 4.49165402124431e-06, |
|
"loss": 0.262, |
|
"step": 1989 |
|
}, |
|
{ |
|
"epoch": 0.5742030212677794, |
|
"grad_norm": 1.985158085823059, |
|
"learning_rate": 4.482549317147193e-06, |
|
"loss": 0.2109, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 0.5750677848540261, |
|
"grad_norm": 3.0801327228546143, |
|
"learning_rate": 4.473444613050077e-06, |
|
"loss": 0.2017, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.5759325484402728, |
|
"grad_norm": 1.847817301750183, |
|
"learning_rate": 4.46433990895296e-06, |
|
"loss": 0.2274, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 0.5765090574977705, |
|
"eval_loss": 0.22936804592609406, |
|
"eval_mse": 0.22936805277447275, |
|
"eval_runtime": 64.5577, |
|
"eval_samples_per_second": 15.49, |
|
"eval_steps_per_second": 3.873, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.5767973120265194, |
|
"grad_norm": 2.9898228645324707, |
|
"learning_rate": 4.4552352048558425e-06, |
|
"loss": 0.2044, |
|
"step": 2001 |
|
}, |
|
{ |
|
"epoch": 0.5776620756127661, |
|
"grad_norm": 3.691619873046875, |
|
"learning_rate": 4.446130500758726e-06, |
|
"loss": 0.2331, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 0.5785268391990127, |
|
"grad_norm": 1.9646469354629517, |
|
"learning_rate": 4.437025796661609e-06, |
|
"loss": 0.2331, |
|
"step": 2007 |
|
}, |
|
{ |
|
"epoch": 0.5793916027852594, |
|
"grad_norm": 1.9815963506698608, |
|
"learning_rate": 4.427921092564492e-06, |
|
"loss": 0.2068, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.580256366371506, |
|
"grad_norm": 4.30540657043457, |
|
"learning_rate": 4.4188163884673755e-06, |
|
"loss": 0.2588, |
|
"step": 2013 |
|
}, |
|
{ |
|
"epoch": 0.5811211299577527, |
|
"grad_norm": 2.5641026496887207, |
|
"learning_rate": 4.409711684370258e-06, |
|
"loss": 0.2704, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 0.5819858935439993, |
|
"grad_norm": 2.0352160930633545, |
|
"learning_rate": 4.400606980273141e-06, |
|
"loss": 0.2249, |
|
"step": 2019 |
|
}, |
|
{ |
|
"epoch": 0.582850657130246, |
|
"grad_norm": 2.3772523403167725, |
|
"learning_rate": 4.391502276176025e-06, |
|
"loss": 0.2381, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 0.5837154207164926, |
|
"grad_norm": 2.5510172843933105, |
|
"learning_rate": 4.382397572078908e-06, |
|
"loss": 0.2464, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.5845801843027393, |
|
"grad_norm": 2.5375447273254395, |
|
"learning_rate": 4.3732928679817906e-06, |
|
"loss": 0.2299, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 0.5854449478889859, |
|
"grad_norm": 4.41203498840332, |
|
"learning_rate": 4.364188163884674e-06, |
|
"loss": 0.2281, |
|
"step": 2031 |
|
}, |
|
{ |
|
"epoch": 0.5863097114752326, |
|
"grad_norm": 1.7787522077560425, |
|
"learning_rate": 4.355083459787557e-06, |
|
"loss": 0.2518, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 0.5871744750614792, |
|
"grad_norm": 2.4308512210845947, |
|
"learning_rate": 4.34597875569044e-06, |
|
"loss": 0.2621, |
|
"step": 2037 |
|
}, |
|
{ |
|
"epoch": 0.5880392386477259, |
|
"grad_norm": 3.5610814094543457, |
|
"learning_rate": 4.336874051593324e-06, |
|
"loss": 0.2537, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.5889040022339725, |
|
"grad_norm": 3.633169174194336, |
|
"learning_rate": 4.3277693474962065e-06, |
|
"loss": 0.2533, |
|
"step": 2043 |
|
}, |
|
{ |
|
"epoch": 0.5897687658202192, |
|
"grad_norm": 2.8986027240753174, |
|
"learning_rate": 4.31866464339909e-06, |
|
"loss": 0.2156, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 0.590633529406466, |
|
"grad_norm": 1.8997621536254883, |
|
"learning_rate": 4.309559939301973e-06, |
|
"loss": 0.191, |
|
"step": 2049 |
|
}, |
|
{ |
|
"epoch": 0.5914982929927126, |
|
"grad_norm": 3.2994544506073, |
|
"learning_rate": 4.300455235204856e-06, |
|
"loss": 0.2083, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 0.5923630565789593, |
|
"grad_norm": 2.4655439853668213, |
|
"learning_rate": 4.2913505311077395e-06, |
|
"loss": 0.2369, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.5932278201652059, |
|
"grad_norm": 1.669376254081726, |
|
"learning_rate": 4.282245827010622e-06, |
|
"loss": 0.226, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 0.5940925837514526, |
|
"grad_norm": 2.234907388687134, |
|
"learning_rate": 4.273141122913505e-06, |
|
"loss": 0.207, |
|
"step": 2061 |
|
}, |
|
{ |
|
"epoch": 0.5949573473376992, |
|
"grad_norm": 5.61646032333374, |
|
"learning_rate": 4.264036418816389e-06, |
|
"loss": 0.2293, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 0.5958221109239459, |
|
"grad_norm": 6.289272785186768, |
|
"learning_rate": 4.254931714719272e-06, |
|
"loss": 0.1854, |
|
"step": 2067 |
|
}, |
|
{ |
|
"epoch": 0.5966868745101925, |
|
"grad_norm": 3.474231481552124, |
|
"learning_rate": 4.245827010622155e-06, |
|
"loss": 0.2221, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.5975516380964392, |
|
"grad_norm": 3.175489902496338, |
|
"learning_rate": 4.236722306525038e-06, |
|
"loss": 0.2118, |
|
"step": 2073 |
|
}, |
|
{ |
|
"epoch": 0.5984164016826858, |
|
"grad_norm": 3.48205304145813, |
|
"learning_rate": 4.227617602427921e-06, |
|
"loss": 0.2107, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 0.5992811652689325, |
|
"grad_norm": 3.100991725921631, |
|
"learning_rate": 4.218512898330804e-06, |
|
"loss": 0.2238, |
|
"step": 2079 |
|
}, |
|
{ |
|
"epoch": 0.6001459288551791, |
|
"grad_norm": 2.0803310871124268, |
|
"learning_rate": 4.209408194233688e-06, |
|
"loss": 0.2469, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 0.6010106924414258, |
|
"grad_norm": 1.8892669677734375, |
|
"learning_rate": 4.2003034901365705e-06, |
|
"loss": 0.1876, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.6018754560276725, |
|
"grad_norm": 2.093517541885376, |
|
"learning_rate": 4.191198786039454e-06, |
|
"loss": 0.2136, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 0.6027402196139191, |
|
"grad_norm": 2.2591047286987305, |
|
"learning_rate": 4.182094081942337e-06, |
|
"loss": 0.2309, |
|
"step": 2091 |
|
}, |
|
{ |
|
"epoch": 0.6036049832001658, |
|
"grad_norm": 1.6561003923416138, |
|
"learning_rate": 4.17298937784522e-06, |
|
"loss": 0.1958, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 0.6044697467864124, |
|
"grad_norm": 4.529551029205322, |
|
"learning_rate": 4.1638846737481036e-06, |
|
"loss": 0.2551, |
|
"step": 2097 |
|
}, |
|
{ |
|
"epoch": 0.6053345103726591, |
|
"grad_norm": 3.610400915145874, |
|
"learning_rate": 4.154779969650986e-06, |
|
"loss": 0.2272, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.6053345103726591, |
|
"eval_loss": 0.21923716366291046, |
|
"eval_mse": 0.21923717018315803, |
|
"eval_runtime": 64.6466, |
|
"eval_samples_per_second": 15.469, |
|
"eval_steps_per_second": 3.867, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.6061992739589057, |
|
"grad_norm": 5.296643257141113, |
|
"learning_rate": 4.145675265553869e-06, |
|
"loss": 0.2147, |
|
"step": 2103 |
|
}, |
|
{ |
|
"epoch": 0.6070640375451524, |
|
"grad_norm": 1.9191397428512573, |
|
"learning_rate": 4.136570561456753e-06, |
|
"loss": 0.2303, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 0.607928801131399, |
|
"grad_norm": 5.336986541748047, |
|
"learning_rate": 4.127465857359636e-06, |
|
"loss": 0.2544, |
|
"step": 2109 |
|
}, |
|
{ |
|
"epoch": 0.6087935647176457, |
|
"grad_norm": 2.223107099533081, |
|
"learning_rate": 4.1183611532625195e-06, |
|
"loss": 0.2246, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 0.6096583283038923, |
|
"grad_norm": 3.1443583965301514, |
|
"learning_rate": 4.109256449165402e-06, |
|
"loss": 0.2594, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.610523091890139, |
|
"grad_norm": 2.582916498184204, |
|
"learning_rate": 4.100151745068285e-06, |
|
"loss": 0.2307, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 0.6113878554763856, |
|
"grad_norm": 3.238401412963867, |
|
"learning_rate": 4.091047040971169e-06, |
|
"loss": 0.2595, |
|
"step": 2121 |
|
}, |
|
{ |
|
"epoch": 0.6122526190626323, |
|
"grad_norm": 4.401957988739014, |
|
"learning_rate": 4.081942336874052e-06, |
|
"loss": 0.2156, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 0.6131173826488789, |
|
"grad_norm": 1.7487517595291138, |
|
"learning_rate": 4.072837632776935e-06, |
|
"loss": 0.2376, |
|
"step": 2127 |
|
}, |
|
{ |
|
"epoch": 0.6139821462351256, |
|
"grad_norm": 2.8765621185302734, |
|
"learning_rate": 4.063732928679818e-06, |
|
"loss": 0.22, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.6148469098213722, |
|
"grad_norm": 2.272740125656128, |
|
"learning_rate": 4.054628224582701e-06, |
|
"loss": 0.2528, |
|
"step": 2133 |
|
}, |
|
{ |
|
"epoch": 0.6157116734076189, |
|
"grad_norm": 2.2663707733154297, |
|
"learning_rate": 4.045523520485585e-06, |
|
"loss": 0.185, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 0.6165764369938656, |
|
"grad_norm": 1.8243865966796875, |
|
"learning_rate": 4.036418816388468e-06, |
|
"loss": 0.2154, |
|
"step": 2139 |
|
}, |
|
{ |
|
"epoch": 0.6174412005801122, |
|
"grad_norm": 3.036996364593506, |
|
"learning_rate": 4.0273141122913504e-06, |
|
"loss": 0.2388, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 0.6183059641663589, |
|
"grad_norm": 2.0164997577667236, |
|
"learning_rate": 4.018209408194234e-06, |
|
"loss": 0.2371, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.6191707277526055, |
|
"grad_norm": 3.748368978500366, |
|
"learning_rate": 4.009104704097117e-06, |
|
"loss": 0.2377, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 0.6200354913388522, |
|
"grad_norm": 3.3998799324035645, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.2359, |
|
"step": 2151 |
|
}, |
|
{ |
|
"epoch": 0.6209002549250988, |
|
"grad_norm": 1.6474953889846802, |
|
"learning_rate": 3.9908952959028835e-06, |
|
"loss": 0.2049, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 0.6217650185113455, |
|
"grad_norm": 2.3346197605133057, |
|
"learning_rate": 3.981790591805766e-06, |
|
"loss": 0.2263, |
|
"step": 2157 |
|
}, |
|
{ |
|
"epoch": 0.6226297820975921, |
|
"grad_norm": 2.6755003929138184, |
|
"learning_rate": 3.97268588770865e-06, |
|
"loss": 0.248, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.6234945456838388, |
|
"grad_norm": 3.026387929916382, |
|
"learning_rate": 3.963581183611533e-06, |
|
"loss": 0.2784, |
|
"step": 2163 |
|
}, |
|
{ |
|
"epoch": 0.6243593092700855, |
|
"grad_norm": 3.390216112136841, |
|
"learning_rate": 3.9544764795144166e-06, |
|
"loss": 0.241, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 0.6252240728563322, |
|
"grad_norm": 5.739169120788574, |
|
"learning_rate": 3.945371775417299e-06, |
|
"loss": 0.226, |
|
"step": 2169 |
|
}, |
|
{ |
|
"epoch": 0.6260888364425788, |
|
"grad_norm": 1.7892546653747559, |
|
"learning_rate": 3.936267071320182e-06, |
|
"loss": 0.2337, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 0.6269536000288255, |
|
"grad_norm": 4.317811965942383, |
|
"learning_rate": 3.927162367223066e-06, |
|
"loss": 0.2528, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.6278183636150722, |
|
"grad_norm": 2.44039249420166, |
|
"learning_rate": 3.918057663125949e-06, |
|
"loss": 0.2604, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 0.6286831272013188, |
|
"grad_norm": 3.1203863620758057, |
|
"learning_rate": 3.908952959028832e-06, |
|
"loss": 0.2116, |
|
"step": 2181 |
|
}, |
|
{ |
|
"epoch": 0.6295478907875655, |
|
"grad_norm": 2.5261828899383545, |
|
"learning_rate": 3.899848254931715e-06, |
|
"loss": 0.2279, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 0.6304126543738121, |
|
"grad_norm": 2.393629550933838, |
|
"learning_rate": 3.890743550834598e-06, |
|
"loss": 0.2338, |
|
"step": 2187 |
|
}, |
|
{ |
|
"epoch": 0.6312774179600588, |
|
"grad_norm": 2.138955593109131, |
|
"learning_rate": 3.881638846737482e-06, |
|
"loss": 0.2942, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.6321421815463054, |
|
"grad_norm": 2.024564266204834, |
|
"learning_rate": 3.872534142640365e-06, |
|
"loss": 0.2666, |
|
"step": 2193 |
|
}, |
|
{ |
|
"epoch": 0.6330069451325521, |
|
"grad_norm": 5.208636283874512, |
|
"learning_rate": 3.8634294385432475e-06, |
|
"loss": 0.2594, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 0.6338717087187987, |
|
"grad_norm": 2.6842122077941895, |
|
"learning_rate": 3.854324734446131e-06, |
|
"loss": 0.2668, |
|
"step": 2199 |
|
}, |
|
{ |
|
"epoch": 0.6341599632475475, |
|
"eval_loss": 0.22037872672080994, |
|
"eval_mse": 0.22037871759198607, |
|
"eval_runtime": 64.5846, |
|
"eval_samples_per_second": 15.484, |
|
"eval_steps_per_second": 3.871, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.6347364723050454, |
|
"grad_norm": 3.914788246154785, |
|
"learning_rate": 3.845220030349014e-06, |
|
"loss": 0.238, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 0.635601235891292, |
|
"grad_norm": 2.634946823120117, |
|
"learning_rate": 3.836115326251897e-06, |
|
"loss": 0.2302, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.6364659994775387, |
|
"grad_norm": 2.131955862045288, |
|
"learning_rate": 3.827010622154781e-06, |
|
"loss": 0.2251, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 0.6373307630637853, |
|
"grad_norm": 2.292926788330078, |
|
"learning_rate": 3.8179059180576634e-06, |
|
"loss": 0.2182, |
|
"step": 2211 |
|
}, |
|
{ |
|
"epoch": 0.638195526650032, |
|
"grad_norm": 2.0773963928222656, |
|
"learning_rate": 3.8088012139605467e-06, |
|
"loss": 0.2191, |
|
"step": 2214 |
|
}, |
|
{ |
|
"epoch": 0.6390602902362786, |
|
"grad_norm": 7.258689880371094, |
|
"learning_rate": 3.7996965098634296e-06, |
|
"loss": 0.2496, |
|
"step": 2217 |
|
}, |
|
{ |
|
"epoch": 0.6399250538225253, |
|
"grad_norm": 2.341298818588257, |
|
"learning_rate": 3.790591805766313e-06, |
|
"loss": 0.2353, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.640789817408772, |
|
"grad_norm": 2.6624574661254883, |
|
"learning_rate": 3.781487101669196e-06, |
|
"loss": 0.2242, |
|
"step": 2223 |
|
}, |
|
{ |
|
"epoch": 0.6416545809950186, |
|
"grad_norm": 6.106657981872559, |
|
"learning_rate": 3.772382397572079e-06, |
|
"loss": 0.2428, |
|
"step": 2226 |
|
}, |
|
{ |
|
"epoch": 0.6425193445812653, |
|
"grad_norm": 2.1354269981384277, |
|
"learning_rate": 3.7632776934749626e-06, |
|
"loss": 0.2239, |
|
"step": 2229 |
|
}, |
|
{ |
|
"epoch": 0.6433841081675119, |
|
"grad_norm": 3.34881329536438, |
|
"learning_rate": 3.7541729893778455e-06, |
|
"loss": 0.2513, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 0.6442488717537586, |
|
"grad_norm": 3.310595750808716, |
|
"learning_rate": 3.7450682852807287e-06, |
|
"loss": 0.1986, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.6451136353400052, |
|
"grad_norm": 2.3163015842437744, |
|
"learning_rate": 3.735963581183612e-06, |
|
"loss": 0.2228, |
|
"step": 2238 |
|
}, |
|
{ |
|
"epoch": 0.6459783989262519, |
|
"grad_norm": 2.8986473083496094, |
|
"learning_rate": 3.726858877086495e-06, |
|
"loss": 0.2542, |
|
"step": 2241 |
|
}, |
|
{ |
|
"epoch": 0.6468431625124985, |
|
"grad_norm": 1.9085699319839478, |
|
"learning_rate": 3.717754172989378e-06, |
|
"loss": 0.2215, |
|
"step": 2244 |
|
}, |
|
{ |
|
"epoch": 0.6477079260987452, |
|
"grad_norm": 1.8858418464660645, |
|
"learning_rate": 3.7086494688922614e-06, |
|
"loss": 0.2081, |
|
"step": 2247 |
|
}, |
|
{ |
|
"epoch": 0.6485726896849918, |
|
"grad_norm": 2.7756083011627197, |
|
"learning_rate": 3.699544764795144e-06, |
|
"loss": 0.2483, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.6494374532712385, |
|
"grad_norm": 2.3329155445098877, |
|
"learning_rate": 3.690440060698028e-06, |
|
"loss": 0.2106, |
|
"step": 2253 |
|
}, |
|
{ |
|
"epoch": 0.6503022168574851, |
|
"grad_norm": 2.95639967918396, |
|
"learning_rate": 3.6813353566009107e-06, |
|
"loss": 0.2115, |
|
"step": 2256 |
|
}, |
|
{ |
|
"epoch": 0.6511669804437318, |
|
"grad_norm": 2.3648903369903564, |
|
"learning_rate": 3.6722306525037936e-06, |
|
"loss": 0.2156, |
|
"step": 2259 |
|
}, |
|
{ |
|
"epoch": 0.6520317440299784, |
|
"grad_norm": 2.9157233238220215, |
|
"learning_rate": 3.6631259484066773e-06, |
|
"loss": 0.2542, |
|
"step": 2262 |
|
}, |
|
{ |
|
"epoch": 0.6528965076162251, |
|
"grad_norm": 2.67684268951416, |
|
"learning_rate": 3.65402124430956e-06, |
|
"loss": 0.2434, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.6537612712024717, |
|
"grad_norm": 1.8347716331481934, |
|
"learning_rate": 3.644916540212443e-06, |
|
"loss": 0.2236, |
|
"step": 2268 |
|
}, |
|
{ |
|
"epoch": 0.6546260347887184, |
|
"grad_norm": 2.5923428535461426, |
|
"learning_rate": 3.6358118361153266e-06, |
|
"loss": 0.2232, |
|
"step": 2271 |
|
}, |
|
{ |
|
"epoch": 0.655490798374965, |
|
"grad_norm": 1.9584388732910156, |
|
"learning_rate": 3.6267071320182095e-06, |
|
"loss": 0.1951, |
|
"step": 2274 |
|
}, |
|
{ |
|
"epoch": 0.6563555619612117, |
|
"grad_norm": 1.8902173042297363, |
|
"learning_rate": 3.617602427921093e-06, |
|
"loss": 0.2051, |
|
"step": 2277 |
|
}, |
|
{ |
|
"epoch": 0.6572203255474584, |
|
"grad_norm": 1.735851764678955, |
|
"learning_rate": 3.608497723823976e-06, |
|
"loss": 0.22, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.6580850891337051, |
|
"grad_norm": 3.9721834659576416, |
|
"learning_rate": 3.599393019726859e-06, |
|
"loss": 0.2518, |
|
"step": 2283 |
|
}, |
|
{ |
|
"epoch": 0.6589498527199518, |
|
"grad_norm": 3.3028452396392822, |
|
"learning_rate": 3.5902883156297426e-06, |
|
"loss": 0.2379, |
|
"step": 2286 |
|
}, |
|
{ |
|
"epoch": 0.6598146163061984, |
|
"grad_norm": 3.636674404144287, |
|
"learning_rate": 3.5811836115326254e-06, |
|
"loss": 0.2736, |
|
"step": 2289 |
|
}, |
|
{ |
|
"epoch": 0.6606793798924451, |
|
"grad_norm": 3.664700984954834, |
|
"learning_rate": 3.572078907435509e-06, |
|
"loss": 0.2409, |
|
"step": 2292 |
|
}, |
|
{ |
|
"epoch": 0.6615441434786917, |
|
"grad_norm": 1.9176503419876099, |
|
"learning_rate": 3.562974203338392e-06, |
|
"loss": 0.1983, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.6624089070649384, |
|
"grad_norm": 1.9897313117980957, |
|
"learning_rate": 3.5538694992412748e-06, |
|
"loss": 0.2434, |
|
"step": 2298 |
|
}, |
|
{ |
|
"epoch": 0.6629854161224361, |
|
"eval_loss": 0.2195964902639389, |
|
"eval_mse": 0.21959649334591813, |
|
"eval_runtime": 64.3698, |
|
"eval_samples_per_second": 15.535, |
|
"eval_steps_per_second": 3.884, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.663273670651185, |
|
"grad_norm": 2.568549871444702, |
|
"learning_rate": 3.5447647951441585e-06, |
|
"loss": 0.2006, |
|
"step": 2301 |
|
}, |
|
{ |
|
"epoch": 0.6641384342374317, |
|
"grad_norm": 2.855433940887451, |
|
"learning_rate": 3.5356600910470413e-06, |
|
"loss": 0.2064, |
|
"step": 2304 |
|
}, |
|
{ |
|
"epoch": 0.6650031978236783, |
|
"grad_norm": 3.653754949569702, |
|
"learning_rate": 3.526555386949924e-06, |
|
"loss": 0.2349, |
|
"step": 2307 |
|
}, |
|
{ |
|
"epoch": 0.665867961409925, |
|
"grad_norm": 2.384378671646118, |
|
"learning_rate": 3.517450682852808e-06, |
|
"loss": 0.2334, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.6667327249961716, |
|
"grad_norm": 1.966973900794983, |
|
"learning_rate": 3.5083459787556907e-06, |
|
"loss": 0.2148, |
|
"step": 2313 |
|
}, |
|
{ |
|
"epoch": 0.6675974885824183, |
|
"grad_norm": 2.5276942253112793, |
|
"learning_rate": 3.499241274658574e-06, |
|
"loss": 0.2302, |
|
"step": 2316 |
|
}, |
|
{ |
|
"epoch": 0.668462252168665, |
|
"grad_norm": 3.5509471893310547, |
|
"learning_rate": 3.490136570561457e-06, |
|
"loss": 0.2672, |
|
"step": 2319 |
|
}, |
|
{ |
|
"epoch": 0.6693270157549116, |
|
"grad_norm": 6.874312877655029, |
|
"learning_rate": 3.48103186646434e-06, |
|
"loss": 0.2538, |
|
"step": 2322 |
|
}, |
|
{ |
|
"epoch": 0.6701917793411583, |
|
"grad_norm": 2.7544713020324707, |
|
"learning_rate": 3.4719271623672233e-06, |
|
"loss": 0.2146, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.6710565429274049, |
|
"grad_norm": 2.2370007038116455, |
|
"learning_rate": 3.4628224582701066e-06, |
|
"loss": 0.2373, |
|
"step": 2328 |
|
}, |
|
{ |
|
"epoch": 0.6719213065136516, |
|
"grad_norm": 2.3013980388641357, |
|
"learning_rate": 3.45371775417299e-06, |
|
"loss": 0.2437, |
|
"step": 2331 |
|
}, |
|
{ |
|
"epoch": 0.6727860700998982, |
|
"grad_norm": 2.2957603931427, |
|
"learning_rate": 3.4446130500758727e-06, |
|
"loss": 0.1974, |
|
"step": 2334 |
|
}, |
|
{ |
|
"epoch": 0.6736508336861449, |
|
"grad_norm": 2.216355562210083, |
|
"learning_rate": 3.435508345978756e-06, |
|
"loss": 0.2274, |
|
"step": 2337 |
|
}, |
|
{ |
|
"epoch": 0.6745155972723915, |
|
"grad_norm": 2.5096681118011475, |
|
"learning_rate": 3.4264036418816392e-06, |
|
"loss": 0.2423, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.6753803608586382, |
|
"grad_norm": 2.094209671020508, |
|
"learning_rate": 3.417298937784522e-06, |
|
"loss": 0.2296, |
|
"step": 2343 |
|
}, |
|
{ |
|
"epoch": 0.6762451244448848, |
|
"grad_norm": 2.772343635559082, |
|
"learning_rate": 3.4081942336874053e-06, |
|
"loss": 0.241, |
|
"step": 2346 |
|
}, |
|
{ |
|
"epoch": 0.6771098880311315, |
|
"grad_norm": 3.8428046703338623, |
|
"learning_rate": 3.3990895295902886e-06, |
|
"loss": 0.2055, |
|
"step": 2349 |
|
}, |
|
{ |
|
"epoch": 0.6779746516173781, |
|
"grad_norm": 1.7676074504852295, |
|
"learning_rate": 3.3899848254931714e-06, |
|
"loss": 0.1845, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 0.6788394152036248, |
|
"grad_norm": 3.0652945041656494, |
|
"learning_rate": 3.380880121396055e-06, |
|
"loss": 0.2376, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.6797041787898714, |
|
"grad_norm": 3.206815242767334, |
|
"learning_rate": 3.371775417298938e-06, |
|
"loss": 0.2543, |
|
"step": 2358 |
|
}, |
|
{ |
|
"epoch": 0.6805689423761181, |
|
"grad_norm": 3.2649717330932617, |
|
"learning_rate": 3.3626707132018212e-06, |
|
"loss": 0.2255, |
|
"step": 2361 |
|
}, |
|
{ |
|
"epoch": 0.6814337059623647, |
|
"grad_norm": 2.2816710472106934, |
|
"learning_rate": 3.3535660091047045e-06, |
|
"loss": 0.2318, |
|
"step": 2364 |
|
}, |
|
{ |
|
"epoch": 0.6822984695486114, |
|
"grad_norm": 2.291609287261963, |
|
"learning_rate": 3.3444613050075873e-06, |
|
"loss": 0.235, |
|
"step": 2367 |
|
}, |
|
{ |
|
"epoch": 0.683163233134858, |
|
"grad_norm": 2.037982940673828, |
|
"learning_rate": 3.3353566009104706e-06, |
|
"loss": 0.2437, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.6840279967211047, |
|
"grad_norm": 2.614445209503174, |
|
"learning_rate": 3.326251896813354e-06, |
|
"loss": 0.2452, |
|
"step": 2373 |
|
}, |
|
{ |
|
"epoch": 0.6848927603073514, |
|
"grad_norm": 1.8799285888671875, |
|
"learning_rate": 3.3171471927162367e-06, |
|
"loss": 0.2567, |
|
"step": 2376 |
|
}, |
|
{ |
|
"epoch": 0.685757523893598, |
|
"grad_norm": 3.457829236984253, |
|
"learning_rate": 3.3080424886191204e-06, |
|
"loss": 0.2307, |
|
"step": 2379 |
|
}, |
|
{ |
|
"epoch": 0.6866222874798447, |
|
"grad_norm": 3.6573526859283447, |
|
"learning_rate": 3.2989377845220033e-06, |
|
"loss": 0.2321, |
|
"step": 2382 |
|
}, |
|
{ |
|
"epoch": 0.6874870510660913, |
|
"grad_norm": 2.0287060737609863, |
|
"learning_rate": 3.289833080424886e-06, |
|
"loss": 0.246, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.688351814652338, |
|
"grad_norm": 2.5227603912353516, |
|
"learning_rate": 3.2807283763277698e-06, |
|
"loss": 0.1936, |
|
"step": 2388 |
|
}, |
|
{ |
|
"epoch": 0.6892165782385846, |
|
"grad_norm": 1.6194448471069336, |
|
"learning_rate": 3.2716236722306526e-06, |
|
"loss": 0.2108, |
|
"step": 2391 |
|
}, |
|
{ |
|
"epoch": 0.6900813418248313, |
|
"grad_norm": 3.9881162643432617, |
|
"learning_rate": 3.2625189681335363e-06, |
|
"loss": 0.2384, |
|
"step": 2394 |
|
}, |
|
{ |
|
"epoch": 0.6909461054110779, |
|
"grad_norm": 2.262324094772339, |
|
"learning_rate": 3.253414264036419e-06, |
|
"loss": 0.2791, |
|
"step": 2397 |
|
}, |
|
{ |
|
"epoch": 0.6918108689973247, |
|
"grad_norm": 3.344790458679199, |
|
"learning_rate": 3.244309559939302e-06, |
|
"loss": 0.2464, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.6918108689973247, |
|
"eval_loss": 0.21846170723438263, |
|
"eval_mse": 0.21846168629289606, |
|
"eval_runtime": 64.3607, |
|
"eval_samples_per_second": 15.537, |
|
"eval_steps_per_second": 3.884, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.6926756325835713, |
|
"grad_norm": 3.4839906692504883, |
|
"learning_rate": 3.2352048558421857e-06, |
|
"loss": 0.2318, |
|
"step": 2403 |
|
}, |
|
{ |
|
"epoch": 0.693540396169818, |
|
"grad_norm": 1.9182045459747314, |
|
"learning_rate": 3.2261001517450685e-06, |
|
"loss": 0.2366, |
|
"step": 2406 |
|
}, |
|
{ |
|
"epoch": 0.6944051597560646, |
|
"grad_norm": 3.907069683074951, |
|
"learning_rate": 3.2169954476479514e-06, |
|
"loss": 0.2068, |
|
"step": 2409 |
|
}, |
|
{ |
|
"epoch": 0.6952699233423113, |
|
"grad_norm": 2.516395330429077, |
|
"learning_rate": 3.207890743550835e-06, |
|
"loss": 0.2467, |
|
"step": 2412 |
|
}, |
|
{ |
|
"epoch": 0.696134686928558, |
|
"grad_norm": 8.096402168273926, |
|
"learning_rate": 3.198786039453718e-06, |
|
"loss": 0.205, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.6969994505148046, |
|
"grad_norm": 2.871696949005127, |
|
"learning_rate": 3.1896813353566016e-06, |
|
"loss": 0.2259, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 0.6978642141010513, |
|
"grad_norm": 2.6868882179260254, |
|
"learning_rate": 3.1805766312594844e-06, |
|
"loss": 0.2389, |
|
"step": 2421 |
|
}, |
|
{ |
|
"epoch": 0.6987289776872979, |
|
"grad_norm": 2.687101364135742, |
|
"learning_rate": 3.1714719271623673e-06, |
|
"loss": 0.2148, |
|
"step": 2424 |
|
}, |
|
{ |
|
"epoch": 0.6995937412735446, |
|
"grad_norm": 10.40469741821289, |
|
"learning_rate": 3.162367223065251e-06, |
|
"loss": 0.2412, |
|
"step": 2427 |
|
}, |
|
{ |
|
"epoch": 0.7004585048597912, |
|
"grad_norm": 2.589078903198242, |
|
"learning_rate": 3.153262518968134e-06, |
|
"loss": 0.2411, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.7013232684460379, |
|
"grad_norm": 2.443969488143921, |
|
"learning_rate": 3.1441578148710167e-06, |
|
"loss": 0.2378, |
|
"step": 2433 |
|
}, |
|
{ |
|
"epoch": 0.7021880320322845, |
|
"grad_norm": 3.190467596054077, |
|
"learning_rate": 3.1350531107739003e-06, |
|
"loss": 0.2148, |
|
"step": 2436 |
|
}, |
|
{ |
|
"epoch": 0.7030527956185312, |
|
"grad_norm": 6.994554042816162, |
|
"learning_rate": 3.125948406676783e-06, |
|
"loss": 0.2349, |
|
"step": 2439 |
|
}, |
|
{ |
|
"epoch": 0.7039175592047778, |
|
"grad_norm": 4.769686698913574, |
|
"learning_rate": 3.1168437025796665e-06, |
|
"loss": 0.225, |
|
"step": 2442 |
|
}, |
|
{ |
|
"epoch": 0.7047823227910245, |
|
"grad_norm": 1.8445836305618286, |
|
"learning_rate": 3.1077389984825497e-06, |
|
"loss": 0.2498, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.7056470863772711, |
|
"grad_norm": 2.2382473945617676, |
|
"learning_rate": 3.0986342943854326e-06, |
|
"loss": 0.21, |
|
"step": 2448 |
|
}, |
|
{ |
|
"epoch": 0.7065118499635178, |
|
"grad_norm": 2.9342122077941895, |
|
"learning_rate": 3.089529590288316e-06, |
|
"loss": 0.2316, |
|
"step": 2451 |
|
}, |
|
{ |
|
"epoch": 0.7073766135497644, |
|
"grad_norm": 2.4339959621429443, |
|
"learning_rate": 3.080424886191199e-06, |
|
"loss": 0.2446, |
|
"step": 2454 |
|
}, |
|
{ |
|
"epoch": 0.7082413771360111, |
|
"grad_norm": 3.4156157970428467, |
|
"learning_rate": 3.0713201820940824e-06, |
|
"loss": 0.2465, |
|
"step": 2457 |
|
}, |
|
{ |
|
"epoch": 0.7091061407222577, |
|
"grad_norm": 2.0606541633605957, |
|
"learning_rate": 3.062215477996965e-06, |
|
"loss": 0.2538, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.7099709043085044, |
|
"grad_norm": 2.633622407913208, |
|
"learning_rate": 3.0531107738998485e-06, |
|
"loss": 0.2176, |
|
"step": 2463 |
|
}, |
|
{ |
|
"epoch": 0.710835667894751, |
|
"grad_norm": 5.920553207397461, |
|
"learning_rate": 3.0440060698027317e-06, |
|
"loss": 0.2238, |
|
"step": 2466 |
|
}, |
|
{ |
|
"epoch": 0.7117004314809977, |
|
"grad_norm": 1.6151419878005981, |
|
"learning_rate": 3.0349013657056146e-06, |
|
"loss": 0.1892, |
|
"step": 2469 |
|
}, |
|
{ |
|
"epoch": 0.7125651950672444, |
|
"grad_norm": 2.7643418312072754, |
|
"learning_rate": 3.025796661608498e-06, |
|
"loss": 0.2222, |
|
"step": 2472 |
|
}, |
|
{ |
|
"epoch": 0.713429958653491, |
|
"grad_norm": 2.580146551132202, |
|
"learning_rate": 3.016691957511381e-06, |
|
"loss": 0.261, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.7142947222397377, |
|
"grad_norm": 1.7853143215179443, |
|
"learning_rate": 3.0075872534142644e-06, |
|
"loss": 0.2295, |
|
"step": 2478 |
|
}, |
|
{ |
|
"epoch": 0.7151594858259843, |
|
"grad_norm": 1.8063633441925049, |
|
"learning_rate": 2.9984825493171476e-06, |
|
"loss": 0.2186, |
|
"step": 2481 |
|
}, |
|
{ |
|
"epoch": 0.716024249412231, |
|
"grad_norm": 5.130537986755371, |
|
"learning_rate": 2.9893778452200305e-06, |
|
"loss": 0.242, |
|
"step": 2484 |
|
}, |
|
{ |
|
"epoch": 0.7168890129984776, |
|
"grad_norm": 2.46856951713562, |
|
"learning_rate": 2.9802731411229137e-06, |
|
"loss": 0.2491, |
|
"step": 2487 |
|
}, |
|
{ |
|
"epoch": 0.7177537765847243, |
|
"grad_norm": 3.249279260635376, |
|
"learning_rate": 2.971168437025797e-06, |
|
"loss": 0.2291, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.7186185401709709, |
|
"grad_norm": 2.0126419067382812, |
|
"learning_rate": 2.96206373292868e-06, |
|
"loss": 0.2038, |
|
"step": 2493 |
|
}, |
|
{ |
|
"epoch": 0.7194833037572176, |
|
"grad_norm": 11.745406150817871, |
|
"learning_rate": 2.9529590288315635e-06, |
|
"loss": 0.2555, |
|
"step": 2496 |
|
}, |
|
{ |
|
"epoch": 0.7203480673434642, |
|
"grad_norm": 1.5893986225128174, |
|
"learning_rate": 2.9438543247344464e-06, |
|
"loss": 0.2338, |
|
"step": 2499 |
|
}, |
|
{ |
|
"epoch": 0.7206363218722132, |
|
"eval_loss": 0.21656151115894318, |
|
"eval_mse": 0.21656151949986815, |
|
"eval_runtime": 64.4174, |
|
"eval_samples_per_second": 15.524, |
|
"eval_steps_per_second": 3.881, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.7212128309297109, |
|
"grad_norm": 2.0566763877868652, |
|
"learning_rate": 2.9347496206373292e-06, |
|
"loss": 0.2395, |
|
"step": 2502 |
|
}, |
|
{ |
|
"epoch": 0.7220775945159575, |
|
"grad_norm": 2.7235090732574463, |
|
"learning_rate": 2.925644916540213e-06, |
|
"loss": 0.2057, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.7229423581022042, |
|
"grad_norm": 4.992210388183594, |
|
"learning_rate": 2.9165402124430958e-06, |
|
"loss": 0.2557, |
|
"step": 2508 |
|
}, |
|
{ |
|
"epoch": 0.7238071216884508, |
|
"grad_norm": 2.544344186782837, |
|
"learning_rate": 2.9074355083459786e-06, |
|
"loss": 0.2299, |
|
"step": 2511 |
|
}, |
|
{ |
|
"epoch": 0.7246718852746975, |
|
"grad_norm": 3.472134590148926, |
|
"learning_rate": 2.8983308042488623e-06, |
|
"loss": 0.2813, |
|
"step": 2514 |
|
}, |
|
{ |
|
"epoch": 0.7255366488609443, |
|
"grad_norm": 1.9130823612213135, |
|
"learning_rate": 2.889226100151745e-06, |
|
"loss": 0.2167, |
|
"step": 2517 |
|
}, |
|
{ |
|
"epoch": 0.7264014124471909, |
|
"grad_norm": 1.9955520629882812, |
|
"learning_rate": 2.880121396054629e-06, |
|
"loss": 0.2372, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.7272661760334376, |
|
"grad_norm": 2.574761152267456, |
|
"learning_rate": 2.8710166919575117e-06, |
|
"loss": 0.2256, |
|
"step": 2523 |
|
}, |
|
{ |
|
"epoch": 0.7281309396196842, |
|
"grad_norm": 1.6945056915283203, |
|
"learning_rate": 2.8619119878603945e-06, |
|
"loss": 0.2177, |
|
"step": 2526 |
|
}, |
|
{ |
|
"epoch": 0.7289957032059309, |
|
"grad_norm": 2.1304931640625, |
|
"learning_rate": 2.852807283763278e-06, |
|
"loss": 0.2438, |
|
"step": 2529 |
|
}, |
|
{ |
|
"epoch": 0.7298604667921775, |
|
"grad_norm": 1.626629114151001, |
|
"learning_rate": 2.843702579666161e-06, |
|
"loss": 0.1954, |
|
"step": 2532 |
|
}, |
|
{ |
|
"epoch": 0.7307252303784242, |
|
"grad_norm": 2.735403537750244, |
|
"learning_rate": 2.834597875569044e-06, |
|
"loss": 0.2274, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.7315899939646708, |
|
"grad_norm": 2.8743979930877686, |
|
"learning_rate": 2.8254931714719276e-06, |
|
"loss": 0.235, |
|
"step": 2538 |
|
}, |
|
{ |
|
"epoch": 0.7324547575509175, |
|
"grad_norm": 1.9181838035583496, |
|
"learning_rate": 2.8163884673748104e-06, |
|
"loss": 0.217, |
|
"step": 2541 |
|
}, |
|
{ |
|
"epoch": 0.7333195211371641, |
|
"grad_norm": 1.9460699558258057, |
|
"learning_rate": 2.807283763277694e-06, |
|
"loss": 0.1983, |
|
"step": 2544 |
|
}, |
|
{ |
|
"epoch": 0.7341842847234108, |
|
"grad_norm": 2.0447700023651123, |
|
"learning_rate": 2.798179059180577e-06, |
|
"loss": 0.2168, |
|
"step": 2547 |
|
}, |
|
{ |
|
"epoch": 0.7350490483096574, |
|
"grad_norm": 2.380274534225464, |
|
"learning_rate": 2.78907435508346e-06, |
|
"loss": 0.2275, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.7359138118959041, |
|
"grad_norm": 1.5381513833999634, |
|
"learning_rate": 2.7799696509863435e-06, |
|
"loss": 0.1808, |
|
"step": 2553 |
|
}, |
|
{ |
|
"epoch": 0.7367785754821508, |
|
"grad_norm": 1.8658198118209839, |
|
"learning_rate": 2.7708649468892263e-06, |
|
"loss": 0.2094, |
|
"step": 2556 |
|
}, |
|
{ |
|
"epoch": 0.7376433390683974, |
|
"grad_norm": 1.8906834125518799, |
|
"learning_rate": 2.7617602427921096e-06, |
|
"loss": 0.2315, |
|
"step": 2559 |
|
}, |
|
{ |
|
"epoch": 0.7385081026546441, |
|
"grad_norm": 2.179494619369507, |
|
"learning_rate": 2.752655538694993e-06, |
|
"loss": 0.2317, |
|
"step": 2562 |
|
}, |
|
{ |
|
"epoch": 0.7393728662408907, |
|
"grad_norm": 1.9197441339492798, |
|
"learning_rate": 2.7435508345978757e-06, |
|
"loss": 0.2023, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.7402376298271374, |
|
"grad_norm": 3.4614813327789307, |
|
"learning_rate": 2.734446130500759e-06, |
|
"loss": 0.2112, |
|
"step": 2568 |
|
}, |
|
{ |
|
"epoch": 0.741102393413384, |
|
"grad_norm": 4.101759433746338, |
|
"learning_rate": 2.7253414264036422e-06, |
|
"loss": 0.2087, |
|
"step": 2571 |
|
}, |
|
{ |
|
"epoch": 0.7419671569996307, |
|
"grad_norm": 2.005354166030884, |
|
"learning_rate": 2.716236722306525e-06, |
|
"loss": 0.2689, |
|
"step": 2574 |
|
}, |
|
{ |
|
"epoch": 0.7428319205858773, |
|
"grad_norm": 1.8958799839019775, |
|
"learning_rate": 2.7071320182094083e-06, |
|
"loss": 0.2639, |
|
"step": 2577 |
|
}, |
|
{ |
|
"epoch": 0.743696684172124, |
|
"grad_norm": 2.3008432388305664, |
|
"learning_rate": 2.6980273141122916e-06, |
|
"loss": 0.2412, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.7445614477583706, |
|
"grad_norm": 2.5842695236206055, |
|
"learning_rate": 2.688922610015175e-06, |
|
"loss": 0.2339, |
|
"step": 2583 |
|
}, |
|
{ |
|
"epoch": 0.7454262113446173, |
|
"grad_norm": 2.5189950466156006, |
|
"learning_rate": 2.6798179059180577e-06, |
|
"loss": 0.2413, |
|
"step": 2586 |
|
}, |
|
{ |
|
"epoch": 0.7462909749308639, |
|
"grad_norm": 1.8702943325042725, |
|
"learning_rate": 2.670713201820941e-06, |
|
"loss": 0.2242, |
|
"step": 2589 |
|
}, |
|
{ |
|
"epoch": 0.7471557385171106, |
|
"grad_norm": 1.5371664762496948, |
|
"learning_rate": 2.6616084977238242e-06, |
|
"loss": 0.2089, |
|
"step": 2592 |
|
}, |
|
{ |
|
"epoch": 0.7480205021033572, |
|
"grad_norm": 2.6837081909179688, |
|
"learning_rate": 2.652503793626707e-06, |
|
"loss": 0.214, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.7488852656896039, |
|
"grad_norm": 2.3358068466186523, |
|
"learning_rate": 2.6433990895295904e-06, |
|
"loss": 0.243, |
|
"step": 2598 |
|
}, |
|
{ |
|
"epoch": 0.7494617747471017, |
|
"eval_loss": 0.2165103405714035, |
|
"eval_mse": 0.2165103500261903, |
|
"eval_runtime": 64.7115, |
|
"eval_samples_per_second": 15.453, |
|
"eval_steps_per_second": 3.863, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.7497500292758505, |
|
"grad_norm": 4.396569728851318, |
|
"learning_rate": 2.6342943854324736e-06, |
|
"loss": 0.2704, |
|
"step": 2601 |
|
}, |
|
{ |
|
"epoch": 0.7506147928620972, |
|
"grad_norm": 3.1913275718688965, |
|
"learning_rate": 2.625189681335357e-06, |
|
"loss": 0.2682, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 0.7514795564483439, |
|
"grad_norm": 2.679997444152832, |
|
"learning_rate": 2.61608497723824e-06, |
|
"loss": 0.1992, |
|
"step": 2607 |
|
}, |
|
{ |
|
"epoch": 0.7523443200345905, |
|
"grad_norm": 3.6164557933807373, |
|
"learning_rate": 2.606980273141123e-06, |
|
"loss": 0.2249, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.7532090836208372, |
|
"grad_norm": 2.7497098445892334, |
|
"learning_rate": 2.5978755690440063e-06, |
|
"loss": 0.245, |
|
"step": 2613 |
|
}, |
|
{ |
|
"epoch": 0.7540738472070838, |
|
"grad_norm": 2.927504777908325, |
|
"learning_rate": 2.5887708649468895e-06, |
|
"loss": 0.2114, |
|
"step": 2616 |
|
}, |
|
{ |
|
"epoch": 0.7549386107933305, |
|
"grad_norm": 1.5433193445205688, |
|
"learning_rate": 2.5796661608497724e-06, |
|
"loss": 0.2014, |
|
"step": 2619 |
|
}, |
|
{ |
|
"epoch": 0.7558033743795771, |
|
"grad_norm": 2.3192660808563232, |
|
"learning_rate": 2.570561456752656e-06, |
|
"loss": 0.2901, |
|
"step": 2622 |
|
}, |
|
{ |
|
"epoch": 0.7566681379658238, |
|
"grad_norm": 1.924660086631775, |
|
"learning_rate": 2.561456752655539e-06, |
|
"loss": 0.2085, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.7575329015520704, |
|
"grad_norm": 3.05586576461792, |
|
"learning_rate": 2.5523520485584217e-06, |
|
"loss": 0.2056, |
|
"step": 2628 |
|
}, |
|
{ |
|
"epoch": 0.7583976651383171, |
|
"grad_norm": 3.1275217533111572, |
|
"learning_rate": 2.5432473444613054e-06, |
|
"loss": 0.2282, |
|
"step": 2631 |
|
}, |
|
{ |
|
"epoch": 0.7592624287245638, |
|
"grad_norm": 2.8783745765686035, |
|
"learning_rate": 2.5341426403641883e-06, |
|
"loss": 0.2216, |
|
"step": 2634 |
|
}, |
|
{ |
|
"epoch": 0.7601271923108105, |
|
"grad_norm": 2.206467390060425, |
|
"learning_rate": 2.525037936267071e-06, |
|
"loss": 0.2383, |
|
"step": 2637 |
|
}, |
|
{ |
|
"epoch": 0.7609919558970571, |
|
"grad_norm": 1.7947461605072021, |
|
"learning_rate": 2.515933232169955e-06, |
|
"loss": 0.2242, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.7618567194833038, |
|
"grad_norm": 2.2603344917297363, |
|
"learning_rate": 2.5068285280728376e-06, |
|
"loss": 0.2196, |
|
"step": 2643 |
|
}, |
|
{ |
|
"epoch": 0.7627214830695505, |
|
"grad_norm": 4.005883693695068, |
|
"learning_rate": 2.497723823975721e-06, |
|
"loss": 0.2683, |
|
"step": 2646 |
|
}, |
|
{ |
|
"epoch": 0.7635862466557971, |
|
"grad_norm": 1.8957757949829102, |
|
"learning_rate": 2.488619119878604e-06, |
|
"loss": 0.2437, |
|
"step": 2649 |
|
}, |
|
{ |
|
"epoch": 0.7644510102420438, |
|
"grad_norm": 3.7723991870880127, |
|
"learning_rate": 2.4795144157814874e-06, |
|
"loss": 0.2453, |
|
"step": 2652 |
|
}, |
|
{ |
|
"epoch": 0.7653157738282904, |
|
"grad_norm": 2.494797945022583, |
|
"learning_rate": 2.4704097116843703e-06, |
|
"loss": 0.2285, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.7661805374145371, |
|
"grad_norm": 2.8127262592315674, |
|
"learning_rate": 2.4613050075872536e-06, |
|
"loss": 0.1973, |
|
"step": 2658 |
|
}, |
|
{ |
|
"epoch": 0.7670453010007837, |
|
"grad_norm": 1.8228410482406616, |
|
"learning_rate": 2.452200303490137e-06, |
|
"loss": 0.2155, |
|
"step": 2661 |
|
}, |
|
{ |
|
"epoch": 0.7679100645870304, |
|
"grad_norm": 5.944820404052734, |
|
"learning_rate": 2.44309559939302e-06, |
|
"loss": 0.2137, |
|
"step": 2664 |
|
}, |
|
{ |
|
"epoch": 0.768774828173277, |
|
"grad_norm": 1.749942421913147, |
|
"learning_rate": 2.4339908952959034e-06, |
|
"loss": 0.2134, |
|
"step": 2667 |
|
}, |
|
{ |
|
"epoch": 0.7696395917595237, |
|
"grad_norm": 2.2554376125335693, |
|
"learning_rate": 2.424886191198786e-06, |
|
"loss": 0.2397, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.7705043553457703, |
|
"grad_norm": 2.416917085647583, |
|
"learning_rate": 2.4157814871016695e-06, |
|
"loss": 0.2201, |
|
"step": 2673 |
|
}, |
|
{ |
|
"epoch": 0.771369118932017, |
|
"grad_norm": 2.578611373901367, |
|
"learning_rate": 2.4066767830045527e-06, |
|
"loss": 0.2197, |
|
"step": 2676 |
|
}, |
|
{ |
|
"epoch": 0.7722338825182636, |
|
"grad_norm": 2.857250690460205, |
|
"learning_rate": 2.397572078907436e-06, |
|
"loss": 0.2395, |
|
"step": 2679 |
|
}, |
|
{ |
|
"epoch": 0.7730986461045103, |
|
"grad_norm": 2.608745574951172, |
|
"learning_rate": 2.388467374810319e-06, |
|
"loss": 0.2421, |
|
"step": 2682 |
|
}, |
|
{ |
|
"epoch": 0.7739634096907569, |
|
"grad_norm": 2.9792795181274414, |
|
"learning_rate": 2.379362670713202e-06, |
|
"loss": 0.2319, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.7748281732770036, |
|
"grad_norm": 3.69161319732666, |
|
"learning_rate": 2.3702579666160854e-06, |
|
"loss": 0.2322, |
|
"step": 2688 |
|
}, |
|
{ |
|
"epoch": 0.7756929368632502, |
|
"grad_norm": 2.1806881427764893, |
|
"learning_rate": 2.3611532625189686e-06, |
|
"loss": 0.2392, |
|
"step": 2691 |
|
}, |
|
{ |
|
"epoch": 0.7765577004494969, |
|
"grad_norm": 4.177742004394531, |
|
"learning_rate": 2.3520485584218515e-06, |
|
"loss": 0.2221, |
|
"step": 2694 |
|
}, |
|
{ |
|
"epoch": 0.7774224640357436, |
|
"grad_norm": 3.6789214611053467, |
|
"learning_rate": 2.3429438543247347e-06, |
|
"loss": 0.2021, |
|
"step": 2697 |
|
}, |
|
{ |
|
"epoch": 0.7782872276219902, |
|
"grad_norm": 2.2298083305358887, |
|
"learning_rate": 2.333839150227618e-06, |
|
"loss": 0.1891, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.7782872276219902, |
|
"eval_loss": 0.22008302807807922, |
|
"eval_mse": 0.22008301681582815, |
|
"eval_runtime": 64.7274, |
|
"eval_samples_per_second": 15.449, |
|
"eval_steps_per_second": 3.862, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.7791519912082369, |
|
"grad_norm": 1.7022091150283813, |
|
"learning_rate": 2.324734446130501e-06, |
|
"loss": 0.2267, |
|
"step": 2703 |
|
}, |
|
{ |
|
"epoch": 0.7800167547944835, |
|
"grad_norm": 2.402782440185547, |
|
"learning_rate": 2.315629742033384e-06, |
|
"loss": 0.2112, |
|
"step": 2706 |
|
}, |
|
{ |
|
"epoch": 0.7808815183807302, |
|
"grad_norm": 12.773355484008789, |
|
"learning_rate": 2.3065250379362674e-06, |
|
"loss": 0.2078, |
|
"step": 2709 |
|
}, |
|
{ |
|
"epoch": 0.7817462819669768, |
|
"grad_norm": 2.3418192863464355, |
|
"learning_rate": 2.2974203338391502e-06, |
|
"loss": 0.2358, |
|
"step": 2712 |
|
}, |
|
{ |
|
"epoch": 0.7826110455532235, |
|
"grad_norm": 3.0130553245544434, |
|
"learning_rate": 2.2883156297420335e-06, |
|
"loss": 0.2576, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.7834758091394701, |
|
"grad_norm": 4.195178031921387, |
|
"learning_rate": 2.2792109256449168e-06, |
|
"loss": 0.2335, |
|
"step": 2718 |
|
}, |
|
{ |
|
"epoch": 0.7843405727257168, |
|
"grad_norm": 1.9151793718338013, |
|
"learning_rate": 2.2701062215477996e-06, |
|
"loss": 0.2328, |
|
"step": 2721 |
|
}, |
|
{ |
|
"epoch": 0.7852053363119634, |
|
"grad_norm": 2.500737190246582, |
|
"learning_rate": 2.261001517450683e-06, |
|
"loss": 0.2166, |
|
"step": 2724 |
|
}, |
|
{ |
|
"epoch": 0.7860700998982101, |
|
"grad_norm": 2.213944911956787, |
|
"learning_rate": 2.251896813353566e-06, |
|
"loss": 0.2296, |
|
"step": 2727 |
|
}, |
|
{ |
|
"epoch": 0.7869348634844567, |
|
"grad_norm": 4.156373023986816, |
|
"learning_rate": 2.2427921092564494e-06, |
|
"loss": 0.213, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.7877996270707034, |
|
"grad_norm": 1.7524887323379517, |
|
"learning_rate": 2.2336874051593322e-06, |
|
"loss": 0.1983, |
|
"step": 2733 |
|
}, |
|
{ |
|
"epoch": 0.78866439065695, |
|
"grad_norm": 2.1907401084899902, |
|
"learning_rate": 2.2245827010622155e-06, |
|
"loss": 0.2227, |
|
"step": 2736 |
|
}, |
|
{ |
|
"epoch": 0.7895291542431967, |
|
"grad_norm": 6.940857410430908, |
|
"learning_rate": 2.2154779969650988e-06, |
|
"loss": 0.2385, |
|
"step": 2739 |
|
}, |
|
{ |
|
"epoch": 0.7903939178294433, |
|
"grad_norm": 8.225829124450684, |
|
"learning_rate": 2.206373292867982e-06, |
|
"loss": 0.2084, |
|
"step": 2742 |
|
}, |
|
{ |
|
"epoch": 0.79125868141569, |
|
"grad_norm": 1.9167605638504028, |
|
"learning_rate": 2.197268588770865e-06, |
|
"loss": 0.2065, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.7921234450019367, |
|
"grad_norm": 2.273833751678467, |
|
"learning_rate": 2.188163884673748e-06, |
|
"loss": 0.2379, |
|
"step": 2748 |
|
}, |
|
{ |
|
"epoch": 0.7929882085881834, |
|
"grad_norm": 2.6898539066314697, |
|
"learning_rate": 2.1790591805766314e-06, |
|
"loss": 0.2173, |
|
"step": 2751 |
|
}, |
|
{ |
|
"epoch": 0.7938529721744301, |
|
"grad_norm": 3.421860694885254, |
|
"learning_rate": 2.1699544764795147e-06, |
|
"loss": 0.2463, |
|
"step": 2754 |
|
}, |
|
{ |
|
"epoch": 0.7947177357606767, |
|
"grad_norm": 5.2192487716674805, |
|
"learning_rate": 2.1608497723823975e-06, |
|
"loss": 0.238, |
|
"step": 2757 |
|
}, |
|
{ |
|
"epoch": 0.7955824993469234, |
|
"grad_norm": 2.7427492141723633, |
|
"learning_rate": 2.1517450682852808e-06, |
|
"loss": 0.221, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.79644726293317, |
|
"grad_norm": 2.2411558628082275, |
|
"learning_rate": 2.142640364188164e-06, |
|
"loss": 0.2266, |
|
"step": 2763 |
|
}, |
|
{ |
|
"epoch": 0.7973120265194167, |
|
"grad_norm": 3.7260963916778564, |
|
"learning_rate": 2.1335356600910473e-06, |
|
"loss": 0.2538, |
|
"step": 2766 |
|
}, |
|
{ |
|
"epoch": 0.7981767901056633, |
|
"grad_norm": 2.7217321395874023, |
|
"learning_rate": 2.12443095599393e-06, |
|
"loss": 0.2246, |
|
"step": 2769 |
|
}, |
|
{ |
|
"epoch": 0.79904155369191, |
|
"grad_norm": 2.651681900024414, |
|
"learning_rate": 2.1153262518968134e-06, |
|
"loss": 0.2334, |
|
"step": 2772 |
|
}, |
|
{ |
|
"epoch": 0.7999063172781566, |
|
"grad_norm": 1.9291447401046753, |
|
"learning_rate": 2.1062215477996967e-06, |
|
"loss": 0.2122, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.8007710808644033, |
|
"grad_norm": 2.2406091690063477, |
|
"learning_rate": 2.09711684370258e-06, |
|
"loss": 0.219, |
|
"step": 2778 |
|
}, |
|
{ |
|
"epoch": 0.80163584445065, |
|
"grad_norm": 2.099543809890747, |
|
"learning_rate": 2.0880121396054632e-06, |
|
"loss": 0.2376, |
|
"step": 2781 |
|
}, |
|
{ |
|
"epoch": 0.8025006080368966, |
|
"grad_norm": 2.7685112953186035, |
|
"learning_rate": 2.078907435508346e-06, |
|
"loss": 0.2352, |
|
"step": 2784 |
|
}, |
|
{ |
|
"epoch": 0.8033653716231433, |
|
"grad_norm": 2.3922276496887207, |
|
"learning_rate": 2.0698027314112293e-06, |
|
"loss": 0.2482, |
|
"step": 2787 |
|
}, |
|
{ |
|
"epoch": 0.8042301352093899, |
|
"grad_norm": 2.0313024520874023, |
|
"learning_rate": 2.0606980273141126e-06, |
|
"loss": 0.2328, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.8050948987956366, |
|
"grad_norm": 2.234933853149414, |
|
"learning_rate": 2.051593323216996e-06, |
|
"loss": 0.2315, |
|
"step": 2793 |
|
}, |
|
{ |
|
"epoch": 0.8059596623818832, |
|
"grad_norm": 2.251796245574951, |
|
"learning_rate": 2.0424886191198787e-06, |
|
"loss": 0.2669, |
|
"step": 2796 |
|
}, |
|
{ |
|
"epoch": 0.8068244259681299, |
|
"grad_norm": 2.3901267051696777, |
|
"learning_rate": 2.033383915022762e-06, |
|
"loss": 0.2355, |
|
"step": 2799 |
|
}, |
|
{ |
|
"epoch": 0.8071126804968788, |
|
"eval_loss": 0.2166597843170166, |
|
"eval_mse": 0.2166597851237748, |
|
"eval_runtime": 64.6977, |
|
"eval_samples_per_second": 15.457, |
|
"eval_steps_per_second": 3.864, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.8076891895543765, |
|
"grad_norm": 2.007464647293091, |
|
"learning_rate": 2.0242792109256452e-06, |
|
"loss": 0.2071, |
|
"step": 2802 |
|
}, |
|
{ |
|
"epoch": 0.8085539531406232, |
|
"grad_norm": 3.2795073986053467, |
|
"learning_rate": 2.0151745068285285e-06, |
|
"loss": 0.2191, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.8094187167268698, |
|
"grad_norm": 4.439976692199707, |
|
"learning_rate": 2.0060698027314113e-06, |
|
"loss": 0.2236, |
|
"step": 2808 |
|
}, |
|
{ |
|
"epoch": 0.8102834803131165, |
|
"grad_norm": 2.0333492755889893, |
|
"learning_rate": 1.9969650986342946e-06, |
|
"loss": 0.2395, |
|
"step": 2811 |
|
}, |
|
{ |
|
"epoch": 0.8111482438993631, |
|
"grad_norm": 3.058572292327881, |
|
"learning_rate": 1.987860394537178e-06, |
|
"loss": 0.2226, |
|
"step": 2814 |
|
}, |
|
{ |
|
"epoch": 0.8120130074856098, |
|
"grad_norm": 1.869295358657837, |
|
"learning_rate": 1.978755690440061e-06, |
|
"loss": 0.1924, |
|
"step": 2817 |
|
}, |
|
{ |
|
"epoch": 0.8128777710718564, |
|
"grad_norm": 2.6618704795837402, |
|
"learning_rate": 1.969650986342944e-06, |
|
"loss": 0.2401, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.8137425346581031, |
|
"grad_norm": 2.647307872772217, |
|
"learning_rate": 1.9605462822458273e-06, |
|
"loss": 0.2416, |
|
"step": 2823 |
|
}, |
|
{ |
|
"epoch": 0.8146072982443497, |
|
"grad_norm": 2.7761855125427246, |
|
"learning_rate": 1.9514415781487105e-06, |
|
"loss": 0.2248, |
|
"step": 2826 |
|
}, |
|
{ |
|
"epoch": 0.8154720618305964, |
|
"grad_norm": 2.126979112625122, |
|
"learning_rate": 1.9423368740515934e-06, |
|
"loss": 0.2115, |
|
"step": 2829 |
|
}, |
|
{ |
|
"epoch": 0.816336825416843, |
|
"grad_norm": 1.8128594160079956, |
|
"learning_rate": 1.9332321699544766e-06, |
|
"loss": 0.2208, |
|
"step": 2832 |
|
}, |
|
{ |
|
"epoch": 0.8172015890030897, |
|
"grad_norm": 2.527730703353882, |
|
"learning_rate": 1.92412746585736e-06, |
|
"loss": 0.2531, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.8180663525893364, |
|
"grad_norm": 2.3727030754089355, |
|
"learning_rate": 1.9150227617602427e-06, |
|
"loss": 0.205, |
|
"step": 2838 |
|
}, |
|
{ |
|
"epoch": 0.818931116175583, |
|
"grad_norm": 1.696937918663025, |
|
"learning_rate": 1.9059180576631262e-06, |
|
"loss": 0.2091, |
|
"step": 2841 |
|
}, |
|
{ |
|
"epoch": 0.8197958797618297, |
|
"grad_norm": 1.8432416915893555, |
|
"learning_rate": 1.8968133535660093e-06, |
|
"loss": 0.2378, |
|
"step": 2844 |
|
}, |
|
{ |
|
"epoch": 0.8206606433480763, |
|
"grad_norm": 3.7460954189300537, |
|
"learning_rate": 1.8877086494688923e-06, |
|
"loss": 0.2068, |
|
"step": 2847 |
|
}, |
|
{ |
|
"epoch": 0.821525406934323, |
|
"grad_norm": 3.467381000518799, |
|
"learning_rate": 1.8786039453717756e-06, |
|
"loss": 0.2033, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.8223901705205696, |
|
"grad_norm": 1.7389558553695679, |
|
"learning_rate": 1.8694992412746589e-06, |
|
"loss": 0.2268, |
|
"step": 2853 |
|
}, |
|
{ |
|
"epoch": 0.8232549341068163, |
|
"grad_norm": 2.4290714263916016, |
|
"learning_rate": 1.860394537177542e-06, |
|
"loss": 0.2535, |
|
"step": 2856 |
|
}, |
|
{ |
|
"epoch": 0.8241196976930629, |
|
"grad_norm": 1.9990437030792236, |
|
"learning_rate": 1.851289833080425e-06, |
|
"loss": 0.2351, |
|
"step": 2859 |
|
}, |
|
{ |
|
"epoch": 0.8249844612793096, |
|
"grad_norm": 2.0908126831054688, |
|
"learning_rate": 1.8421851289833082e-06, |
|
"loss": 0.2604, |
|
"step": 2862 |
|
}, |
|
{ |
|
"epoch": 0.8258492248655563, |
|
"grad_norm": 1.664847731590271, |
|
"learning_rate": 1.8330804248861913e-06, |
|
"loss": 0.2202, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.826713988451803, |
|
"grad_norm": 2.0932564735412598, |
|
"learning_rate": 1.8239757207890745e-06, |
|
"loss": 0.2398, |
|
"step": 2868 |
|
}, |
|
{ |
|
"epoch": 0.8275787520380496, |
|
"grad_norm": 1.6450400352478027, |
|
"learning_rate": 1.8148710166919576e-06, |
|
"loss": 0.2132, |
|
"step": 2871 |
|
}, |
|
{ |
|
"epoch": 0.8284435156242963, |
|
"grad_norm": 3.0486483573913574, |
|
"learning_rate": 1.8057663125948407e-06, |
|
"loss": 0.2488, |
|
"step": 2874 |
|
}, |
|
{ |
|
"epoch": 0.829308279210543, |
|
"grad_norm": 4.125293254852295, |
|
"learning_rate": 1.796661608497724e-06, |
|
"loss": 0.229, |
|
"step": 2877 |
|
}, |
|
{ |
|
"epoch": 0.8301730427967896, |
|
"grad_norm": 2.4615678787231445, |
|
"learning_rate": 1.7875569044006072e-06, |
|
"loss": 0.2302, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.8310378063830363, |
|
"grad_norm": 2.3922412395477295, |
|
"learning_rate": 1.7784522003034905e-06, |
|
"loss": 0.2023, |
|
"step": 2883 |
|
}, |
|
{ |
|
"epoch": 0.8319025699692829, |
|
"grad_norm": 2.2707550525665283, |
|
"learning_rate": 1.7693474962063733e-06, |
|
"loss": 0.2125, |
|
"step": 2886 |
|
}, |
|
{ |
|
"epoch": 0.8327673335555296, |
|
"grad_norm": 2.5821027755737305, |
|
"learning_rate": 1.7602427921092566e-06, |
|
"loss": 0.2022, |
|
"step": 2889 |
|
}, |
|
{ |
|
"epoch": 0.8336320971417762, |
|
"grad_norm": 1.5880707502365112, |
|
"learning_rate": 1.7511380880121398e-06, |
|
"loss": 0.2494, |
|
"step": 2892 |
|
}, |
|
{ |
|
"epoch": 0.8344968607280229, |
|
"grad_norm": 2.1693811416625977, |
|
"learning_rate": 1.742033383915023e-06, |
|
"loss": 0.2364, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.8353616243142695, |
|
"grad_norm": 2.134542465209961, |
|
"learning_rate": 1.732928679817906e-06, |
|
"loss": 0.2231, |
|
"step": 2898 |
|
}, |
|
{ |
|
"epoch": 0.8359381333717673, |
|
"eval_loss": 0.2168218344449997, |
|
"eval_mse": 0.21682184532732934, |
|
"eval_runtime": 64.4646, |
|
"eval_samples_per_second": 15.512, |
|
"eval_steps_per_second": 3.878, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.8362263879005162, |
|
"grad_norm": 2.96658992767334, |
|
"learning_rate": 1.7238239757207892e-06, |
|
"loss": 0.2326, |
|
"step": 2901 |
|
}, |
|
{ |
|
"epoch": 0.8370911514867628, |
|
"grad_norm": 2.925720453262329, |
|
"learning_rate": 1.7147192716236725e-06, |
|
"loss": 0.2265, |
|
"step": 2904 |
|
}, |
|
{ |
|
"epoch": 0.8379559150730095, |
|
"grad_norm": 2.380636692047119, |
|
"learning_rate": 1.7056145675265557e-06, |
|
"loss": 0.2272, |
|
"step": 2907 |
|
}, |
|
{ |
|
"epoch": 0.8388206786592561, |
|
"grad_norm": 1.5978946685791016, |
|
"learning_rate": 1.6965098634294386e-06, |
|
"loss": 0.2238, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.8396854422455028, |
|
"grad_norm": 2.8584859371185303, |
|
"learning_rate": 1.6874051593323218e-06, |
|
"loss": 0.1995, |
|
"step": 2913 |
|
}, |
|
{ |
|
"epoch": 0.8405502058317494, |
|
"grad_norm": 2.5383777618408203, |
|
"learning_rate": 1.6783004552352051e-06, |
|
"loss": 0.247, |
|
"step": 2916 |
|
}, |
|
{ |
|
"epoch": 0.8414149694179961, |
|
"grad_norm": 3.073789119720459, |
|
"learning_rate": 1.6691957511380882e-06, |
|
"loss": 0.2323, |
|
"step": 2919 |
|
}, |
|
{ |
|
"epoch": 0.8422797330042427, |
|
"grad_norm": 3.0712456703186035, |
|
"learning_rate": 1.6600910470409712e-06, |
|
"loss": 0.2123, |
|
"step": 2922 |
|
}, |
|
{ |
|
"epoch": 0.8431444965904894, |
|
"grad_norm": 2.1077163219451904, |
|
"learning_rate": 1.6509863429438545e-06, |
|
"loss": 0.1908, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.844009260176736, |
|
"grad_norm": 3.4034552574157715, |
|
"learning_rate": 1.6418816388467375e-06, |
|
"loss": 0.2366, |
|
"step": 2928 |
|
}, |
|
{ |
|
"epoch": 0.8448740237629827, |
|
"grad_norm": 2.9055449962615967, |
|
"learning_rate": 1.6327769347496208e-06, |
|
"loss": 0.2156, |
|
"step": 2931 |
|
}, |
|
{ |
|
"epoch": 0.8457387873492294, |
|
"grad_norm": 2.108920097351074, |
|
"learning_rate": 1.6236722306525039e-06, |
|
"loss": 0.2424, |
|
"step": 2934 |
|
}, |
|
{ |
|
"epoch": 0.846603550935476, |
|
"grad_norm": 2.038133144378662, |
|
"learning_rate": 1.614567526555387e-06, |
|
"loss": 0.2302, |
|
"step": 2937 |
|
}, |
|
{ |
|
"epoch": 0.8474683145217227, |
|
"grad_norm": 4.533400058746338, |
|
"learning_rate": 1.6054628224582702e-06, |
|
"loss": 0.1981, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.8483330781079693, |
|
"grad_norm": 3.4339189529418945, |
|
"learning_rate": 1.5963581183611534e-06, |
|
"loss": 0.2072, |
|
"step": 2943 |
|
}, |
|
{ |
|
"epoch": 0.849197841694216, |
|
"grad_norm": 2.856205463409424, |
|
"learning_rate": 1.5872534142640367e-06, |
|
"loss": 0.2157, |
|
"step": 2946 |
|
}, |
|
{ |
|
"epoch": 0.8500626052804626, |
|
"grad_norm": 3.1973226070404053, |
|
"learning_rate": 1.5781487101669196e-06, |
|
"loss": 0.2361, |
|
"step": 2949 |
|
}, |
|
{ |
|
"epoch": 0.8509273688667093, |
|
"grad_norm": 2.0900986194610596, |
|
"learning_rate": 1.5690440060698028e-06, |
|
"loss": 0.1873, |
|
"step": 2952 |
|
}, |
|
{ |
|
"epoch": 0.8517921324529559, |
|
"grad_norm": 2.7864749431610107, |
|
"learning_rate": 1.559939301972686e-06, |
|
"loss": 0.2268, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.8526568960392026, |
|
"grad_norm": 1.8518025875091553, |
|
"learning_rate": 1.5508345978755694e-06, |
|
"loss": 0.2399, |
|
"step": 2958 |
|
}, |
|
{ |
|
"epoch": 0.8535216596254492, |
|
"grad_norm": 1.9978440999984741, |
|
"learning_rate": 1.5417298937784522e-06, |
|
"loss": 0.2445, |
|
"step": 2961 |
|
}, |
|
{ |
|
"epoch": 0.8543864232116959, |
|
"grad_norm": 3.5526225566864014, |
|
"learning_rate": 1.5326251896813355e-06, |
|
"loss": 0.2132, |
|
"step": 2964 |
|
}, |
|
{ |
|
"epoch": 0.8552511867979425, |
|
"grad_norm": 2.19614315032959, |
|
"learning_rate": 1.5235204855842187e-06, |
|
"loss": 0.2329, |
|
"step": 2967 |
|
}, |
|
{ |
|
"epoch": 0.8561159503841892, |
|
"grad_norm": 2.024761438369751, |
|
"learning_rate": 1.514415781487102e-06, |
|
"loss": 0.2351, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.8569807139704358, |
|
"grad_norm": 2.374737024307251, |
|
"learning_rate": 1.5053110773899848e-06, |
|
"loss": 0.2085, |
|
"step": 2973 |
|
}, |
|
{ |
|
"epoch": 0.8578454775566825, |
|
"grad_norm": 2.209737539291382, |
|
"learning_rate": 1.496206373292868e-06, |
|
"loss": 0.1754, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 0.8587102411429292, |
|
"grad_norm": 3.4437668323516846, |
|
"learning_rate": 1.4871016691957514e-06, |
|
"loss": 0.2529, |
|
"step": 2979 |
|
}, |
|
{ |
|
"epoch": 0.8595750047291759, |
|
"grad_norm": 1.9130094051361084, |
|
"learning_rate": 1.4779969650986344e-06, |
|
"loss": 0.2431, |
|
"step": 2982 |
|
}, |
|
{ |
|
"epoch": 0.8604397683154226, |
|
"grad_norm": 1.7369916439056396, |
|
"learning_rate": 1.4688922610015175e-06, |
|
"loss": 0.2131, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.8613045319016692, |
|
"grad_norm": 3.240520477294922, |
|
"learning_rate": 1.4597875569044007e-06, |
|
"loss": 0.2401, |
|
"step": 2988 |
|
}, |
|
{ |
|
"epoch": 0.8621692954879159, |
|
"grad_norm": 2.4426064491271973, |
|
"learning_rate": 1.4506828528072838e-06, |
|
"loss": 0.2089, |
|
"step": 2991 |
|
}, |
|
{ |
|
"epoch": 0.8630340590741625, |
|
"grad_norm": 1.9682718515396118, |
|
"learning_rate": 1.441578148710167e-06, |
|
"loss": 0.1858, |
|
"step": 2994 |
|
}, |
|
{ |
|
"epoch": 0.8638988226604092, |
|
"grad_norm": 1.8037463426589966, |
|
"learning_rate": 1.4324734446130503e-06, |
|
"loss": 0.2126, |
|
"step": 2997 |
|
}, |
|
{ |
|
"epoch": 0.8647635862466558, |
|
"grad_norm": 3.8557345867156982, |
|
"learning_rate": 1.4233687405159332e-06, |
|
"loss": 0.2274, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8647635862466558, |
|
"eval_loss": 0.22426578402519226, |
|
"eval_mse": 0.22426578066963704, |
|
"eval_runtime": 64.2143, |
|
"eval_samples_per_second": 15.573, |
|
"eval_steps_per_second": 3.893, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8656283498329025, |
|
"grad_norm": 3.097010850906372, |
|
"learning_rate": 1.4142640364188164e-06, |
|
"loss": 0.229, |
|
"step": 3003 |
|
}, |
|
{ |
|
"epoch": 0.8664931134191491, |
|
"grad_norm": 3.1431732177734375, |
|
"learning_rate": 1.4051593323216997e-06, |
|
"loss": 0.2437, |
|
"step": 3006 |
|
}, |
|
{ |
|
"epoch": 0.8673578770053958, |
|
"grad_norm": 1.8530339002609253, |
|
"learning_rate": 1.396054628224583e-06, |
|
"loss": 0.2201, |
|
"step": 3009 |
|
}, |
|
{ |
|
"epoch": 0.8682226405916424, |
|
"grad_norm": 2.807565927505493, |
|
"learning_rate": 1.3869499241274658e-06, |
|
"loss": 0.2237, |
|
"step": 3012 |
|
}, |
|
{ |
|
"epoch": 0.8690874041778891, |
|
"grad_norm": 3.1385457515716553, |
|
"learning_rate": 1.377845220030349e-06, |
|
"loss": 0.2645, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.8699521677641358, |
|
"grad_norm": 4.684901714324951, |
|
"learning_rate": 1.3687405159332323e-06, |
|
"loss": 0.2562, |
|
"step": 3018 |
|
}, |
|
{ |
|
"epoch": 0.8708169313503824, |
|
"grad_norm": 2.9381299018859863, |
|
"learning_rate": 1.3596358118361156e-06, |
|
"loss": 0.2408, |
|
"step": 3021 |
|
}, |
|
{ |
|
"epoch": 0.8716816949366291, |
|
"grad_norm": 1.9034892320632935, |
|
"learning_rate": 1.3505311077389985e-06, |
|
"loss": 0.2087, |
|
"step": 3024 |
|
}, |
|
{ |
|
"epoch": 0.8725464585228757, |
|
"grad_norm": 1.968497395515442, |
|
"learning_rate": 1.3414264036418817e-06, |
|
"loss": 0.2028, |
|
"step": 3027 |
|
}, |
|
{ |
|
"epoch": 0.8734112221091224, |
|
"grad_norm": 2.3359177112579346, |
|
"learning_rate": 1.332321699544765e-06, |
|
"loss": 0.2305, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.874275985695369, |
|
"grad_norm": 1.7949774265289307, |
|
"learning_rate": 1.3232169954476482e-06, |
|
"loss": 0.229, |
|
"step": 3033 |
|
}, |
|
{ |
|
"epoch": 0.8751407492816157, |
|
"grad_norm": 2.1725058555603027, |
|
"learning_rate": 1.314112291350531e-06, |
|
"loss": 0.2158, |
|
"step": 3036 |
|
}, |
|
{ |
|
"epoch": 0.8760055128678623, |
|
"grad_norm": 2.5111076831817627, |
|
"learning_rate": 1.3050075872534144e-06, |
|
"loss": 0.2032, |
|
"step": 3039 |
|
}, |
|
{ |
|
"epoch": 0.876870276454109, |
|
"grad_norm": 1.4585468769073486, |
|
"learning_rate": 1.2959028831562976e-06, |
|
"loss": 0.2064, |
|
"step": 3042 |
|
}, |
|
{ |
|
"epoch": 0.8777350400403556, |
|
"grad_norm": 2.4684135913848877, |
|
"learning_rate": 1.2867981790591807e-06, |
|
"loss": 0.2069, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.8785998036266023, |
|
"grad_norm": 2.0519392490386963, |
|
"learning_rate": 1.277693474962064e-06, |
|
"loss": 0.2172, |
|
"step": 3048 |
|
}, |
|
{ |
|
"epoch": 0.8794645672128489, |
|
"grad_norm": 2.9718551635742188, |
|
"learning_rate": 1.268588770864947e-06, |
|
"loss": 0.2218, |
|
"step": 3051 |
|
}, |
|
{ |
|
"epoch": 0.8803293307990956, |
|
"grad_norm": 1.8732651472091675, |
|
"learning_rate": 1.25948406676783e-06, |
|
"loss": 0.2337, |
|
"step": 3054 |
|
}, |
|
{ |
|
"epoch": 0.8811940943853422, |
|
"grad_norm": 3.006303071975708, |
|
"learning_rate": 1.2503793626707133e-06, |
|
"loss": 0.2238, |
|
"step": 3057 |
|
}, |
|
{ |
|
"epoch": 0.8820588579715889, |
|
"grad_norm": 5.645898818969727, |
|
"learning_rate": 1.2412746585735964e-06, |
|
"loss": 0.2583, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.8829236215578355, |
|
"grad_norm": 2.2179112434387207, |
|
"learning_rate": 1.2321699544764796e-06, |
|
"loss": 0.2308, |
|
"step": 3063 |
|
}, |
|
{ |
|
"epoch": 0.8837883851440822, |
|
"grad_norm": 3.6568005084991455, |
|
"learning_rate": 1.2230652503793627e-06, |
|
"loss": 0.2128, |
|
"step": 3066 |
|
}, |
|
{ |
|
"epoch": 0.8846531487303289, |
|
"grad_norm": 3.0393970012664795, |
|
"learning_rate": 1.213960546282246e-06, |
|
"loss": 0.2233, |
|
"step": 3069 |
|
}, |
|
{ |
|
"epoch": 0.8855179123165755, |
|
"grad_norm": 1.949182391166687, |
|
"learning_rate": 1.204855842185129e-06, |
|
"loss": 0.2409, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 0.8863826759028222, |
|
"grad_norm": 1.6602507829666138, |
|
"learning_rate": 1.1957511380880123e-06, |
|
"loss": 0.2312, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.8872474394890688, |
|
"grad_norm": 2.22861909866333, |
|
"learning_rate": 1.1866464339908953e-06, |
|
"loss": 0.2539, |
|
"step": 3078 |
|
}, |
|
{ |
|
"epoch": 0.8881122030753155, |
|
"grad_norm": 3.126702070236206, |
|
"learning_rate": 1.1775417298937786e-06, |
|
"loss": 0.2566, |
|
"step": 3081 |
|
}, |
|
{ |
|
"epoch": 0.8889769666615621, |
|
"grad_norm": 2.471876382827759, |
|
"learning_rate": 1.1684370257966617e-06, |
|
"loss": 0.2042, |
|
"step": 3084 |
|
}, |
|
{ |
|
"epoch": 0.8898417302478088, |
|
"grad_norm": 2.3772592544555664, |
|
"learning_rate": 1.159332321699545e-06, |
|
"loss": 0.2595, |
|
"step": 3087 |
|
}, |
|
{ |
|
"epoch": 0.8907064938340554, |
|
"grad_norm": 1.8966178894042969, |
|
"learning_rate": 1.150227617602428e-06, |
|
"loss": 0.2255, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.8915712574203021, |
|
"grad_norm": 3.4512031078338623, |
|
"learning_rate": 1.1411229135053112e-06, |
|
"loss": 0.2417, |
|
"step": 3093 |
|
}, |
|
{ |
|
"epoch": 0.8924360210065487, |
|
"grad_norm": 9.493666648864746, |
|
"learning_rate": 1.1320182094081943e-06, |
|
"loss": 0.2244, |
|
"step": 3096 |
|
}, |
|
{ |
|
"epoch": 0.8933007845927955, |
|
"grad_norm": 2.2911179065704346, |
|
"learning_rate": 1.1229135053110776e-06, |
|
"loss": 0.2287, |
|
"step": 3099 |
|
}, |
|
{ |
|
"epoch": 0.8935890391215443, |
|
"eval_loss": 0.22029922902584076, |
|
"eval_mse": 0.2202992255240679, |
|
"eval_runtime": 64.4714, |
|
"eval_samples_per_second": 15.511, |
|
"eval_steps_per_second": 3.878, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.8941655481790421, |
|
"grad_norm": 2.2213857173919678, |
|
"learning_rate": 1.1138088012139606e-06, |
|
"loss": 0.1838, |
|
"step": 3102 |
|
}, |
|
{ |
|
"epoch": 0.8950303117652888, |
|
"grad_norm": 1.7121168375015259, |
|
"learning_rate": 1.1047040971168439e-06, |
|
"loss": 0.2132, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.8958950753515355, |
|
"grad_norm": 2.8772072792053223, |
|
"learning_rate": 1.095599393019727e-06, |
|
"loss": 0.223, |
|
"step": 3108 |
|
}, |
|
{ |
|
"epoch": 0.8967598389377821, |
|
"grad_norm": 1.947067141532898, |
|
"learning_rate": 1.08649468892261e-06, |
|
"loss": 0.2117, |
|
"step": 3111 |
|
}, |
|
{ |
|
"epoch": 0.8976246025240288, |
|
"grad_norm": 2.486713171005249, |
|
"learning_rate": 1.0773899848254933e-06, |
|
"loss": 0.2476, |
|
"step": 3114 |
|
}, |
|
{ |
|
"epoch": 0.8984893661102754, |
|
"grad_norm": 2.81208872795105, |
|
"learning_rate": 1.0682852807283763e-06, |
|
"loss": 0.2077, |
|
"step": 3117 |
|
}, |
|
{ |
|
"epoch": 0.8993541296965221, |
|
"grad_norm": 3.0056064128875732, |
|
"learning_rate": 1.0591805766312596e-06, |
|
"loss": 0.2419, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.9002188932827687, |
|
"grad_norm": 1.8039710521697998, |
|
"learning_rate": 1.0500758725341426e-06, |
|
"loss": 0.2211, |
|
"step": 3123 |
|
}, |
|
{ |
|
"epoch": 0.9010836568690154, |
|
"grad_norm": 2.9514808654785156, |
|
"learning_rate": 1.0409711684370259e-06, |
|
"loss": 0.2669, |
|
"step": 3126 |
|
}, |
|
{ |
|
"epoch": 0.901948420455262, |
|
"grad_norm": 1.9579650163650513, |
|
"learning_rate": 1.031866464339909e-06, |
|
"loss": 0.2693, |
|
"step": 3129 |
|
}, |
|
{ |
|
"epoch": 0.9028131840415087, |
|
"grad_norm": 3.1238980293273926, |
|
"learning_rate": 1.0227617602427922e-06, |
|
"loss": 0.1991, |
|
"step": 3132 |
|
}, |
|
{ |
|
"epoch": 0.9036779476277553, |
|
"grad_norm": 4.342383861541748, |
|
"learning_rate": 1.0136570561456753e-06, |
|
"loss": 0.2167, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.904542711214002, |
|
"grad_norm": 2.0083911418914795, |
|
"learning_rate": 1.0045523520485585e-06, |
|
"loss": 0.1924, |
|
"step": 3138 |
|
}, |
|
{ |
|
"epoch": 0.9054074748002486, |
|
"grad_norm": 2.090289354324341, |
|
"learning_rate": 9.954476479514416e-07, |
|
"loss": 0.1962, |
|
"step": 3141 |
|
}, |
|
{ |
|
"epoch": 0.9062722383864953, |
|
"grad_norm": 3.258606433868408, |
|
"learning_rate": 9.863429438543249e-07, |
|
"loss": 0.2439, |
|
"step": 3144 |
|
}, |
|
{ |
|
"epoch": 0.9071370019727419, |
|
"grad_norm": 2.009249210357666, |
|
"learning_rate": 9.77238239757208e-07, |
|
"loss": 0.2197, |
|
"step": 3147 |
|
}, |
|
{ |
|
"epoch": 0.9080017655589886, |
|
"grad_norm": 1.645767331123352, |
|
"learning_rate": 9.681335356600912e-07, |
|
"loss": 0.2283, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.9088665291452352, |
|
"grad_norm": 2.6519992351531982, |
|
"learning_rate": 9.590288315629742e-07, |
|
"loss": 0.226, |
|
"step": 3153 |
|
}, |
|
{ |
|
"epoch": 0.9097312927314819, |
|
"grad_norm": 2.1228978633880615, |
|
"learning_rate": 9.499241274658574e-07, |
|
"loss": 0.2424, |
|
"step": 3156 |
|
}, |
|
{ |
|
"epoch": 0.9105960563177286, |
|
"grad_norm": 1.9438750743865967, |
|
"learning_rate": 9.408194233687407e-07, |
|
"loss": 0.2418, |
|
"step": 3159 |
|
}, |
|
{ |
|
"epoch": 0.9114608199039752, |
|
"grad_norm": 2.031235933303833, |
|
"learning_rate": 9.317147192716237e-07, |
|
"loss": 0.2006, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 0.9123255834902219, |
|
"grad_norm": 2.0259816646575928, |
|
"learning_rate": 9.22610015174507e-07, |
|
"loss": 0.2174, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.9131903470764685, |
|
"grad_norm": 2.0144436359405518, |
|
"learning_rate": 9.1350531107739e-07, |
|
"loss": 0.2115, |
|
"step": 3168 |
|
}, |
|
{ |
|
"epoch": 0.9140551106627152, |
|
"grad_norm": 2.4154086112976074, |
|
"learning_rate": 9.044006069802733e-07, |
|
"loss": 0.2443, |
|
"step": 3171 |
|
}, |
|
{ |
|
"epoch": 0.9149198742489618, |
|
"grad_norm": 2.0454399585723877, |
|
"learning_rate": 8.952959028831563e-07, |
|
"loss": 0.2032, |
|
"step": 3174 |
|
}, |
|
{ |
|
"epoch": 0.9157846378352085, |
|
"grad_norm": 3.3808867931365967, |
|
"learning_rate": 8.861911987860396e-07, |
|
"loss": 0.2256, |
|
"step": 3177 |
|
}, |
|
{ |
|
"epoch": 0.9166494014214551, |
|
"grad_norm": 4.769220352172852, |
|
"learning_rate": 8.770864946889227e-07, |
|
"loss": 0.2395, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.9175141650077018, |
|
"grad_norm": 3.655897855758667, |
|
"learning_rate": 8.679817905918058e-07, |
|
"loss": 0.2491, |
|
"step": 3183 |
|
}, |
|
{ |
|
"epoch": 0.9183789285939484, |
|
"grad_norm": 3.9675004482269287, |
|
"learning_rate": 8.58877086494689e-07, |
|
"loss": 0.2262, |
|
"step": 3186 |
|
}, |
|
{ |
|
"epoch": 0.9192436921801951, |
|
"grad_norm": 3.2647206783294678, |
|
"learning_rate": 8.497723823975721e-07, |
|
"loss": 0.222, |
|
"step": 3189 |
|
}, |
|
{ |
|
"epoch": 0.9201084557664417, |
|
"grad_norm": 2.699422597885132, |
|
"learning_rate": 8.406676783004553e-07, |
|
"loss": 0.225, |
|
"step": 3192 |
|
}, |
|
{ |
|
"epoch": 0.9209732193526884, |
|
"grad_norm": 2.65602970123291, |
|
"learning_rate": 8.315629742033385e-07, |
|
"loss": 0.249, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.921837982938935, |
|
"grad_norm": 2.1228225231170654, |
|
"learning_rate": 8.224582701062215e-07, |
|
"loss": 0.261, |
|
"step": 3198 |
|
}, |
|
{ |
|
"epoch": 0.9224144919964329, |
|
"eval_loss": 0.21858178079128265, |
|
"eval_mse": 0.21858179174736142, |
|
"eval_runtime": 64.5701, |
|
"eval_samples_per_second": 15.487, |
|
"eval_steps_per_second": 3.872, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.9227027465251817, |
|
"grad_norm": 3.9595842361450195, |
|
"learning_rate": 8.133535660091048e-07, |
|
"loss": 0.2243, |
|
"step": 3201 |
|
}, |
|
{ |
|
"epoch": 0.9235675101114283, |
|
"grad_norm": 2.7755258083343506, |
|
"learning_rate": 8.042488619119878e-07, |
|
"loss": 0.2175, |
|
"step": 3204 |
|
}, |
|
{ |
|
"epoch": 0.924432273697675, |
|
"grad_norm": 2.876634120941162, |
|
"learning_rate": 7.951441578148711e-07, |
|
"loss": 0.2263, |
|
"step": 3207 |
|
}, |
|
{ |
|
"epoch": 0.9252970372839217, |
|
"grad_norm": 1.809976577758789, |
|
"learning_rate": 7.860394537177542e-07, |
|
"loss": 0.2161, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.9261618008701683, |
|
"grad_norm": 2.5012404918670654, |
|
"learning_rate": 7.769347496206374e-07, |
|
"loss": 0.2412, |
|
"step": 3213 |
|
}, |
|
{ |
|
"epoch": 0.9270265644564151, |
|
"grad_norm": 2.09769868850708, |
|
"learning_rate": 7.678300455235206e-07, |
|
"loss": 0.2556, |
|
"step": 3216 |
|
}, |
|
{ |
|
"epoch": 0.9278913280426617, |
|
"grad_norm": 2.4902303218841553, |
|
"learning_rate": 7.587253414264036e-07, |
|
"loss": 0.2184, |
|
"step": 3219 |
|
}, |
|
{ |
|
"epoch": 0.9287560916289084, |
|
"grad_norm": 1.705115795135498, |
|
"learning_rate": 7.496206373292869e-07, |
|
"loss": 0.2104, |
|
"step": 3222 |
|
}, |
|
{ |
|
"epoch": 0.929620855215155, |
|
"grad_norm": 2.0965850353240967, |
|
"learning_rate": 7.4051593323217e-07, |
|
"loss": 0.2282, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.9304856188014017, |
|
"grad_norm": 1.9065147638320923, |
|
"learning_rate": 7.314112291350532e-07, |
|
"loss": 0.2261, |
|
"step": 3228 |
|
}, |
|
{ |
|
"epoch": 0.9313503823876483, |
|
"grad_norm": 1.8584680557250977, |
|
"learning_rate": 7.223065250379363e-07, |
|
"loss": 0.1955, |
|
"step": 3231 |
|
}, |
|
{ |
|
"epoch": 0.932215145973895, |
|
"grad_norm": 2.8803727626800537, |
|
"learning_rate": 7.132018209408196e-07, |
|
"loss": 0.2133, |
|
"step": 3234 |
|
}, |
|
{ |
|
"epoch": 0.9330799095601416, |
|
"grad_norm": 2.6942360401153564, |
|
"learning_rate": 7.040971168437026e-07, |
|
"loss": 0.21, |
|
"step": 3237 |
|
}, |
|
{ |
|
"epoch": 0.9339446731463883, |
|
"grad_norm": 3.0036349296569824, |
|
"learning_rate": 6.949924127465859e-07, |
|
"loss": 0.2172, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.934809436732635, |
|
"grad_norm": 3.479217290878296, |
|
"learning_rate": 6.858877086494689e-07, |
|
"loss": 0.222, |
|
"step": 3243 |
|
}, |
|
{ |
|
"epoch": 0.9356742003188816, |
|
"grad_norm": 8.07664680480957, |
|
"learning_rate": 6.767830045523521e-07, |
|
"loss": 0.2236, |
|
"step": 3246 |
|
}, |
|
{ |
|
"epoch": 0.9365389639051283, |
|
"grad_norm": 3.184757709503174, |
|
"learning_rate": 6.676783004552352e-07, |
|
"loss": 0.2471, |
|
"step": 3249 |
|
}, |
|
{ |
|
"epoch": 0.9374037274913749, |
|
"grad_norm": 1.7666175365447998, |
|
"learning_rate": 6.585735963581184e-07, |
|
"loss": 0.2145, |
|
"step": 3252 |
|
}, |
|
{ |
|
"epoch": 0.9382684910776216, |
|
"grad_norm": 1.856346607208252, |
|
"learning_rate": 6.494688922610016e-07, |
|
"loss": 0.2334, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.9391332546638682, |
|
"grad_norm": 2.367562770843506, |
|
"learning_rate": 6.403641881638847e-07, |
|
"loss": 0.2301, |
|
"step": 3258 |
|
}, |
|
{ |
|
"epoch": 0.9399980182501149, |
|
"grad_norm": 2.3909690380096436, |
|
"learning_rate": 6.312594840667678e-07, |
|
"loss": 0.2305, |
|
"step": 3261 |
|
}, |
|
{ |
|
"epoch": 0.9408627818363615, |
|
"grad_norm": 2.0632777214050293, |
|
"learning_rate": 6.22154779969651e-07, |
|
"loss": 0.2083, |
|
"step": 3264 |
|
}, |
|
{ |
|
"epoch": 0.9417275454226082, |
|
"grad_norm": 2.854069471359253, |
|
"learning_rate": 6.130500758725342e-07, |
|
"loss": 0.2112, |
|
"step": 3267 |
|
}, |
|
{ |
|
"epoch": 0.9425923090088548, |
|
"grad_norm": 3.0743472576141357, |
|
"learning_rate": 6.039453717754174e-07, |
|
"loss": 0.225, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.9434570725951015, |
|
"grad_norm": 1.8354507684707642, |
|
"learning_rate": 5.948406676783005e-07, |
|
"loss": 0.2067, |
|
"step": 3273 |
|
}, |
|
{ |
|
"epoch": 0.9443218361813481, |
|
"grad_norm": 7.717159748077393, |
|
"learning_rate": 5.857359635811837e-07, |
|
"loss": 0.2307, |
|
"step": 3276 |
|
}, |
|
{ |
|
"epoch": 0.9451865997675948, |
|
"grad_norm": 2.406531572341919, |
|
"learning_rate": 5.766312594840668e-07, |
|
"loss": 0.2245, |
|
"step": 3279 |
|
}, |
|
{ |
|
"epoch": 0.9460513633538414, |
|
"grad_norm": 2.0075788497924805, |
|
"learning_rate": 5.675265553869499e-07, |
|
"loss": 0.227, |
|
"step": 3282 |
|
}, |
|
{ |
|
"epoch": 0.9469161269400881, |
|
"grad_norm": 1.8937417268753052, |
|
"learning_rate": 5.584218512898331e-07, |
|
"loss": 0.214, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.9477808905263347, |
|
"grad_norm": 1.6452524662017822, |
|
"learning_rate": 5.493171471927162e-07, |
|
"loss": 0.2088, |
|
"step": 3288 |
|
}, |
|
{ |
|
"epoch": 0.9486456541125814, |
|
"grad_norm": 1.942367672920227, |
|
"learning_rate": 5.402124430955994e-07, |
|
"loss": 0.2008, |
|
"step": 3291 |
|
}, |
|
{ |
|
"epoch": 0.949510417698828, |
|
"grad_norm": 1.6608052253723145, |
|
"learning_rate": 5.311077389984825e-07, |
|
"loss": 0.2131, |
|
"step": 3294 |
|
}, |
|
{ |
|
"epoch": 0.9503751812850747, |
|
"grad_norm": 1.681344747543335, |
|
"learning_rate": 5.220030349013658e-07, |
|
"loss": 0.2093, |
|
"step": 3297 |
|
}, |
|
{ |
|
"epoch": 0.9512399448713214, |
|
"grad_norm": 2.0465240478515625, |
|
"learning_rate": 5.12898330804249e-07, |
|
"loss": 0.2187, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.9512399448713214, |
|
"eval_loss": 0.21758121252059937, |
|
"eval_mse": 0.21758120648143814, |
|
"eval_runtime": 64.738, |
|
"eval_samples_per_second": 15.447, |
|
"eval_steps_per_second": 3.862, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.952104708457568, |
|
"grad_norm": 2.127420425415039, |
|
"learning_rate": 5.037936267071321e-07, |
|
"loss": 0.267, |
|
"step": 3303 |
|
}, |
|
{ |
|
"epoch": 0.9529694720438147, |
|
"grad_norm": 1.9527215957641602, |
|
"learning_rate": 4.946889226100153e-07, |
|
"loss": 0.2304, |
|
"step": 3306 |
|
}, |
|
{ |
|
"epoch": 0.9538342356300613, |
|
"grad_norm": 2.5131490230560303, |
|
"learning_rate": 4.855842185128983e-07, |
|
"loss": 0.1996, |
|
"step": 3309 |
|
}, |
|
{ |
|
"epoch": 0.954698999216308, |
|
"grad_norm": 1.555931568145752, |
|
"learning_rate": 4.7647951441578155e-07, |
|
"loss": 0.2054, |
|
"step": 3312 |
|
}, |
|
{ |
|
"epoch": 0.9555637628025546, |
|
"grad_norm": 2.3513290882110596, |
|
"learning_rate": 4.673748103186647e-07, |
|
"loss": 0.2015, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.9564285263888013, |
|
"grad_norm": 4.119522571563721, |
|
"learning_rate": 4.582701062215478e-07, |
|
"loss": 0.1802, |
|
"step": 3318 |
|
}, |
|
{ |
|
"epoch": 0.9572932899750479, |
|
"grad_norm": 2.0488264560699463, |
|
"learning_rate": 4.49165402124431e-07, |
|
"loss": 0.218, |
|
"step": 3321 |
|
}, |
|
{ |
|
"epoch": 0.9581580535612946, |
|
"grad_norm": 4.189717769622803, |
|
"learning_rate": 4.4006069802731414e-07, |
|
"loss": 0.2439, |
|
"step": 3324 |
|
}, |
|
{ |
|
"epoch": 0.9590228171475412, |
|
"grad_norm": 3.0399768352508545, |
|
"learning_rate": 4.309559939301973e-07, |
|
"loss": 0.2606, |
|
"step": 3327 |
|
}, |
|
{ |
|
"epoch": 0.9598875807337879, |
|
"grad_norm": 3.3942065238952637, |
|
"learning_rate": 4.2185128983308046e-07, |
|
"loss": 0.2137, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.9607523443200346, |
|
"grad_norm": 2.329697608947754, |
|
"learning_rate": 4.127465857359636e-07, |
|
"loss": 0.2363, |
|
"step": 3333 |
|
}, |
|
{ |
|
"epoch": 0.9616171079062813, |
|
"grad_norm": 1.8820949792861938, |
|
"learning_rate": 4.0364188163884673e-07, |
|
"loss": 0.2208, |
|
"step": 3336 |
|
}, |
|
{ |
|
"epoch": 0.962481871492528, |
|
"grad_norm": 2.777941942214966, |
|
"learning_rate": 3.945371775417299e-07, |
|
"loss": 0.2254, |
|
"step": 3339 |
|
}, |
|
{ |
|
"epoch": 0.9633466350787746, |
|
"grad_norm": 3.551192045211792, |
|
"learning_rate": 3.8543247344461305e-07, |
|
"loss": 0.2155, |
|
"step": 3342 |
|
}, |
|
{ |
|
"epoch": 0.9642113986650213, |
|
"grad_norm": 1.5239380598068237, |
|
"learning_rate": 3.763277693474962e-07, |
|
"loss": 0.2336, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.9650761622512679, |
|
"grad_norm": 3.446213722229004, |
|
"learning_rate": 3.6722306525037937e-07, |
|
"loss": 0.2139, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 0.9659409258375146, |
|
"grad_norm": 2.0579702854156494, |
|
"learning_rate": 3.581183611532626e-07, |
|
"loss": 0.2032, |
|
"step": 3351 |
|
}, |
|
{ |
|
"epoch": 0.9668056894237612, |
|
"grad_norm": 1.746421456336975, |
|
"learning_rate": 3.4901365705614574e-07, |
|
"loss": 0.1885, |
|
"step": 3354 |
|
}, |
|
{ |
|
"epoch": 0.9676704530100079, |
|
"grad_norm": 3.787752628326416, |
|
"learning_rate": 3.399089529590289e-07, |
|
"loss": 0.2145, |
|
"step": 3357 |
|
}, |
|
{ |
|
"epoch": 0.9685352165962545, |
|
"grad_norm": 2.8552401065826416, |
|
"learning_rate": 3.3080424886191206e-07, |
|
"loss": 0.1699, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.9693999801825012, |
|
"grad_norm": 2.1320273876190186, |
|
"learning_rate": 3.2169954476479517e-07, |
|
"loss": 0.1813, |
|
"step": 3363 |
|
}, |
|
{ |
|
"epoch": 0.9702647437687478, |
|
"grad_norm": 2.03324031829834, |
|
"learning_rate": 3.1259484066767833e-07, |
|
"loss": 0.2193, |
|
"step": 3366 |
|
}, |
|
{ |
|
"epoch": 0.9711295073549945, |
|
"grad_norm": 1.8292734622955322, |
|
"learning_rate": 3.034901365705615e-07, |
|
"loss": 0.2111, |
|
"step": 3369 |
|
}, |
|
{ |
|
"epoch": 0.9719942709412411, |
|
"grad_norm": 2.5490074157714844, |
|
"learning_rate": 2.9438543247344465e-07, |
|
"loss": 0.218, |
|
"step": 3372 |
|
}, |
|
{ |
|
"epoch": 0.9728590345274878, |
|
"grad_norm": 3.7310287952423096, |
|
"learning_rate": 2.852807283763278e-07, |
|
"loss": 0.2132, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.9737237981137344, |
|
"grad_norm": 1.762787938117981, |
|
"learning_rate": 2.7617602427921097e-07, |
|
"loss": 0.2054, |
|
"step": 3378 |
|
}, |
|
{ |
|
"epoch": 0.9745885616999811, |
|
"grad_norm": 2.4129533767700195, |
|
"learning_rate": 2.670713201820941e-07, |
|
"loss": 0.2089, |
|
"step": 3381 |
|
}, |
|
{ |
|
"epoch": 0.9754533252862277, |
|
"grad_norm": 2.115504026412964, |
|
"learning_rate": 2.5796661608497724e-07, |
|
"loss": 0.2308, |
|
"step": 3384 |
|
}, |
|
{ |
|
"epoch": 0.9763180888724744, |
|
"grad_norm": 1.7608245611190796, |
|
"learning_rate": 2.488619119878604e-07, |
|
"loss": 0.2233, |
|
"step": 3387 |
|
}, |
|
{ |
|
"epoch": 0.977182852458721, |
|
"grad_norm": 2.5845463275909424, |
|
"learning_rate": 2.3975720789074356e-07, |
|
"loss": 0.2285, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.9780476160449677, |
|
"grad_norm": 10.719658851623535, |
|
"learning_rate": 2.3065250379362674e-07, |
|
"loss": 0.2317, |
|
"step": 3393 |
|
}, |
|
{ |
|
"epoch": 0.9789123796312144, |
|
"grad_norm": 1.8989415168762207, |
|
"learning_rate": 2.215477996965099e-07, |
|
"loss": 0.1977, |
|
"step": 3396 |
|
}, |
|
{ |
|
"epoch": 0.979777143217461, |
|
"grad_norm": 2.626805305480957, |
|
"learning_rate": 2.1244309559939304e-07, |
|
"loss": 0.2069, |
|
"step": 3399 |
|
}, |
|
{ |
|
"epoch": 0.9800653977462099, |
|
"eval_loss": 0.21775692701339722, |
|
"eval_mse": 0.21775692150488976, |
|
"eval_runtime": 64.3973, |
|
"eval_samples_per_second": 15.529, |
|
"eval_steps_per_second": 3.882, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.9806419068037077, |
|
"grad_norm": 3.636981964111328, |
|
"learning_rate": 2.033383915022762e-07, |
|
"loss": 0.2272, |
|
"step": 3402 |
|
}, |
|
{ |
|
"epoch": 0.9815066703899543, |
|
"grad_norm": 2.3272805213928223, |
|
"learning_rate": 1.9423368740515936e-07, |
|
"loss": 0.215, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.982371433976201, |
|
"grad_norm": 3.822941303253174, |
|
"learning_rate": 1.851289833080425e-07, |
|
"loss": 0.2589, |
|
"step": 3408 |
|
}, |
|
{ |
|
"epoch": 0.9832361975624476, |
|
"grad_norm": 1.864588975906372, |
|
"learning_rate": 1.7602427921092565e-07, |
|
"loss": 0.2135, |
|
"step": 3411 |
|
}, |
|
{ |
|
"epoch": 0.9841009611486943, |
|
"grad_norm": 2.8255016803741455, |
|
"learning_rate": 1.669195751138088e-07, |
|
"loss": 0.2505, |
|
"step": 3414 |
|
}, |
|
{ |
|
"epoch": 0.9849657247349409, |
|
"grad_norm": 2.431203603744507, |
|
"learning_rate": 1.5781487101669194e-07, |
|
"loss": 0.2892, |
|
"step": 3417 |
|
}, |
|
{ |
|
"epoch": 0.9858304883211876, |
|
"grad_norm": 4.612681865692139, |
|
"learning_rate": 1.4871016691957513e-07, |
|
"loss": 0.2465, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.9866952519074342, |
|
"grad_norm": 2.1581344604492188, |
|
"learning_rate": 1.3960546282245826e-07, |
|
"loss": 0.2233, |
|
"step": 3423 |
|
}, |
|
{ |
|
"epoch": 0.9875600154936809, |
|
"grad_norm": 2.2907607555389404, |
|
"learning_rate": 1.3050075872534145e-07, |
|
"loss": 0.2177, |
|
"step": 3426 |
|
}, |
|
{ |
|
"epoch": 0.9884247790799275, |
|
"grad_norm": 1.9750571250915527, |
|
"learning_rate": 1.2139605462822459e-07, |
|
"loss": 0.1949, |
|
"step": 3429 |
|
}, |
|
{ |
|
"epoch": 0.9892895426661742, |
|
"grad_norm": 2.6737992763519287, |
|
"learning_rate": 1.1229135053110775e-07, |
|
"loss": 0.2263, |
|
"step": 3432 |
|
}, |
|
{ |
|
"epoch": 0.9901543062524208, |
|
"grad_norm": 1.981949806213379, |
|
"learning_rate": 1.031866464339909e-07, |
|
"loss": 0.2285, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.9910190698386675, |
|
"grad_norm": 2.5654916763305664, |
|
"learning_rate": 9.408194233687405e-08, |
|
"loss": 0.2308, |
|
"step": 3438 |
|
}, |
|
{ |
|
"epoch": 0.9918838334249142, |
|
"grad_norm": 1.8888391256332397, |
|
"learning_rate": 8.497723823975723e-08, |
|
"loss": 0.2311, |
|
"step": 3441 |
|
}, |
|
{ |
|
"epoch": 0.9927485970111608, |
|
"grad_norm": 1.7521294355392456, |
|
"learning_rate": 7.587253414264037e-08, |
|
"loss": 0.1957, |
|
"step": 3444 |
|
}, |
|
{ |
|
"epoch": 0.9936133605974075, |
|
"grad_norm": 2.322394609451294, |
|
"learning_rate": 6.676783004552352e-08, |
|
"loss": 0.2198, |
|
"step": 3447 |
|
}, |
|
{ |
|
"epoch": 0.9944781241836542, |
|
"grad_norm": 1.9925882816314697, |
|
"learning_rate": 5.7663125948406686e-08, |
|
"loss": 0.2426, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.9953428877699009, |
|
"grad_norm": 2.0008606910705566, |
|
"learning_rate": 4.855842185128984e-08, |
|
"loss": 0.2125, |
|
"step": 3453 |
|
}, |
|
{ |
|
"epoch": 0.9962076513561475, |
|
"grad_norm": 4.1277570724487305, |
|
"learning_rate": 3.9453717754172986e-08, |
|
"loss": 0.2444, |
|
"step": 3456 |
|
}, |
|
{ |
|
"epoch": 0.9970724149423942, |
|
"grad_norm": 2.3507277965545654, |
|
"learning_rate": 3.0349013657056146e-08, |
|
"loss": 0.2183, |
|
"step": 3459 |
|
}, |
|
{ |
|
"epoch": 0.9979371785286408, |
|
"grad_norm": 2.143399238586426, |
|
"learning_rate": 2.1244309559939306e-08, |
|
"loss": 0.2128, |
|
"step": 3462 |
|
}, |
|
{ |
|
"epoch": 0.9988019421148875, |
|
"grad_norm": 1.850144863128662, |
|
"learning_rate": 1.213960546282246e-08, |
|
"loss": 0.1883, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.9996667057011341, |
|
"grad_norm": 1.8591262102127075, |
|
"learning_rate": 3.034901365705615e-09, |
|
"loss": 0.2093, |
|
"step": 3468 |
|
}, |
|
{ |
|
"epoch": 0.999954960229883, |
|
"step": 3469, |
|
"total_flos": 9.410742865058857e+17, |
|
"train_loss": 0.2716184546260883, |
|
"train_runtime": 39469.9974, |
|
"train_samples_per_second": 11.25, |
|
"train_steps_per_second": 0.088 |
|
} |
|
], |
|
"logging_steps": 3, |
|
"max_steps": 3469, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.410742865058857e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
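
A minimal sketch of how the state above can be consumed downstream; it assumes the JSON is saved under the usual Hugging Face Trainer filename trainer_state.json (the path is illustrative) and uses only the Python standard library:

import json

# Load the trainer state dumped by the run logged above.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training records carry "loss"; the periodic evaluations (every
# eval_steps=100 optimizer steps) carry "eval_loss"/"eval_mse" instead,
# and the final summary record carries "train_loss".
history = state["log_history"]
train_logs = [e for e in history if "loss" in e]
eval_logs = [e for e in history if "eval_loss" in e]

print(f"mean train loss: {history[-1]['train_loss']:.4f}")
print(f"best eval_mse:   {min(e['eval_mse'] for e in eval_logs):.4f}")
print(f"final logged lr: {train_logs[-1]['learning_rate']:.3e}")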