{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 528,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.001893939393939394, "grad_norm": 0.451171875, "learning_rate": 9.981060606060606e-06, "loss": 1.7963, "step": 1},
    {"epoch": 0.003787878787878788, "grad_norm": 0.447265625, "learning_rate": 9.962121212121212e-06, "loss": 1.7754, "step": 2},
    {"epoch": 0.005681818181818182, "grad_norm": 0.412109375, "learning_rate": 9.943181818181819e-06, "loss": 1.7396, "step": 3},
    {"epoch": 0.007575757575757576, "grad_norm": 0.416015625, "learning_rate": 9.924242424242425e-06, "loss": 1.7643, "step": 4},
    {"epoch": 0.00946969696969697, "grad_norm": 0.359375, "learning_rate": 9.90530303030303e-06, "loss": 1.7028, "step": 5},
    {"epoch": 0.011363636363636364, "grad_norm": 0.39453125, "learning_rate": 9.886363636363637e-06, "loss": 1.7429, "step": 6},
    {"epoch": 0.013257575757575758, "grad_norm": 0.3515625, "learning_rate": 9.867424242424243e-06, "loss": 1.751, "step": 7},
    {"epoch": 0.015151515151515152, "grad_norm": 0.337890625, "learning_rate": 9.84848484848485e-06, "loss": 1.7441, "step": 8},
    {"epoch": 0.017045454545454544, "grad_norm": 0.32421875, "learning_rate": 9.829545454545455e-06, "loss": 1.7713, "step": 9},
    {"epoch": 0.01893939393939394, "grad_norm": 0.298828125, "learning_rate": 9.810606060606061e-06, "loss": 1.6467, "step": 10},
    {"epoch": 0.020833333333333332, "grad_norm": 0.298828125, "learning_rate": 9.791666666666666e-06, "loss": 1.7119, "step": 11},
    {"epoch": 0.022727272727272728, "grad_norm": 0.28515625, "learning_rate": 9.772727272727273e-06, "loss": 1.6096, "step": 12},
    {"epoch": 0.02462121212121212, "grad_norm": 0.291015625, "learning_rate": 9.75378787878788e-06, "loss": 1.6245, "step": 13},
    {"epoch": 0.026515151515151516, "grad_norm": 0.291015625, "learning_rate": 9.734848484848486e-06, "loss": 1.6297, "step": 14},
    {"epoch": 0.028409090909090908, "grad_norm": 0.27734375, "learning_rate": 9.715909090909091e-06, "loss": 1.6394, "step": 15},
    {"epoch": 0.030303030303030304, "grad_norm": 0.287109375, "learning_rate": 9.696969696969698e-06, "loss": 1.5655, "step": 16},
    {"epoch": 0.032196969696969696, "grad_norm": 0.28515625, "learning_rate": 9.678030303030304e-06, "loss": 1.593, "step": 17},
    {"epoch": 0.03409090909090909, "grad_norm": 0.287109375, "learning_rate": 9.65909090909091e-06, "loss": 1.6071, "step": 18},
    {"epoch": 0.03598484848484849, "grad_norm": 0.310546875, "learning_rate": 9.640151515151516e-06, "loss": 1.4755, "step": 19},
    {"epoch": 0.03787878787878788, "grad_norm": 0.2734375, "learning_rate": 9.621212121212122e-06, "loss": 1.562, "step": 20},
    {"epoch": 0.03977272727272727, "grad_norm": 0.2734375, "learning_rate": 9.602272727272727e-06, "loss": 1.558, "step": 21},
    {"epoch": 0.041666666666666664, "grad_norm": 0.2470703125, "learning_rate": 9.583333333333335e-06, "loss": 1.5019, "step": 22},
    {"epoch": 0.043560606060606064, "grad_norm": 0.30859375, "learning_rate": 9.56439393939394e-06, "loss": 1.59, "step": 23},
    {"epoch": 0.045454545454545456, "grad_norm": 0.2392578125, "learning_rate": 9.545454545454547e-06, "loss": 1.47, "step": 24},
    {"epoch": 0.04734848484848485, "grad_norm": 0.23046875, "learning_rate": 9.526515151515152e-06, "loss": 1.5116, "step": 25},
    {"epoch": 0.04924242424242424, "grad_norm": 0.208984375, "learning_rate": 9.507575757575758e-06, "loss": 1.463, "step": 26},
    {"epoch": 0.05113636363636364, "grad_norm": 0.2255859375, "learning_rate": 9.488636363636365e-06, "loss": 1.4856, "step": 27},
    {"epoch": 0.05303030303030303, "grad_norm": 0.2119140625, "learning_rate": 9.469696969696971e-06, "loss": 1.4342, "step": 28},
    {"epoch": 0.054924242424242424, "grad_norm": 0.2734375, "learning_rate": 9.450757575757576e-06, "loss": 1.4546, "step": 29},
    {"epoch": 0.056818181818181816, "grad_norm": 0.1982421875, "learning_rate": 9.431818181818183e-06, "loss": 1.4768, "step": 30},
    {"epoch": 0.058712121212121215, "grad_norm": 0.19921875, "learning_rate": 9.41287878787879e-06, "loss": 1.3967, "step": 31},
    {"epoch": 0.06060606060606061, "grad_norm": 0.2080078125, "learning_rate": 9.393939393939396e-06, "loss": 1.4417, "step": 32},
    {"epoch": 0.0625, "grad_norm": 0.1962890625, "learning_rate": 9.375000000000001e-06, "loss": 1.3645, "step": 33},
    {"epoch": 0.06439393939393939, "grad_norm": 0.2080078125, "learning_rate": 9.356060606060606e-06, "loss": 1.4291, "step": 34},
    {"epoch": 0.06628787878787878, "grad_norm": 0.1953125, "learning_rate": 9.337121212121212e-06, "loss": 1.3972, "step": 35},
    {"epoch": 0.06818181818181818, "grad_norm": 0.205078125, "learning_rate": 9.318181818181819e-06, "loss": 1.3673, "step": 36},
    {"epoch": 0.07007575757575757, "grad_norm": 0.19140625, "learning_rate": 9.299242424242425e-06, "loss": 1.3402, "step": 37},
    {"epoch": 0.07196969696969698, "grad_norm": 0.201171875, "learning_rate": 9.28030303030303e-06, "loss": 1.4012, "step": 38},
    {"epoch": 0.07386363636363637, "grad_norm": 0.2333984375, "learning_rate": 9.261363636363637e-06, "loss": 1.2972, "step": 39},
    {"epoch": 0.07575757575757576, "grad_norm": 0.1884765625, "learning_rate": 9.242424242424244e-06, "loss": 1.409, "step": 40},
    {"epoch": 0.07765151515151515, "grad_norm": 0.2021484375, "learning_rate": 9.22348484848485e-06, "loss": 1.3533, "step": 41},
    {"epoch": 0.07954545454545454, "grad_norm": 0.1953125, "learning_rate": 9.204545454545455e-06, "loss": 1.4493, "step": 42},
    {"epoch": 0.08143939393939394, "grad_norm": 0.181640625, "learning_rate": 9.185606060606062e-06, "loss": 1.3153, "step": 43},
    {"epoch": 0.08333333333333333, "grad_norm": 0.1767578125, "learning_rate": 9.166666666666666e-06, "loss": 1.3608, "step": 44},
    {"epoch": 0.08522727272727272, "grad_norm": 0.1650390625, "learning_rate": 9.147727272727273e-06, "loss": 1.3115, "step": 45},
    {"epoch": 0.08712121212121213, "grad_norm": 0.1767578125, "learning_rate": 9.12878787878788e-06, "loss": 1.324, "step": 46},
    {"epoch": 0.08901515151515152, "grad_norm": 0.208984375, "learning_rate": 9.109848484848486e-06, "loss": 1.3352, "step": 47},
    {"epoch": 0.09090909090909091, "grad_norm": 0.171875, "learning_rate": 9.090909090909091e-06, "loss": 1.3193, "step": 48},
    {"epoch": 0.0928030303030303, "grad_norm": 0.1669921875, "learning_rate": 9.071969696969698e-06, "loss": 1.2729, "step": 49},
    {"epoch": 0.0946969696969697, "grad_norm": 0.1728515625, "learning_rate": 9.053030303030304e-06, "loss": 1.3045, "step": 50},
    {"epoch": 0.09659090909090909, "grad_norm": 0.1640625, "learning_rate": 9.03409090909091e-06, "loss": 1.2968, "step": 51},
    {"epoch": 0.09848484848484848, "grad_norm": 0.173828125, "learning_rate": 9.015151515151516e-06, "loss": 1.3272, "step": 52},
    {"epoch": 0.10037878787878787, "grad_norm": 0.1640625, "learning_rate": 8.996212121212122e-06, "loss": 1.2825, "step": 53},
    {"epoch": 0.10227272727272728, "grad_norm": 0.205078125, "learning_rate": 8.977272727272727e-06, "loss": 1.2588, "step": 54},
    {"epoch": 0.10416666666666667, "grad_norm": 0.1708984375, "learning_rate": 8.958333333333334e-06, "loss": 1.2485, "step": 55},
    {"epoch": 0.10606060606060606, "grad_norm": 0.173828125, "learning_rate": 8.93939393939394e-06, "loss": 1.2276, "step": 56},
    {"epoch": 0.10795454545454546, "grad_norm": 0.181640625, "learning_rate": 8.920454545454547e-06, "loss": 1.3342, "step": 57},
    {"epoch": 0.10984848484848485, "grad_norm": 0.208984375, "learning_rate": 8.901515151515152e-06, "loss": 1.2678, "step": 58},
    {"epoch": 0.11174242424242424, "grad_norm": 0.166015625, "learning_rate": 8.882575757575758e-06, "loss": 1.2523, "step": 59},
    {"epoch": 0.11363636363636363, "grad_norm": 0.162109375, "learning_rate": 8.863636363636365e-06, "loss": 1.2582, "step": 60},
    {"epoch": 0.11553030303030302, "grad_norm": 0.1748046875, "learning_rate": 8.844696969696971e-06, "loss": 1.2021, "step": 61},
    {"epoch": 0.11742424242424243, "grad_norm": 0.189453125, "learning_rate": 8.825757575757576e-06, "loss": 1.2091, "step": 62},
    {"epoch": 0.11931818181818182, "grad_norm": 0.1982421875, "learning_rate": 8.806818181818183e-06, "loss": 1.3111, "step": 63},
    {"epoch": 0.12121212121212122, "grad_norm": 0.193359375, "learning_rate": 8.787878787878788e-06, "loss": 1.2265, "step": 64},
    {"epoch": 0.12310606060606061, "grad_norm": 0.1630859375, "learning_rate": 8.768939393939394e-06, "loss": 1.243, "step": 65},
    {"epoch": 0.125, "grad_norm": 0.1943359375, "learning_rate": 8.750000000000001e-06, "loss": 1.2032, "step": 66},
    {"epoch": 0.1268939393939394, "grad_norm": 0.169921875, "learning_rate": 8.731060606060606e-06, "loss": 1.2213, "step": 67},
    {"epoch": 0.12878787878787878, "grad_norm": 0.15625, "learning_rate": 8.712121212121212e-06, "loss": 1.1738, "step": 68},
    {"epoch": 0.13068181818181818, "grad_norm": 0.169921875, "learning_rate": 8.693181818181819e-06, "loss": 1.2173, "step": 69},
    {"epoch": 0.13257575757575757, "grad_norm": 0.16796875, "learning_rate": 8.674242424242426e-06, "loss": 1.2439, "step": 70},
    {"epoch": 0.13446969696969696, "grad_norm": 0.1591796875, "learning_rate": 8.65530303030303e-06, "loss": 1.1949, "step": 71},
    {"epoch": 0.13636363636363635, "grad_norm": 0.169921875, "learning_rate": 8.636363636363637e-06, "loss": 1.2421, "step": 72},
    {"epoch": 0.13825757575757575, "grad_norm": 0.1865234375, "learning_rate": 8.617424242424242e-06, "loss": 1.149, "step": 73},
    {"epoch": 0.14015151515151514, "grad_norm": 0.4140625, "learning_rate": 8.59848484848485e-06, "loss": 1.1985, "step": 74},
    {"epoch": 0.14204545454545456, "grad_norm": 0.1640625, "learning_rate": 8.579545454545455e-06, "loss": 1.2139, "step": 75},
    {"epoch": 0.14393939393939395, "grad_norm": 0.1669921875, "learning_rate": 8.560606060606062e-06, "loss": 1.2225, "step": 76},
    {"epoch": 0.14583333333333334, "grad_norm": 0.166015625, "learning_rate": 8.541666666666666e-06, "loss": 1.2365, "step": 77},
    {"epoch": 0.14772727272727273, "grad_norm": 0.18359375, "learning_rate": 8.522727272727273e-06, "loss": 1.2121, "step": 78},
    {"epoch": 0.14962121212121213, "grad_norm": 0.16015625, "learning_rate": 8.50378787878788e-06, "loss": 1.1981, "step": 79},
    {"epoch": 0.15151515151515152, "grad_norm": 0.16015625, "learning_rate": 8.484848484848486e-06, "loss": 1.1861, "step": 80},
    {"epoch": 0.1534090909090909, "grad_norm": 0.20703125, "learning_rate": 8.465909090909091e-06, "loss": 1.2362, "step": 81},
    {"epoch": 0.1553030303030303, "grad_norm": 0.181640625, "learning_rate": 8.446969696969698e-06, "loss": 1.2322, "step": 82},
    {"epoch": 0.1571969696969697, "grad_norm": 0.1806640625, "learning_rate": 8.428030303030304e-06, "loss": 1.1466, "step": 83},
    {"epoch": 0.1590909090909091, "grad_norm": 0.173828125, "learning_rate": 8.40909090909091e-06, "loss": 1.1789, "step": 84},
    {"epoch": 0.16098484848484848, "grad_norm": 0.26171875, "learning_rate": 8.390151515151516e-06, "loss": 1.2105, "step": 85},
    {"epoch": 0.16287878787878787, "grad_norm": 0.1796875, "learning_rate": 8.371212121212122e-06, "loss": 1.1877, "step": 86},
    {"epoch": 0.16477272727272727, "grad_norm": 0.2255859375, "learning_rate": 8.352272727272727e-06, "loss": 1.1701, "step": 87},
    {"epoch": 0.16666666666666666, "grad_norm": 0.1630859375, "learning_rate": 8.333333333333334e-06, "loss": 1.2096, "step": 88},
    {"epoch": 0.16856060606060605, "grad_norm": 0.1591796875, "learning_rate": 8.31439393939394e-06, "loss": 1.1431, "step": 89},
    {"epoch": 0.17045454545454544, "grad_norm": 0.1728515625, "learning_rate": 8.295454545454547e-06, "loss": 1.2117, "step": 90},
    {"epoch": 0.17234848484848486, "grad_norm": 0.162109375, "learning_rate": 8.276515151515152e-06, "loss": 1.1393, "step": 91},
    {"epoch": 0.17424242424242425, "grad_norm": 0.154296875, "learning_rate": 8.257575757575758e-06, "loss": 1.171, "step": 92},
    {"epoch": 0.17613636363636365, "grad_norm": 0.8984375, "learning_rate": 8.238636363636365e-06, "loss": 1.1707, "step": 93},
    {"epoch": 0.17803030303030304, "grad_norm": 0.1611328125, "learning_rate": 8.219696969696971e-06, "loss": 1.1527, "step": 94},
    {"epoch": 0.17992424242424243, "grad_norm": 0.19921875, "learning_rate": 8.200757575757576e-06, "loss": 1.1672, "step": 95},
    {"epoch": 0.18181818181818182, "grad_norm": 0.1669921875, "learning_rate": 8.181818181818183e-06, "loss": 1.1511, "step": 96},
    {"epoch": 0.18371212121212122, "grad_norm": 0.1748046875, "learning_rate": 8.162878787878788e-06, "loss": 1.1525, "step": 97},
    {"epoch": 0.1856060606060606, "grad_norm": 0.228515625, "learning_rate": 8.143939393939394e-06, "loss": 1.1724, "step": 98},
    {"epoch": 0.1875, "grad_norm": 0.1552734375, "learning_rate": 8.125000000000001e-06, "loss": 1.1699, "step": 99},
    {"epoch": 0.1893939393939394, "grad_norm": 0.189453125, "learning_rate": 8.106060606060606e-06, "loss": 1.1535, "step": 100},
    {"epoch": 0.19128787878787878, "grad_norm": 0.38671875, "learning_rate": 8.087121212121212e-06, "loss": 1.1673, "step": 101},
    {"epoch": 0.19318181818181818, "grad_norm": 0.30859375, "learning_rate": 8.068181818181819e-06, "loss": 1.1941, "step": 102},
    {"epoch": 0.19507575757575757, "grad_norm": 0.166015625, "learning_rate": 8.049242424242426e-06, "loss": 1.198, "step": 103},
    {"epoch": 0.19696969696969696, "grad_norm": 0.1806640625, "learning_rate": 8.03030303030303e-06, "loss": 1.1431, "step": 104},
    {"epoch": 0.19886363636363635, "grad_norm": 0.1708984375, "learning_rate": 8.011363636363637e-06, "loss": 1.1525, "step": 105},
    {"epoch": 0.20075757575757575, "grad_norm": 0.26171875, "learning_rate": 7.992424242424242e-06, "loss": 1.1667, "step": 106},
    {"epoch": 0.20265151515151514, "grad_norm": 0.2431640625, "learning_rate": 7.973484848484849e-06, "loss": 1.1767, "step": 107},
    {"epoch": 0.20454545454545456, "grad_norm": 0.1640625, "learning_rate": 7.954545454545455e-06, "loss": 1.1665, "step": 108},
    {"epoch": 0.20643939393939395, "grad_norm": 0.189453125, "learning_rate": 7.935606060606062e-06, "loss": 1.1504, "step": 109},
    {"epoch": 0.20833333333333334, "grad_norm": 0.173828125, "learning_rate": 7.916666666666667e-06, "loss": 1.1342, "step": 110},
    {"epoch": 0.21022727272727273, "grad_norm": 0.1748046875, "learning_rate": 7.897727272727273e-06, "loss": 1.1203, "step": 111},
    {"epoch": 0.21212121212121213, "grad_norm": 0.1728515625, "learning_rate": 7.87878787878788e-06, "loss": 1.1428, "step": 112},
    {"epoch": 0.21401515151515152, "grad_norm": 0.171875, "learning_rate": 7.859848484848486e-06, "loss": 1.1109, "step": 113},
    {"epoch": 0.2159090909090909, "grad_norm": 0.1669921875, "learning_rate": 7.840909090909091e-06, "loss": 1.1376, "step": 114},
    {"epoch": 0.2178030303030303, "grad_norm": 0.162109375, "learning_rate": 7.821969696969698e-06, "loss": 1.1574, "step": 115},
    {"epoch": 0.2196969696969697, "grad_norm": 0.1767578125, "learning_rate": 7.803030303030303e-06, "loss": 1.1442, "step": 116},
    {"epoch": 0.2215909090909091, "grad_norm": 0.169921875, "learning_rate": 7.784090909090911e-06, "loss": 1.1367, "step": 117},
    {"epoch": 0.22348484848484848, "grad_norm": 0.1923828125, "learning_rate": 7.765151515151516e-06, "loss": 1.154, "step": 118},
    {"epoch": 0.22537878787878787, "grad_norm": 0.171875, "learning_rate": 7.746212121212122e-06, "loss": 1.1311, "step": 119},
    {"epoch": 0.22727272727272727, "grad_norm": 0.25390625, "learning_rate": 7.727272727272727e-06, "loss": 1.1048, "step": 120},
    {"epoch": 0.22916666666666666, "grad_norm": 0.189453125, "learning_rate": 7.708333333333334e-06, "loss": 1.0927, "step": 121},
    {"epoch": 0.23106060606060605, "grad_norm": 0.181640625, "learning_rate": 7.68939393939394e-06, "loss": 1.1566, "step": 122},
    {"epoch": 0.23295454545454544, "grad_norm": 0.177734375, "learning_rate": 7.670454545454547e-06, "loss": 1.1359, "step": 123},
    {"epoch": 0.23484848484848486, "grad_norm": 0.185546875, "learning_rate": 7.651515151515152e-06, "loss": 1.196, "step": 124},
    {"epoch": 0.23674242424242425, "grad_norm": 0.16796875, "learning_rate": 7.632575757575758e-06, "loss": 1.0799, "step": 125},
    {"epoch": 0.23863636363636365, "grad_norm": 0.1865234375, "learning_rate": 7.613636363636364e-06, "loss": 1.1465, "step": 126},
    {"epoch": 0.24053030303030304, "grad_norm": 0.173828125, "learning_rate": 7.594696969696971e-06, "loss": 1.1252, "step": 127},
    {"epoch": 0.24242424242424243, "grad_norm": 0.1689453125, "learning_rate": 7.5757575757575764e-06, "loss": 1.0775, "step": 128},
    {"epoch": 0.24431818181818182, "grad_norm": 0.166015625, "learning_rate": 7.556818181818183e-06, "loss": 1.1184, "step": 129},
    {"epoch": 0.24621212121212122, "grad_norm": 0.177734375, "learning_rate": 7.537878787878789e-06, "loss": 1.1603, "step": 130},
    {"epoch": 0.2481060606060606, "grad_norm": 0.18359375, "learning_rate": 7.518939393939395e-06, "loss": 1.1033, "step": 131},
    {"epoch": 0.25, "grad_norm": 0.23046875, "learning_rate": 7.500000000000001e-06, "loss": 1.1378, "step": 132},
    {"epoch": 0.2518939393939394, "grad_norm": 0.171875, "learning_rate": 7.481060606060606e-06, "loss": 1.1165, "step": 133},
    {"epoch": 0.2537878787878788, "grad_norm": 0.1845703125, "learning_rate": 7.4621212121212125e-06, "loss": 1.1678, "step": 134},
    {"epoch": 0.2556818181818182, "grad_norm": 0.1689453125, "learning_rate": 7.443181818181818e-06, "loss": 1.1559, "step": 135},
    {"epoch": 0.25757575757575757, "grad_norm": 0.1669921875, "learning_rate": 7.424242424242425e-06, "loss": 1.0984, "step": 136},
    {"epoch": 0.25946969696969696, "grad_norm": 0.16796875, "learning_rate": 7.4053030303030305e-06, "loss": 1.1306, "step": 137},
    {"epoch": 0.26136363636363635, "grad_norm": 0.16796875, "learning_rate": 7.386363636363637e-06, "loss": 1.1568, "step": 138},
    {"epoch": 0.26325757575757575, "grad_norm": 0.1689453125, "learning_rate": 7.367424242424243e-06, "loss": 1.1173, "step": 139},
    {"epoch": 0.26515151515151514, "grad_norm": 0.2578125, "learning_rate": 7.348484848484849e-06, "loss": 1.0802, "step": 140},
    {"epoch": 0.26704545454545453, "grad_norm": 0.171875, "learning_rate": 7.329545454545455e-06, "loss": 1.1028, "step": 141},
    {"epoch": 0.2689393939393939, "grad_norm": 0.173828125, "learning_rate": 7.310606060606062e-06, "loss": 1.0935, "step": 142},
    {"epoch": 0.2708333333333333, "grad_norm": 0.171875, "learning_rate": 7.291666666666667e-06, "loss": 1.1048, "step": 143},
    {"epoch": 0.2727272727272727, "grad_norm": 0.1806640625, "learning_rate": 7.272727272727273e-06, "loss": 1.1398, "step": 144},
    {"epoch": 0.2746212121212121, "grad_norm": 0.2373046875, "learning_rate": 7.253787878787879e-06, "loss": 1.122, "step": 145},
    {"epoch": 0.2765151515151515, "grad_norm": 0.16796875, "learning_rate": 7.2348484848484855e-06, "loss": 1.0773, "step": 146},
    {"epoch": 0.2784090909090909, "grad_norm": 0.171875, "learning_rate": 7.215909090909091e-06, "loss": 1.066, "step": 147},
    {"epoch": 0.2803030303030303, "grad_norm": 0.169921875, "learning_rate": 7.196969696969698e-06, "loss": 1.1131, "step": 148},
    {"epoch": 0.2821969696969697, "grad_norm": 0.1787109375, "learning_rate": 7.1780303030303035e-06, "loss": 1.1247, "step": 149},
    {"epoch": 0.2840909090909091, "grad_norm": 0.1953125, "learning_rate": 7.15909090909091e-06, "loss": 1.1012, "step": 150},
    {"epoch": 0.2859848484848485, "grad_norm": 0.1962890625, "learning_rate": 7.140151515151516e-06, "loss": 1.107, "step": 151},
    {"epoch": 0.2878787878787879, "grad_norm": 0.1787109375, "learning_rate": 7.121212121212122e-06, "loss": 1.0854, "step": 152},
    {"epoch": 0.2897727272727273, "grad_norm": 0.17578125, "learning_rate": 7.102272727272727e-06, "loss": 1.0889, "step": 153},
    {"epoch": 0.2916666666666667, "grad_norm": 0.171875, "learning_rate": 7.083333333333335e-06, "loss": 1.1018, "step": 154},
    {"epoch": 0.2935606060606061, "grad_norm": 0.1748046875, "learning_rate": 7.0643939393939396e-06, "loss": 1.133, "step": 155},
    {"epoch": 0.29545454545454547, "grad_norm": 0.197265625, "learning_rate": 7.045454545454546e-06, "loss": 1.0746, "step": 156},
    {"epoch": 0.29734848484848486, "grad_norm": 0.1865234375, "learning_rate": 7.026515151515152e-06, "loss": 1.0956, "step": 157},
    {"epoch": 0.29924242424242425, "grad_norm": 0.1826171875, "learning_rate": 7.0075757575757585e-06, "loss": 1.0624, "step": 158},
    {"epoch": 0.30113636363636365, "grad_norm": 0.21875, "learning_rate": 6.988636363636364e-06, "loss": 1.0742, "step": 159},
    {"epoch": 0.30303030303030304, "grad_norm": 0.1728515625, "learning_rate": 6.969696969696971e-06, "loss": 1.1139, "step": 160},
    {"epoch": 0.30492424242424243, "grad_norm": 0.1669921875, "learning_rate": 6.9507575757575765e-06, "loss": 1.1004, "step": 161},
    {"epoch": 0.3068181818181818, "grad_norm": 0.1728515625, "learning_rate": 6.931818181818183e-06, "loss": 1.0942, "step": 162},
    {"epoch": 0.3087121212121212, "grad_norm": 0.1728515625, "learning_rate": 6.912878787878789e-06, "loss": 1.1033, "step": 163},
    {"epoch": 0.3106060606060606, "grad_norm": 0.1796875, "learning_rate": 6.893939393939395e-06, "loss": 1.0978, "step": 164},
    {"epoch": 0.3125, "grad_norm": 0.1826171875, "learning_rate": 6.875e-06, "loss": 1.1675, "step": 165},
    {"epoch": 0.3143939393939394, "grad_norm": 0.26953125, "learning_rate": 6.856060606060606e-06, "loss": 1.0792, "step": 166},
    {"epoch": 0.3162878787878788, "grad_norm": 0.208984375, "learning_rate": 6.8371212121212125e-06, "loss": 1.097, "step": 167},
    {"epoch": 0.3181818181818182, "grad_norm": 0.28125, "learning_rate": 6.818181818181818e-06, "loss": 1.0423, "step": 168},
    {"epoch": 0.32007575757575757, "grad_norm": 0.1796875, "learning_rate": 6.799242424242425e-06, "loss": 1.1157, "step": 169},
    {"epoch": 0.32196969696969696, "grad_norm": 0.197265625, "learning_rate": 6.780303030303031e-06, "loss": 1.0916, "step": 170},
    {"epoch": 0.32386363636363635, "grad_norm": 0.1826171875, "learning_rate": 6.761363636363637e-06, "loss": 1.1438, "step": 171},
    {"epoch": 0.32575757575757575, "grad_norm": 0.30078125, "learning_rate": 6.742424242424243e-06, "loss": 1.1348, "step": 172},
    {"epoch": 0.32765151515151514, "grad_norm": 0.1689453125, "learning_rate": 6.7234848484848495e-06, "loss": 1.0739, "step": 173},
    {"epoch": 0.32954545454545453, "grad_norm": 0.1796875, "learning_rate": 6.704545454545454e-06, "loss": 1.0961, "step": 174},
    {"epoch": 0.3314393939393939, "grad_norm": 0.318359375, "learning_rate": 6.685606060606061e-06, "loss": 1.0229, "step": 175},
    {"epoch": 0.3333333333333333, "grad_norm": 0.177734375, "learning_rate": 6.666666666666667e-06, "loss": 1.1111, "step": 176},
    {"epoch": 0.3352272727272727, "grad_norm": 0.17578125, "learning_rate": 6.647727272727273e-06, "loss": 1.0451, "step": 177},
    {"epoch": 0.3371212121212121, "grad_norm": 0.181640625, "learning_rate": 6.628787878787879e-06, "loss": 1.0819, "step": 178},
    {"epoch": 0.3390151515151515, "grad_norm": 0.27734375, "learning_rate": 6.6098484848484855e-06, "loss": 1.0276, "step": 179},
    {"epoch": 0.3409090909090909, "grad_norm": 0.220703125, "learning_rate": 6.590909090909091e-06, "loss": 1.1011, "step": 180},
    {"epoch": 0.3428030303030303, "grad_norm": 0.2021484375, "learning_rate": 6.571969696969698e-06, "loss": 1.0703, "step": 181},
    {"epoch": 0.3446969696969697, "grad_norm": 0.232421875, "learning_rate": 6.5530303030303036e-06, "loss": 1.0276, "step": 182},
    {"epoch": 0.3465909090909091, "grad_norm": 0.20703125, "learning_rate": 6.53409090909091e-06, "loss": 1.123, "step": 183},
    {"epoch": 0.3484848484848485, "grad_norm": 0.177734375, "learning_rate": 6.515151515151516e-06, "loss": 1.0628, "step": 184},
    {"epoch": 0.3503787878787879, "grad_norm": 0.203125, "learning_rate": 6.4962121212121224e-06, "loss": 1.1604, "step": 185},
    {"epoch": 0.3522727272727273, "grad_norm": 0.185546875, "learning_rate": 6.477272727272727e-06, "loss": 1.0925, "step": 186},
    {"epoch": 0.3541666666666667, "grad_norm": 0.1796875, "learning_rate": 6.458333333333334e-06, "loss": 1.0547, "step": 187},
    {"epoch": 0.3560606060606061, "grad_norm": 0.1748046875, "learning_rate": 6.43939393939394e-06, "loss": 1.0688, "step": 188},
    {"epoch": 0.35795454545454547, "grad_norm": 0.1875, "learning_rate": 6.420454545454546e-06, "loss": 1.031, "step": 189},
    {"epoch": 0.35984848484848486, "grad_norm": 0.193359375, "learning_rate": 6.401515151515152e-06, "loss": 1.09, "step": 190},
    {"epoch": 0.36174242424242425, "grad_norm": 0.197265625, "learning_rate": 6.3825757575757585e-06, "loss": 1.096, "step": 191},
    {"epoch": 0.36363636363636365, "grad_norm": 0.19140625, "learning_rate": 6.363636363636364e-06, "loss": 1.037, "step": 192},
    {"epoch": 0.36553030303030304, "grad_norm": 0.1796875, "learning_rate": 6.344696969696971e-06, "loss": 1.0587, "step": 193},
    {"epoch": 0.36742424242424243, "grad_norm": 0.1796875, "learning_rate": 6.3257575757575765e-06, "loss": 1.138, "step": 194},
    {"epoch": 0.3693181818181818, "grad_norm": 0.265625, "learning_rate": 6.306818181818183e-06, "loss": 1.0685, "step": 195},
    {"epoch": 0.3712121212121212, "grad_norm": 0.177734375, "learning_rate": 6.287878787878788e-06, "loss": 1.0301, "step": 196},
    {"epoch": 0.3731060606060606, "grad_norm": 0.1904296875, "learning_rate": 6.2689393939393946e-06, "loss": 1.0451, "step": 197},
    {"epoch": 0.375, "grad_norm": 0.173828125, "learning_rate": 6.25e-06, "loss": 1.0362, "step": 198},
    {"epoch": 0.3768939393939394, "grad_norm": 0.201171875, "learning_rate": 6.231060606060606e-06, "loss": 1.0863, "step": 199},
    {"epoch": 0.3787878787878788, "grad_norm": 0.19140625, "learning_rate": 6.212121212121213e-06, "loss": 1.1231, "step": 200},
    {"epoch": 0.3806818181818182, "grad_norm": 0.1943359375, "learning_rate": 6.193181818181818e-06, "loss": 1.0892, "step": 201},
    {"epoch": 0.38257575757575757, "grad_norm": 0.2021484375, "learning_rate": 6.174242424242425e-06, "loss": 1.1051, "step": 202},
    {"epoch": 0.38446969696969696, "grad_norm": 0.1787109375, "learning_rate": 6.155303030303031e-06, "loss": 1.0526, "step": 203},
    {"epoch": 0.38636363636363635, "grad_norm": 0.1748046875, "learning_rate": 6.136363636363637e-06, "loss": 1.0809, "step": 204},
    {"epoch": 0.38825757575757575, "grad_norm": 0.1884765625, "learning_rate": 6.117424242424242e-06, "loss": 1.0893, "step": 205},
    {"epoch": 0.39015151515151514, "grad_norm": 0.177734375, "learning_rate": 6.0984848484848495e-06, "loss": 1.0341, "step": 206},
    {"epoch": 0.39204545454545453, "grad_norm": 0.1826171875, "learning_rate": 6.079545454545454e-06, "loss": 1.0463, "step": 207},
    {"epoch": 0.3939393939393939, "grad_norm": 0.177734375, "learning_rate": 6.060606060606061e-06, "loss": 1.039, "step": 208},
    {"epoch": 0.3958333333333333, "grad_norm": 0.2138671875, "learning_rate": 6.041666666666667e-06, "loss": 1.1095, "step": 209},
    {"epoch": 0.3977272727272727, "grad_norm": 0.2412109375, "learning_rate": 6.022727272727273e-06, "loss": 1.0593, "step": 210},
    {"epoch": 0.3996212121212121, "grad_norm": 0.197265625, "learning_rate": 6.003787878787879e-06, "loss": 1.1322, "step": 211},
    {"epoch": 0.4015151515151515, "grad_norm": 0.1923828125, "learning_rate": 5.984848484848486e-06, "loss": 1.1224, "step": 212},
    {"epoch": 0.4034090909090909, "grad_norm": 0.17578125, "learning_rate": 5.965909090909091e-06, "loss": 1.059, "step": 213},
    {"epoch": 0.4053030303030303, "grad_norm": 0.33203125, "learning_rate": 5.946969696969698e-06, "loss": 1.1349, "step": 214},
    {"epoch": 0.4071969696969697, "grad_norm": 0.17578125, "learning_rate": 5.928030303030304e-06, "loss": 1.0584, "step": 215},
    {"epoch": 0.4090909090909091, "grad_norm": 0.1748046875, "learning_rate": 5.90909090909091e-06, "loss": 1.0697, "step": 216},
    {"epoch": 0.4109848484848485, "grad_norm": 0.185546875, "learning_rate": 5.890151515151515e-06, "loss": 1.0758, "step": 217},
    {"epoch": 0.4128787878787879, "grad_norm": 0.181640625, "learning_rate": 5.871212121212122e-06, "loss": 1.038, "step": 218},
    {"epoch": 0.4147727272727273, "grad_norm": 0.177734375, "learning_rate": 5.852272727272727e-06, "loss": 1.0454, "step": 219},
    {"epoch": 0.4166666666666667, "grad_norm": 0.1787109375, "learning_rate": 5.833333333333334e-06, "loss": 1.0517, "step": 220},
    {"epoch": 0.4185606060606061, "grad_norm": 0.1806640625, "learning_rate": 5.81439393939394e-06, "loss": 1.0624, "step": 221},
    {"epoch": 0.42045454545454547, "grad_norm": 0.23046875, "learning_rate": 5.795454545454546e-06, "loss": 1.0628, "step": 222},
    {"epoch": 0.42234848484848486, "grad_norm": 0.185546875, "learning_rate": 5.776515151515152e-06, "loss": 1.0505, "step": 223},
    {"epoch": 0.42424242424242425, "grad_norm": 0.19921875, "learning_rate": 5.7575757575757586e-06, "loss": 1.0563, "step": 224},
    {"epoch": 0.42613636363636365, "grad_norm": 0.1845703125, "learning_rate": 5.738636363636364e-06, "loss": 1.0539, "step": 225},
    {"epoch": 0.42803030303030304, "grad_norm": 0.1865234375, "learning_rate": 5.719696969696971e-06, "loss": 1.0244, "step": 226},
    {"epoch": 0.42992424242424243, "grad_norm": 0.20703125, "learning_rate": 5.700757575757576e-06, "loss": 1.055, "step": 227},
    {"epoch": 0.4318181818181818, "grad_norm": 0.1904296875, "learning_rate": 5.681818181818183e-06, "loss": 1.077, "step": 228},
    {"epoch": 0.4337121212121212, "grad_norm": 0.173828125, "learning_rate": 5.662878787878788e-06, "loss": 1.0469, "step": 229},
    {"epoch": 0.4356060606060606, "grad_norm": 0.1787109375, "learning_rate": 5.643939393939395e-06, "loss": 1.0753, "step": 230},
    {"epoch": 0.4375, "grad_norm": 0.28125, "learning_rate": 5.625e-06, "loss": 1.1051, "step": 231},
    {"epoch": 0.4393939393939394, "grad_norm": 0.1884765625, "learning_rate": 5.606060606060606e-06, "loss": 1.0168, "step": 232},
    {"epoch": 0.4412878787878788, "grad_norm": 0.2119140625, "learning_rate": 5.587121212121213e-06, "loss": 1.0703, "step": 233},
    {"epoch": 0.4431818181818182, "grad_norm": 0.193359375, "learning_rate": 5.568181818181818e-06, "loss": 1.0704, "step": 234},
    {"epoch": 0.44507575757575757, "grad_norm": 0.185546875, "learning_rate": 5.549242424242425e-06, "loss": 1.024, "step": 235},
    {"epoch": 0.44696969696969696, "grad_norm": 0.2109375, "learning_rate": 5.530303030303031e-06, "loss": 1.0036, "step": 236},
    {"epoch": 0.44886363636363635, "grad_norm": 0.2294921875, "learning_rate": 5.511363636363637e-06, "loss": 1.0425, "step": 237},
    {"epoch": 0.45075757575757575, "grad_norm": 0.1845703125, "learning_rate": 5.492424242424242e-06, "loss": 1.0471, "step": 238},
    {"epoch": 0.45265151515151514, "grad_norm": 0.1923828125, "learning_rate": 5.473484848484849e-06, "loss": 1.0404, "step": 239},
    {"epoch": 0.45454545454545453, "grad_norm": 0.19140625, "learning_rate": 5.4545454545454545e-06, "loss": 1.0796, "step": 240},
    {"epoch": 0.4564393939393939, "grad_norm": 0.1982421875, "learning_rate": 5.435606060606061e-06, "loss": 1.0376, "step": 241},
    {"epoch": 0.4583333333333333, "grad_norm": 0.1923828125, "learning_rate": 5.416666666666667e-06, "loss": 1.0419, "step": 242},
    {"epoch": 0.4602272727272727, "grad_norm": 0.2001953125, "learning_rate": 5.397727272727273e-06, "loss": 1.0938, "step": 243},
    {"epoch": 0.4621212121212121, "grad_norm": 0.189453125, "learning_rate": 5.378787878787879e-06, "loss": 1.068, "step": 244},
    {"epoch": 0.4640151515151515, "grad_norm": 0.2353515625, "learning_rate": 5.359848484848486e-06, "loss": 1.0211, "step": 245},
    {"epoch": 0.4659090909090909, "grad_norm": 0.1865234375, "learning_rate": 5.340909090909091e-06, "loss": 1.0627, "step": 246},
    {"epoch": 0.4678030303030303, "grad_norm": 0.1962890625, "learning_rate": 5.321969696969698e-06, "loss": 1.0802, "step": 247},
    {"epoch": 0.4696969696969697, "grad_norm": 0.1953125, "learning_rate": 5.303030303030303e-06, "loss": 1.0905, "step": 248},
    {"epoch": 0.4715909090909091, "grad_norm": 0.1875, "learning_rate": 5.28409090909091e-06, "loss": 1.0209, "step": 249},
    {"epoch": 0.4734848484848485, "grad_norm": 0.267578125, "learning_rate": 5.265151515151515e-06, "loss": 1.0466, "step": 250},
    {"epoch": 0.4753787878787879, "grad_norm": 0.216796875, "learning_rate": 5.246212121212122e-06, "loss": 1.106, "step": 251},
    {"epoch": 0.4772727272727273, "grad_norm": 0.189453125, "learning_rate": 5.2272727272727274e-06, "loss": 1.105, "step": 252},
    {"epoch": 0.4791666666666667, "grad_norm": 0.1923828125, "learning_rate": 5.208333333333334e-06, "loss": 1.0542, "step": 253},
    {"epoch": 0.4810606060606061, "grad_norm": 0.1953125, "learning_rate": 5.18939393939394e-06, "loss": 1.0407, "step": 254},
    {"epoch": 0.48295454545454547, "grad_norm": 0.21875, "learning_rate": 5.170454545454546e-06, "loss": 1.0475, "step": 255},
    {"epoch": 0.48484848484848486, "grad_norm": 0.21484375, "learning_rate": 5.151515151515152e-06, "loss": 1.0253, "step": 256},
    {"epoch": 0.48674242424242425, "grad_norm": 0.1884765625, "learning_rate": 5.132575757575759e-06, "loss": 1.0138, "step": 257},
    {"epoch": 0.48863636363636365, "grad_norm": 0.1865234375, "learning_rate": 5.113636363636364e-06, "loss": 1.0474, "step": 258},
    {"epoch": 0.49053030303030304, "grad_norm": 0.361328125, "learning_rate": 5.094696969696971e-06, "loss": 1.1214, "step": 259},
    {"epoch": 0.49242424242424243, "grad_norm": 0.1845703125, "learning_rate": 5.075757575757576e-06, "loss": 1.0277, "step": 260},
    {"epoch": 0.4943181818181818, "grad_norm": 0.19921875, "learning_rate": 5.056818181818182e-06, "loss": 1.065, "step": 261},
    {"epoch": 0.4962121212121212, "grad_norm": 0.2255859375, "learning_rate": 5.037878787878788e-06, "loss": 1.0555, "step": 262},
    {"epoch": 0.4981060606060606, "grad_norm": 0.185546875, "learning_rate": 5.018939393939395e-06, "loss": 1.0351, "step": 263},
    {"epoch": 0.5, "grad_norm": 0.1865234375, "learning_rate": 5e-06, "loss": 1.0455, "step": 264},
    {"epoch": 0.5018939393939394, "grad_norm": 0.1875, "learning_rate": 4.981060606060606e-06, "loss": 1.0422, "step": 265},
    {"epoch": 0.5037878787878788, "grad_norm": 0.189453125, "learning_rate": 4.962121212121213e-06, "loss": 1.0157, "step": 266},
    {"epoch": 0.5056818181818182, "grad_norm": 0.2236328125, "learning_rate": 4.9431818181818184e-06, "loss": 1.0829, "step": 267},
    {"epoch": 0.5075757575757576, "grad_norm": 0.2158203125, "learning_rate": 4.924242424242425e-06, "loss": 1.0898, "step": 268},
    {"epoch": 0.509469696969697, "grad_norm": 0.185546875, "learning_rate": 4.905303030303031e-06, "loss": 1.0327, "step": 269},
    {"epoch": 0.5113636363636364, "grad_norm": 0.19140625, "learning_rate": 4.8863636363636365e-06, "loss": 1.0568, "step": 270},
    {"epoch": 0.5132575757575758, "grad_norm": 0.189453125, "learning_rate": 4.867424242424243e-06, "loss": 1.0147, "step": 271},
    {"epoch": 0.5151515151515151, "grad_norm": 0.2041015625, "learning_rate": 4.848484848484849e-06, "loss": 1.0506, "step": 272},
    {"epoch": 0.5170454545454546, "grad_norm": 0.265625, "learning_rate": 4.829545454545455e-06, "loss": 1.0745, "step": 273},
    {"epoch": 0.5189393939393939, "grad_norm": 0.1943359375, "learning_rate": 4.810606060606061e-06, "loss": 1.049, "step": 274},
    {"epoch": 0.5208333333333334, "grad_norm": 0.1923828125, "learning_rate": 4.791666666666668e-06, "loss": 1.0643, "step": 275},
    {"epoch": 0.5227272727272727, "grad_norm": 0.439453125, "learning_rate": 4.772727272727273e-06, "loss": 1.0551, "step": 276},
    {"epoch": 0.5246212121212122, "grad_norm": 0.2158203125, "learning_rate": 4.753787878787879e-06, "loss": 1.0604, "step": 277},
    {"epoch": 0.5265151515151515, "grad_norm": 0.1884765625, "learning_rate": 4.734848484848486e-06, "loss": 1.0536, "step": 278},
    {"epoch": 0.5284090909090909, "grad_norm": 0.1884765625, "learning_rate": 4.715909090909091e-06, "loss": 1.0339, "step": 279},
    {"epoch": 0.5303030303030303, "grad_norm": 0.1845703125, "learning_rate": 4.696969696969698e-06, "loss": 0.996, "step": 280},
    {"epoch": 0.5321969696969697, "grad_norm": 0.193359375, "learning_rate": 4.678030303030303e-06, "loss": 1.0682, "step": 281},
    {"epoch": 0.5340909090909091, "grad_norm": 0.2080078125, "learning_rate": 4.6590909090909095e-06, "loss": 1.0002, "step": 282},
    {"epoch": 0.5359848484848485, "grad_norm": 0.208984375, "learning_rate": 4.640151515151515e-06, "loss": 1.043, "step": 283},
    {"epoch": 0.5378787878787878, "grad_norm": 0.20703125, "learning_rate": 4.621212121212122e-06, "loss": 1.0011, "step": 284},
    {"epoch": 0.5397727272727273, "grad_norm": 0.26171875, "learning_rate": 4.6022727272727275e-06, "loss": 0.9569, "step": 285},
    {"epoch": 0.5416666666666666, "grad_norm": 0.220703125, "learning_rate": 4.583333333333333e-06, "loss": 1.0664, "step": 286},
    {"epoch": 0.5435606060606061, "grad_norm": 0.189453125, "learning_rate": 4.56439393939394e-06, "loss": 1.0537, "step": 287},
    {"epoch": 0.5454545454545454, "grad_norm": 0.248046875, "learning_rate": 4.5454545454545455e-06, "loss": 1.004, "step": 288},
    {"epoch": 0.5473484848484849, "grad_norm": 0.205078125, "learning_rate": 4.526515151515152e-06, "loss": 1.0854, "step": 289},
    {"epoch": 0.5492424242424242, "grad_norm": 0.2158203125, "learning_rate": 4.507575757575758e-06, "loss": 1.0354, "step": 290},
    {"epoch": 0.5511363636363636, "grad_norm": 0.1845703125, "learning_rate": 4.4886363636363636e-06, "loss": 1.0331, "step": 291},
    {"epoch": 0.553030303030303, "grad_norm": 0.19921875, "learning_rate": 4.46969696969697e-06, "loss": 1.0081, "step": 292},
    {"epoch": 0.5549242424242424, "grad_norm": 0.228515625, "learning_rate": 4.450757575757576e-06, "loss": 1.0208, "step": 293},
    {"epoch": 0.5568181818181818, "grad_norm": 0.1953125, "learning_rate": 4.4318181818181824e-06, "loss": 1.0202, "step": 294},
    {"epoch": 0.5587121212121212, "grad_norm": 0.18359375, "learning_rate": 4.412878787878788e-06, "loss": 1.0211, "step": 295},
    {"epoch": 0.5606060606060606, "grad_norm": 0.1904296875, "learning_rate": 4.393939393939394e-06, "loss": 1.0362, "step": 296},
    {"epoch": 0.5625, "grad_norm": 0.193359375, "learning_rate": 4.3750000000000005e-06, "loss": 1.0356, "step": 297},
    {"epoch": 0.5643939393939394, "grad_norm": 0.21484375, "learning_rate": 4.356060606060606e-06, "loss": 1.0634, "step": 298},
    {"epoch": 0.5662878787878788, "grad_norm": 0.189453125, "learning_rate": 4.337121212121213e-06, "loss": 1.0213, "step": 299},
    {"epoch": 0.5681818181818182, "grad_norm": 0.1875, "learning_rate": 4.3181818181818185e-06, "loss": 1.0089, "step": 300},
    {"epoch": 0.5700757575757576, "grad_norm": 0.185546875, "learning_rate": 4.299242424242425e-06, "loss": 1.031, "step": 301},
    {"epoch": 0.571969696969697, "grad_norm": 0.2001953125, "learning_rate": 4.280303030303031e-06, "loss": 1.1084, "step": 302},
    {"epoch": 0.5738636363636364, "grad_norm": 0.19921875, "learning_rate": 4.2613636363636365e-06, "loss": 1.0512, "step": 303},
    {"epoch": 0.5757575757575758, "grad_norm": 0.185546875, "learning_rate": 4.242424242424243e-06, "loss": 1.0471, "step": 304},
    {"epoch": 0.5776515151515151, "grad_norm": 0.2890625, "learning_rate": 4.223484848484849e-06, "loss": 0.985, "step": 305},
    {"epoch": 0.5795454545454546, "grad_norm": 0.189453125, "learning_rate": 4.204545454545455e-06, "loss": 1.0386, "step": 306},
    {"epoch": 0.5814393939393939, "grad_norm": 0.2734375, "learning_rate": 4.185606060606061e-06, "loss": 0.9627, "step": 307},
    {"epoch": 0.5833333333333334, "grad_norm": 0.203125, "learning_rate": 4.166666666666667e-06, "loss": 1.0047, "step": 308},
    {"epoch": 0.5852272727272727, "grad_norm": 0.1875, "learning_rate": 4.1477272727272734e-06, "loss": 1.0111, "step": 309},
    {"epoch": 0.5871212121212122, "grad_norm": 0.193359375, "learning_rate": 4.128787878787879e-06, "loss": 1.0222, "step": 310},
    {"epoch": 0.5890151515151515, "grad_norm": 0.18359375, "learning_rate": 4.109848484848486e-06, "loss": 1.0487, "step": 311},
    {"epoch": 0.5909090909090909, "grad_norm": 0.1884765625, "learning_rate": 4.0909090909090915e-06, "loss": 1.0101, "step": 312},
    {"epoch": 0.5928030303030303, "grad_norm": 0.193359375, "learning_rate": 4.071969696969697e-06, "loss": 1.0388, "step": 313},
    {"epoch": 0.5946969696969697, "grad_norm": 0.28515625, "learning_rate": 4.053030303030303e-06, "loss": 1.0022, "step": 314},
    {"epoch": 0.5965909090909091, "grad_norm": 0.197265625, "learning_rate": 4.0340909090909095e-06, "loss": 1.0172, "step": 315},
    {"epoch": 0.5984848484848485, "grad_norm": 0.185546875, "learning_rate": 4.015151515151515e-06, "loss": 1.0265, "step": 316},
    {"epoch": 0.6003787878787878, "grad_norm": 0.1875, "learning_rate": 3.996212121212121e-06, "loss": 1.0261, "step": 317},
    {"epoch": 0.6022727272727273, "grad_norm": 0.1923828125, "learning_rate": 3.9772727272727275e-06, "loss": 1.0162, "step": 318},
    {"epoch": 0.6041666666666666, "grad_norm": 0.24609375, "learning_rate": 3.958333333333333e-06, "loss": 1.0726, "step": 319},
    {"epoch": 0.6060606060606061, "grad_norm": 0.1953125, "learning_rate": 3.93939393939394e-06, "loss": 1.0004, "step": 320},
    {"epoch": 0.6079545454545454, "grad_norm": 0.1943359375, "learning_rate": 3.9204545454545456e-06, "loss": 1.0371, "step": 321},
    {"epoch": 0.6098484848484849, "grad_norm": 0.1962890625, "learning_rate": 3.901515151515151e-06, "loss": 1.0491, "step": 322},
    {"epoch": 0.6117424242424242, "grad_norm": 0.220703125, "learning_rate": 3.882575757575758e-06, "loss": 1.0305, "step": 323},
    {"epoch": 0.6136363636363636, "grad_norm": 0.236328125, "learning_rate": 3.863636363636364e-06, "loss": 1.0276, "step": 324},
    {"epoch": 0.615530303030303, "grad_norm": 0.2392578125, "learning_rate": 3.84469696969697e-06, "loss": 1.0749, "step": 325},
    {"epoch": 0.6174242424242424, "grad_norm": 0.19140625, "learning_rate": 3.825757575757576e-06, "loss": 1.0202, "step": 326},
    {"epoch": 0.6193181818181818, "grad_norm": 0.228515625, "learning_rate": 3.806818181818182e-06, "loss": 1.0087, "step": 327},
    {"epoch": 0.6212121212121212, "grad_norm": 0.1845703125, "learning_rate": 3.7878787878787882e-06, "loss": 1.0445, "step": 328},
    {"epoch": 0.6231060606060606, "grad_norm": 0.224609375, "learning_rate": 3.7689393939393944e-06, "loss": 1.0177, "step": 329},
    {"epoch": 0.625, "grad_norm": 0.197265625, "learning_rate": 3.7500000000000005e-06, "loss": 0.9965, "step": 330},
    {"epoch": 0.6268939393939394, "grad_norm": 0.205078125, "learning_rate": 3.7310606060606062e-06, "loss": 1.069, "step": 331},
    {"epoch": 0.6287878787878788, "grad_norm": 0.2138671875, "learning_rate": 3.7121212121212124e-06, "loss": 1.0465, "step": 332},
    {"epoch": 0.6306818181818182, "grad_norm": 0.1923828125, "learning_rate": 3.6931818181818186e-06, "loss": 1.05, "step": 333},
    {"epoch": 0.6325757575757576, "grad_norm": 0.2080078125, "learning_rate": 3.6742424242424247e-06, "loss": 1.034, "step": 334},
    {"epoch": 0.634469696969697, "grad_norm": 0.19921875, "learning_rate": 3.655303030303031e-06, "loss": 1.0072, "step": 335},
    {"epoch": 0.6363636363636364, "grad_norm": 0.2001953125, "learning_rate": 3.6363636363636366e-06, "loss": 1.0752, "step": 336},
    {"epoch": 0.6382575757575758, "grad_norm": 0.1943359375, "learning_rate": 3.6174242424242427e-06, "loss": 1.0422, "step": 337},
    {"epoch": 0.6401515151515151, "grad_norm": 0.2421875, "learning_rate": 3.598484848484849e-06, "loss": 0.9971, "step": 338},
    {"epoch": 0.6420454545454546, "grad_norm": 0.212890625, "learning_rate": 3.579545454545455e-06, "loss": 0.971, "step": 339},
    {"epoch": 0.6439393939393939, "grad_norm": 0.189453125, "learning_rate": 3.560606060606061e-06, "loss": 1.025, "step": 340},
    {"epoch": 0.6458333333333334, "grad_norm": 0.197265625, "learning_rate": 3.5416666666666673e-06, "loss": 1.024, "step": 341},
    {"epoch": 0.6477272727272727, "grad_norm": 0.248046875, "learning_rate": 3.522727272727273e-06, "loss": 1.0118, "step": 342},
    {"epoch": 0.6496212121212122, "grad_norm": 0.2158203125, "learning_rate": 3.5037878787878792e-06, "loss": 0.9955, "step": 343},
    {"epoch": 0.6515151515151515, "grad_norm": 0.2001953125, "learning_rate": 3.4848484848484854e-06, "loss": 1.0528, "step": 344},
    {"epoch": 0.6534090909090909, "grad_norm": 0.1904296875, "learning_rate": 3.4659090909090915e-06, "loss": 1.0043, "step": 345},
    {"epoch": 0.6553030303030303, "grad_norm": 0.1865234375, "learning_rate": 3.4469696969696977e-06, "loss": 1.0287, "step": 346},
    {"epoch": 0.6571969696969697, "grad_norm": 0.189453125, "learning_rate": 3.428030303030303e-06, "loss": 1.0232, "step": 347},
    {"epoch": 0.6590909090909091, "grad_norm": 0.1943359375, "learning_rate": 3.409090909090909e-06, "loss": 1.0388, "step": 348},
    {"epoch": 0.6609848484848485, "grad_norm": 0.2099609375, "learning_rate": 3.3901515151515153e-06, "loss": 1.0505, "step": 349},
    {"epoch": 0.6628787878787878, "grad_norm": 0.1865234375, "learning_rate": 3.3712121212121214e-06, "loss": 1.0098, "step": 350},
    {"epoch": 0.6647727272727273, "grad_norm": 0.1875, "learning_rate": 3.352272727272727e-06, "loss": 1.024, "step": 351},
    {"epoch": 0.6666666666666666, "grad_norm": 0.2041015625, "learning_rate": 3.3333333333333333e-06, "loss": 1.059, "step": 352},
    {"epoch": 0.6685606060606061, "grad_norm": 0.2216796875, "learning_rate": 3.3143939393939395e-06, "loss": 1.0339, "step": 353},
    {"epoch": 0.6704545454545454, "grad_norm": 0.1943359375, "learning_rate": 3.2954545454545456e-06, "loss": 1.021, "step": 354},
    {"epoch": 0.6723484848484849, "grad_norm": 0.1904296875, "learning_rate": 3.2765151515151518e-06, "loss": 1.0581, "step": 355},
    {"epoch": 0.6742424242424242, "grad_norm": 0.19140625, "learning_rate": 3.257575757575758e-06, "loss": 1.0264, "step": 356},
    {"epoch": 0.6761363636363636, "grad_norm": 0.197265625, "learning_rate": 3.2386363636363637e-06, "loss": 1.0273, "step": 357},
    {"epoch": 0.678030303030303, "grad_norm": 0.1904296875, "learning_rate": 3.21969696969697e-06, "loss": 0.9971, "step": 358},
    {"epoch": 0.6799242424242424, "grad_norm": 0.29296875, "learning_rate": 3.200757575757576e-06, "loss": 1.0068, "step": 359},
    {"epoch": 0.6818181818181818, "grad_norm": 0.1943359375, "learning_rate": 3.181818181818182e-06, "loss": 1.009, "step": 360},
    {"epoch": 0.6837121212121212, "grad_norm": 0.298828125, "learning_rate": 3.1628787878787883e-06, "loss": 1.0302, "step": 361},
    {"epoch": 0.6856060606060606, "grad_norm": 0.1865234375, "learning_rate": 3.143939393939394e-06, "loss": 1.0213, "step": 362},
    {"epoch": 0.6875, "grad_norm": 0.2021484375, "learning_rate": 3.125e-06, "loss": 1.0763, "step": 363},
    {"epoch": 0.6893939393939394, "grad_norm": 0.220703125, "learning_rate": 3.1060606060606063e-06, "loss": 1.0518, "step": 364},
    {"epoch": 0.6912878787878788, "grad_norm": 0.18359375, "learning_rate": 3.0871212121212125e-06, "loss": 1.0182, "step": 365},
    {"epoch": 0.6931818181818182, "grad_norm": 0.2001953125, "learning_rate": 3.0681818181818186e-06, "loss": 1.01, "step": 366},
    {"epoch": 0.6950757575757576, "grad_norm": 0.205078125, "learning_rate": 3.0492424242424248e-06, "loss": 1.0204, "step": 367},
    {"epoch": 0.696969696969697, "grad_norm": 0.1875, "learning_rate": 3.0303030303030305e-06, "loss": 0.9715, "step": 368},
    {"epoch": 0.6988636363636364, "grad_norm": 0.19140625, "learning_rate": 3.0113636363636366e-06, "loss": 1.0144, "step": 369},
    {"epoch": 0.7007575757575758, "grad_norm": 0.2080078125, "learning_rate": 2.992424242424243e-06, "loss": 1.0633, "step": 370},
    {"epoch": 0.7026515151515151, "grad_norm": 0.2099609375, "learning_rate": 2.973484848484849e-06, "loss": 1.0299, "step": 371},
    {"epoch": 0.7045454545454546, "grad_norm": 0.1923828125, "learning_rate": 2.954545454545455e-06, "loss": 1.0252, "step": 372},
    {"epoch": 0.7064393939393939, "grad_norm": 0.220703125, "learning_rate": 2.935606060606061e-06, "loss": 0.9969, "step": 373},
    {"epoch": 0.7083333333333334, "grad_norm": 0.21484375, "learning_rate": 2.916666666666667e-06, "loss": 1.1488, "step": 374},
    {"epoch": 0.7102272727272727, "grad_norm": 0.185546875, "learning_rate": 2.897727272727273e-06, "loss": 0.9887, "step": 375},
    {"epoch": 0.7121212121212122, "grad_norm": 0.201171875, "learning_rate": 2.8787878787878793e-06, "loss": 1.0506, "step": 376},
    {"epoch": 0.7140151515151515, "grad_norm": 0.1875, "learning_rate": 2.8598484848484854e-06, "loss": 1.0164, "step": 377},
    {"epoch": 0.7159090909090909, "grad_norm": 0.1982421875, "learning_rate": 2.8409090909090916e-06, "loss": 1.0289, "step": 378},
    {"epoch": 0.7178030303030303, "grad_norm": 0.26953125, "learning_rate": 2.8219696969696973e-06, "loss": 1.0498, "step": 379},
    {"epoch": 0.7196969696969697, "grad_norm": 0.1982421875, "learning_rate": 2.803030303030303e-06, "loss": 1.0029, "step": 380},
    {"epoch": 0.7215909090909091, "grad_norm": 0.193359375, "learning_rate": 2.784090909090909e-06, "loss": 1.0073, "step": 381},
    {"epoch": 0.7234848484848485, "grad_norm": 0.2041015625, "learning_rate": 2.7651515151515153e-06, "loss": 0.965, "step": 382},
    {"epoch": 0.7253787878787878, "grad_norm": 0.1962890625, "learning_rate": 2.746212121212121e-06, "loss": 1.0399, "step": 383},
    {"epoch": 0.7272727272727273, "grad_norm": 0.19921875, "learning_rate": 2.7272727272727272e-06, "loss": 1.0859, "step": 384},
    {"epoch": 0.7291666666666666, "grad_norm": 0.2099609375, "learning_rate": 2.7083333333333334e-06, "loss": 1.0315, "step": 385},
    {"epoch": 0.7310606060606061, "grad_norm": 0.201171875, "learning_rate": 2.6893939393939395e-06, "loss": 1.0363, "step": 386},
    {"epoch": 0.7329545454545454, "grad_norm": 0.1982421875, "learning_rate": 2.6704545454545457e-06, "loss": 1.0575, "step": 387},
    {
      "epoch": 0.7348484848484849,
      "grad_norm": 0.1904296875,
      "learning_rate": 2.6515151515151514e-06,
|
"loss": 0.9732, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7367424242424242, |
|
"grad_norm": 0.19140625, |
|
"learning_rate": 2.6325757575757576e-06, |
|
"loss": 1.0255, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7386363636363636, |
|
"grad_norm": 0.1923828125, |
|
"learning_rate": 2.6136363636363637e-06, |
|
"loss": 1.0481, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.740530303030303, |
|
"grad_norm": 0.189453125, |
|
"learning_rate": 2.59469696969697e-06, |
|
"loss": 1.0179, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7424242424242424, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 2.575757575757576e-06, |
|
"loss": 1.0121, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7443181818181818, |
|
"grad_norm": 0.2470703125, |
|
"learning_rate": 2.556818181818182e-06, |
|
"loss": 1.0497, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7462121212121212, |
|
"grad_norm": 0.2021484375, |
|
"learning_rate": 2.537878787878788e-06, |
|
"loss": 1.0452, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7481060606060606, |
|
"grad_norm": 0.2333984375, |
|
"learning_rate": 2.518939393939394e-06, |
|
"loss": 0.9853, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.185546875, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.0029, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7518939393939394, |
|
"grad_norm": 0.23828125, |
|
"learning_rate": 2.4810606060606064e-06, |
|
"loss": 1.0078, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7537878787878788, |
|
"grad_norm": 0.216796875, |
|
"learning_rate": 2.4621212121212125e-06, |
|
"loss": 1.0086, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7556818181818182, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 2.4431818181818182e-06, |
|
"loss": 1.0296, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7575757575757576, |
|
"grad_norm": 0.19921875, |
|
"learning_rate": 2.4242424242424244e-06, |
|
"loss": 1.0285, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.759469696969697, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 2.4053030303030305e-06, |
|
"loss": 1.0278, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7613636363636364, |
|
"grad_norm": 0.2021484375, |
|
"learning_rate": 2.3863636363636367e-06, |
|
"loss": 1.003, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7632575757575758, |
|
"grad_norm": 0.1904296875, |
|
"learning_rate": 2.367424242424243e-06, |
|
"loss": 1.0172, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7651515151515151, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 2.348484848484849e-06, |
|
"loss": 1.0342, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7670454545454546, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 2.3295454545454547e-06, |
|
"loss": 1.0667, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7689393939393939, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 2.310606060606061e-06, |
|
"loss": 0.9957, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7708333333333334, |
|
"grad_norm": 0.197265625, |
|
"learning_rate": 2.2916666666666666e-06, |
|
"loss": 1.0137, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7727272727272727, |
|
"grad_norm": 0.193359375, |
|
"learning_rate": 2.2727272727272728e-06, |
|
"loss": 1.0105, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7746212121212122, |
|
"grad_norm": 0.1904296875, |
|
"learning_rate": 2.253787878787879e-06, |
|
"loss": 1.0261, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7765151515151515, |
|
"grad_norm": 0.22265625, |
|
"learning_rate": 2.234848484848485e-06, |
|
"loss": 1.0223, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7784090909090909, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 2.2159090909090912e-06, |
|
"loss": 1.0132, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7803030303030303, |
|
"grad_norm": 0.19921875, |
|
"learning_rate": 2.196969696969697e-06, |
|
"loss": 1.0187, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7821969696969697, |
|
"grad_norm": 0.2451171875, |
|
"learning_rate": 2.178030303030303e-06, |
|
"loss": 1.0506, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7840909090909091, |
|
"grad_norm": 0.255859375, |
|
"learning_rate": 2.1590909090909092e-06, |
|
"loss": 1.0259, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7859848484848485, |
|
"grad_norm": 0.2080078125, |
|
"learning_rate": 2.1401515151515154e-06, |
|
"loss": 1.0628, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7878787878787878, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 2.1212121212121216e-06, |
|
"loss": 1.0166, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7897727272727273, |
|
"grad_norm": 0.2353515625, |
|
"learning_rate": 2.1022727272727277e-06, |
|
"loss": 1.0129, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7916666666666666, |
|
"grad_norm": 0.2109375, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"loss": 1.0543, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7935606060606061, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 2.0643939393939396e-06, |
|
"loss": 0.9744, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7954545454545454, |
|
"grad_norm": 0.2109375, |
|
"learning_rate": 2.0454545454545457e-06, |
|
"loss": 1.0075, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7973484848484849, |
|
"grad_norm": 0.205078125, |
|
"learning_rate": 2.0265151515151515e-06, |
|
"loss": 1.0368, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7992424242424242, |
|
"grad_norm": 0.193359375, |
|
"learning_rate": 2.0075757575757576e-06, |
|
"loss": 1.0257, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.8011363636363636, |
|
"grad_norm": 0.193359375, |
|
"learning_rate": 1.9886363636363638e-06, |
|
"loss": 1.0123, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.803030303030303, |
|
"grad_norm": 0.26171875, |
|
"learning_rate": 1.96969696969697e-06, |
|
"loss": 1.0215, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.8049242424242424, |
|
"grad_norm": 0.2109375, |
|
"learning_rate": 1.9507575757575757e-06, |
|
"loss": 1.0081, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8068181818181818, |
|
"grad_norm": 0.201171875, |
|
"learning_rate": 1.931818181818182e-06, |
|
"loss": 1.0203, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.8087121212121212, |
|
"grad_norm": 0.251953125, |
|
"learning_rate": 1.912878787878788e-06, |
|
"loss": 0.9781, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.8106060606060606, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 1.8939393939393941e-06, |
|
"loss": 1.0437, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.8125, |
|
"grad_norm": 0.220703125, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 1.0094, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.8143939393939394, |
|
"grad_norm": 0.25, |
|
"learning_rate": 1.8560606060606062e-06, |
|
"loss": 1.0487, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8162878787878788, |
|
"grad_norm": 0.224609375, |
|
"learning_rate": 1.8371212121212124e-06, |
|
"loss": 1.0197, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.201171875, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 0.9761, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.8200757575757576, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 1.7992424242424244e-06, |
|
"loss": 1.0172, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.821969696969697, |
|
"grad_norm": 0.2138671875, |
|
"learning_rate": 1.7803030303030306e-06, |
|
"loss": 1.0417, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.8238636363636364, |
|
"grad_norm": 0.2216796875, |
|
"learning_rate": 1.7613636363636365e-06, |
|
"loss": 1.055, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8257575757575758, |
|
"grad_norm": 0.1875, |
|
"learning_rate": 1.7424242424242427e-06, |
|
"loss": 1.0217, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8276515151515151, |
|
"grad_norm": 0.19140625, |
|
"learning_rate": 1.7234848484848488e-06, |
|
"loss": 1.0212, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8295454545454546, |
|
"grad_norm": 0.2177734375, |
|
"learning_rate": 1.7045454545454546e-06, |
|
"loss": 1.0046, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8314393939393939, |
|
"grad_norm": 0.189453125, |
|
"learning_rate": 1.6856060606060607e-06, |
|
"loss": 1.0271, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 0.2060546875, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 0.9767, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8352272727272727, |
|
"grad_norm": 0.1904296875, |
|
"learning_rate": 1.6477272727272728e-06, |
|
"loss": 1.0363, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8371212121212122, |
|
"grad_norm": 0.2373046875, |
|
"learning_rate": 1.628787878787879e-06, |
|
"loss": 1.0145, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8390151515151515, |
|
"grad_norm": 0.203125, |
|
"learning_rate": 1.609848484848485e-06, |
|
"loss": 1.0217, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8409090909090909, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 1.590909090909091e-06, |
|
"loss": 0.9888, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8428030303030303, |
|
"grad_norm": 0.8203125, |
|
"learning_rate": 1.571969696969697e-06, |
|
"loss": 1.0196, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8446969696969697, |
|
"grad_norm": 0.2177734375, |
|
"learning_rate": 1.5530303030303032e-06, |
|
"loss": 1.0016, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8465909090909091, |
|
"grad_norm": 0.23828125, |
|
"learning_rate": 1.5340909090909093e-06, |
|
"loss": 1.0335, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 1.5151515151515152e-06, |
|
"loss": 1.0176, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8503787878787878, |
|
"grad_norm": 0.212890625, |
|
"learning_rate": 1.4962121212121214e-06, |
|
"loss": 1.0166, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8522727272727273, |
|
"grad_norm": 0.2236328125, |
|
"learning_rate": 1.4772727272727275e-06, |
|
"loss": 1.0212, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8541666666666666, |
|
"grad_norm": 0.19921875, |
|
"learning_rate": 1.4583333333333335e-06, |
|
"loss": 1.029, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8560606060606061, |
|
"grad_norm": 0.1904296875, |
|
"learning_rate": 1.4393939393939396e-06, |
|
"loss": 0.9911, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8579545454545454, |
|
"grad_norm": 0.1982421875, |
|
"learning_rate": 1.4204545454545458e-06, |
|
"loss": 1.026, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8598484848484849, |
|
"grad_norm": 0.1923828125, |
|
"learning_rate": 1.4015151515151515e-06, |
|
"loss": 0.9957, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8617424242424242, |
|
"grad_norm": 0.23046875, |
|
"learning_rate": 1.3825757575757577e-06, |
|
"loss": 1.0497, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8636363636363636, |
|
"grad_norm": 0.1982421875, |
|
"learning_rate": 1.3636363636363636e-06, |
|
"loss": 1.0472, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.865530303030303, |
|
"grad_norm": 0.255859375, |
|
"learning_rate": 1.3446969696969698e-06, |
|
"loss": 1.0525, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8674242424242424, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 1.3257575757575757e-06, |
|
"loss": 1.0084, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8693181818181818, |
|
"grad_norm": 0.1982421875, |
|
"learning_rate": 1.3068181818181819e-06, |
|
"loss": 1.008, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8712121212121212, |
|
"grad_norm": 0.2099609375, |
|
"learning_rate": 1.287878787878788e-06, |
|
"loss": 0.9981, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8731060606060606, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 1.268939393939394e-06, |
|
"loss": 1.0033, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.875, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.0505, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8768939393939394, |
|
"grad_norm": 0.2041015625, |
|
"learning_rate": 1.2310606060606063e-06, |
|
"loss": 1.0223, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8787878787878788, |
|
"grad_norm": 0.2099609375, |
|
"learning_rate": 1.2121212121212122e-06, |
|
"loss": 0.9805, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8806818181818182, |
|
"grad_norm": 0.189453125, |
|
"learning_rate": 1.1931818181818183e-06, |
|
"loss": 1.0563, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8825757575757576, |
|
"grad_norm": 0.2158203125, |
|
"learning_rate": 1.1742424242424245e-06, |
|
"loss": 1.0264, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.884469696969697, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 1.1553030303030304e-06, |
|
"loss": 1.0075, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8863636363636364, |
|
"grad_norm": 0.1845703125, |
|
"learning_rate": 1.1363636363636364e-06, |
|
"loss": 1.0187, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8882575757575758, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 1.1174242424242425e-06, |
|
"loss": 1.0015, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8901515151515151, |
|
"grad_norm": 0.2294921875, |
|
"learning_rate": 1.0984848484848485e-06, |
|
"loss": 1.0642, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8920454545454546, |
|
"grad_norm": 0.248046875, |
|
"learning_rate": 1.0795454545454546e-06, |
|
"loss": 1.0108, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8939393939393939, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 1.0606060606060608e-06, |
|
"loss": 1.0096, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8958333333333334, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 1.0416666666666667e-06, |
|
"loss": 1.0537, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8977272727272727, |
|
"grad_norm": 0.2158203125, |
|
"learning_rate": 1.0227272727272729e-06, |
|
"loss": 1.0312, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8996212121212122, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 1.0037878787878788e-06, |
|
"loss": 1.0218, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.9015151515151515, |
|
"grad_norm": 0.216796875, |
|
"learning_rate": 9.84848484848485e-07, |
|
"loss": 1.06, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.9034090909090909, |
|
"grad_norm": 0.2255859375, |
|
"learning_rate": 9.65909090909091e-07, |
|
"loss": 0.9917, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.9053030303030303, |
|
"grad_norm": 0.2021484375, |
|
"learning_rate": 9.469696969696971e-07, |
|
"loss": 1.0538, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.9071969696969697, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 9.280303030303031e-07, |
|
"loss": 1.0648, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.189453125, |
|
"learning_rate": 9.090909090909091e-07, |
|
"loss": 1.0167, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9109848484848485, |
|
"grad_norm": 0.2138671875, |
|
"learning_rate": 8.901515151515153e-07, |
|
"loss": 1.0157, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.9128787878787878, |
|
"grad_norm": 0.1923828125, |
|
"learning_rate": 8.712121212121213e-07, |
|
"loss": 0.983, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.9147727272727273, |
|
"grad_norm": 0.19921875, |
|
"learning_rate": 8.522727272727273e-07, |
|
"loss": 0.9711, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.9166666666666666, |
|
"grad_norm": 0.205078125, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 1.0326, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.9185606060606061, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 8.143939393939395e-07, |
|
"loss": 1.0072, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9204545454545454, |
|
"grad_norm": 0.1953125, |
|
"learning_rate": 7.954545454545455e-07, |
|
"loss": 0.9935, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9223484848484849, |
|
"grad_norm": 0.185546875, |
|
"learning_rate": 7.765151515151516e-07, |
|
"loss": 0.972, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.9242424242424242, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 7.575757575757576e-07, |
|
"loss": 0.9696, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9261363636363636, |
|
"grad_norm": 0.203125, |
|
"learning_rate": 7.386363636363638e-07, |
|
"loss": 1.0179, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.928030303030303, |
|
"grad_norm": 0.21875, |
|
"learning_rate": 7.196969696969698e-07, |
|
"loss": 1.0141, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9299242424242424, |
|
"grad_norm": 0.2041015625, |
|
"learning_rate": 7.007575757575758e-07, |
|
"loss": 0.9915, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.9318181818181818, |
|
"grad_norm": 0.1865234375, |
|
"learning_rate": 6.818181818181818e-07, |
|
"loss": 0.9909, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.9337121212121212, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 6.628787878787879e-07, |
|
"loss": 1.0208, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.9356060606060606, |
|
"grad_norm": 0.203125, |
|
"learning_rate": 6.43939393939394e-07, |
|
"loss": 1.043, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 0.1923828125, |
|
"learning_rate": 6.25e-07, |
|
"loss": 0.9941, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9393939393939394, |
|
"grad_norm": 0.208984375, |
|
"learning_rate": 6.060606060606061e-07, |
|
"loss": 1.033, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9412878787878788, |
|
"grad_norm": 0.1982421875, |
|
"learning_rate": 5.871212121212122e-07, |
|
"loss": 0.9724, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9431818181818182, |
|
"grad_norm": 0.197265625, |
|
"learning_rate": 5.681818181818182e-07, |
|
"loss": 1.0303, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9450757575757576, |
|
"grad_norm": 0.20703125, |
|
"learning_rate": 5.492424242424242e-07, |
|
"loss": 1.0489, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.946969696969697, |
|
"grad_norm": 0.197265625, |
|
"learning_rate": 5.303030303030304e-07, |
|
"loss": 1.0209, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9488636363636364, |
|
"grad_norm": 0.20703125, |
|
"learning_rate": 5.113636363636364e-07, |
|
"loss": 1.0542, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9507575757575758, |
|
"grad_norm": 0.2109375, |
|
"learning_rate": 4.924242424242425e-07, |
|
"loss": 1.0406, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9526515151515151, |
|
"grad_norm": 0.19140625, |
|
"learning_rate": 4.7348484848484853e-07, |
|
"loss": 0.991, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9545454545454546, |
|
"grad_norm": 0.2470703125, |
|
"learning_rate": 4.5454545454545457e-07, |
|
"loss": 1.045, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9564393939393939, |
|
"grad_norm": 0.1904296875, |
|
"learning_rate": 4.3560606060606067e-07, |
|
"loss": 1.0152, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9583333333333334, |
|
"grad_norm": 0.193359375, |
|
"learning_rate": 4.1666666666666667e-07, |
|
"loss": 1.0243, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9602272727272727, |
|
"grad_norm": 0.1982421875, |
|
"learning_rate": 3.9772727272727276e-07, |
|
"loss": 1.0035, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9621212121212122, |
|
"grad_norm": 0.1875, |
|
"learning_rate": 3.787878787878788e-07, |
|
"loss": 0.991, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.9640151515151515, |
|
"grad_norm": 0.1904296875, |
|
"learning_rate": 3.598484848484849e-07, |
|
"loss": 1.0024, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9659090909090909, |
|
"grad_norm": 0.2470703125, |
|
"learning_rate": 3.409090909090909e-07, |
|
"loss": 1.0288, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9678030303030303, |
|
"grad_norm": 0.2060546875, |
|
"learning_rate": 3.21969696969697e-07, |
|
"loss": 1.0204, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 0.189453125, |
|
"learning_rate": 3.0303030303030305e-07, |
|
"loss": 1.0213, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9715909090909091, |
|
"grad_norm": 0.25, |
|
"learning_rate": 2.840909090909091e-07, |
|
"loss": 1.0228, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9734848484848485, |
|
"grad_norm": 0.263671875, |
|
"learning_rate": 2.651515151515152e-07, |
|
"loss": 1.0101, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9753787878787878, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 2.4621212121212124e-07, |
|
"loss": 1.068, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9772727272727273, |
|
"grad_norm": 0.1962890625, |
|
"learning_rate": 2.2727272727272729e-07, |
|
"loss": 1.0313, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9791666666666666, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 2.0833333333333333e-07, |
|
"loss": 0.9904, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9810606060606061, |
|
"grad_norm": 0.2060546875, |
|
"learning_rate": 1.893939393939394e-07, |
|
"loss": 1.0292, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9829545454545454, |
|
"grad_norm": 0.19921875, |
|
"learning_rate": 1.7045454545454545e-07, |
|
"loss": 1.0392, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9848484848484849, |
|
"grad_norm": 0.2109375, |
|
"learning_rate": 1.5151515151515152e-07, |
|
"loss": 1.0461, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9867424242424242, |
|
"grad_norm": 0.197265625, |
|
"learning_rate": 1.325757575757576e-07, |
|
"loss": 1.0209, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9886363636363636, |
|
"grad_norm": 0.20703125, |
|
"learning_rate": 1.1363636363636364e-07, |
|
"loss": 1.0252, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.990530303030303, |
|
"grad_norm": 0.19921875, |
|
"learning_rate": 9.46969696969697e-08, |
|
"loss": 0.9835, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9924242424242424, |
|
"grad_norm": 0.203125, |
|
"learning_rate": 7.575757575757576e-08, |
|
"loss": 1.0126, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9943181818181818, |
|
"grad_norm": 0.1943359375, |
|
"learning_rate": 5.681818181818182e-08, |
|
"loss": 0.9871, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9962121212121212, |
|
"grad_norm": 0.1982421875, |
|
"learning_rate": 3.787878787878788e-08, |
|
"loss": 1.0243, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9981060606060606, |
|
"grad_norm": 0.21484375, |
|
"learning_rate": 1.893939393939394e-08, |
|
"loss": 1.0246, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2431640625, |
|
"learning_rate": 0.0, |
|
"loss": 0.9495, |
|
"step": 528 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 528, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 0, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.695542908054143e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|