{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 40302,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 2.5227460711331677e-08,
      "loss": 3.3575,
      "step": 64
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.128205128205128e-08,
      "loss": 3.3023,
      "step": 128
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.775020678246484e-08,
      "loss": 2.9134,
      "step": 192
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.0421836228287841e-07,
      "loss": 2.7766,
      "step": 256
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.3068651778329198e-07,
      "loss": 2.6203,
      "step": 320
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.5715467328370554e-07,
      "loss": 2.221,
      "step": 384
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.836228287841191e-07,
      "loss": 1.6761,
      "step": 448
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.1009098428453268e-07,
      "loss": 1.3371,
      "step": 512
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.3655913978494625e-07,
      "loss": 1.102,
      "step": 576
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.6302729528535976e-07,
      "loss": 0.9863,
      "step": 640
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.8949545078577336e-07,
      "loss": 0.8123,
      "step": 704
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.159636062861869e-07,
      "loss": 0.6005,
      "step": 768
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.424317617866005e-07,
      "loss": 0.4275,
      "step": 832
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.6889991728701403e-07,
      "loss": 0.4868,
      "step": 896
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.9536807278742763e-07,
      "loss": 0.4454,
      "step": 960
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.2183622828784117e-07,
      "loss": 0.3431,
      "step": 1024
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.4830438378825477e-07,
      "loss": 0.3373,
      "step": 1088
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.743589743589743e-07,
      "loss": 0.351,
      "step": 1152
    },
    {
      "epoch": 0.09,
      "learning_rate": 5.00827129859388e-07,
      "loss": 0.3565,
      "step": 1216
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.272952853598015e-07,
      "loss": 0.2179,
      "step": 1280
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.537634408602149e-07,
      "loss": 0.3892,
      "step": 1344
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.802315963606285e-07,
      "loss": 0.3388,
      "step": 1408
    },
    {
      "epoch": 0.11,
      "learning_rate": 6.066997518610421e-07,
      "loss": 0.4345,
      "step": 1472
    },
    {
      "epoch": 0.11,
      "learning_rate": 6.331679073614557e-07,
      "loss": 0.1948,
      "step": 1536
    },
    {
      "epoch": 0.12,
      "learning_rate": 6.596360628618692e-07,
      "loss": 0.2907,
      "step": 1600
    },
    {
      "epoch": 0.12,
      "learning_rate": 6.861042183622828e-07,
      "loss": 0.3393,
      "step": 1664
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.125723738626964e-07,
      "loss": 0.2568,
      "step": 1728
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.3904052936311e-07,
      "loss": 0.3041,
      "step": 1792
    },
    {
      "epoch": 0.14,
      "learning_rate": 7.655086848635235e-07,
      "loss": 0.3316,
      "step": 1856
    },
    {
      "epoch": 0.14,
      "learning_rate": 7.919768403639371e-07,
      "loss": 0.3193,
      "step": 1920
    },
    {
      "epoch": 0.15,
      "learning_rate": 8.184449958643507e-07,
      "loss": 0.3393,
      "step": 1984
    },
    {
      "epoch": 0.15,
      "learning_rate": 8.449131513647643e-07,
      "loss": 0.2933,
      "step": 2048
    },
    {
      "epoch": 0.16,
      "learning_rate": 8.713813068651778e-07,
      "loss": 0.236,
      "step": 2112
    },
    {
      "epoch": 0.16,
      "learning_rate": 8.978494623655913e-07,
      "loss": 0.305,
      "step": 2176
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.243176178660049e-07,
      "loss": 0.2158,
      "step": 2240
    },
    {
      "epoch": 0.17,
      "learning_rate": 9.507857733664185e-07,
      "loss": 0.3261,
      "step": 2304
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.77253928866832e-07,
      "loss": 0.2282,
      "step": 2368
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.997624326892619e-07,
      "loss": 0.3758,
      "step": 2432
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.980730651462358e-07,
      "loss": 0.2466,
      "step": 2496
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.963836976032098e-07,
      "loss": 0.2283,
      "step": 2560
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.946943300601838e-07,
      "loss": 0.2605,
      "step": 2624
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.930049625171575e-07,
      "loss": 0.2818,
      "step": 2688
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.913155949741315e-07,
      "loss": 0.3004,
      "step": 2752
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.896262274311054e-07,
      "loss": 0.2703,
      "step": 2816
    },
    {
      "epoch": 0.21,
      "learning_rate": 9.879368598880794e-07,
      "loss": 0.2924,
      "step": 2880
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.862474923450532e-07,
      "loss": 0.3661,
      "step": 2944
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.845581248020271e-07,
      "loss": 0.3517,
      "step": 3008
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.82868757259001e-07,
      "loss": 0.2773,
      "step": 3072
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.81179389715975e-07,
      "loss": 0.3023,
      "step": 3136
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.79490022172949e-07,
      "loss": 0.2831,
      "step": 3200
    },
    {
      "epoch": 0.24,
      "learning_rate": 9.77800654629923e-07,
      "loss": 0.2693,
      "step": 3264
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.76111287086897e-07,
      "loss": 0.2534,
      "step": 3328
    },
    {
      "epoch": 0.25,
      "learning_rate": 9.744219195438707e-07,
      "loss": 0.183,
      "step": 3392
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.727325520008446e-07,
      "loss": 0.1786,
      "step": 3456
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.710431844578186e-07,
      "loss": 0.3985,
      "step": 3520
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.693538169147924e-07,
      "loss": 0.2585,
      "step": 3584
    },
    {
      "epoch": 0.27,
      "learning_rate": 9.676644493717663e-07,
      "loss": 0.2088,
      "step": 3648
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.659750818287403e-07,
      "loss": 0.1913,
      "step": 3712
    },
    {
      "epoch": 0.28,
      "learning_rate": 9.642857142857142e-07,
      "loss": 0.2747,
      "step": 3776
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.625963467426882e-07,
      "loss": 0.2283,
      "step": 3840
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.609069791996622e-07,
      "loss": 0.305,
      "step": 3904
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.59217611656636e-07,
      "loss": 0.2648,
      "step": 3968
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.575282441136099e-07,
      "loss": 0.2277,
      "step": 4032
    },
    {
      "epoch": 0.3,
      "learning_rate": 9.558388765705839e-07,
      "loss": 0.2445,
      "step": 4096
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.541495090275578e-07,
      "loss": 0.3094,
      "step": 4160
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.524601414845317e-07,
      "loss": 0.2558,
      "step": 4224
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.507707739415056e-07,
      "loss": 0.2467,
      "step": 4288
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.490814063984796e-07,
      "loss": 0.2412,
      "step": 4352
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.473920388554535e-07,
      "loss": 0.2564,
      "step": 4416
    },
    {
      "epoch": 0.33,
      "learning_rate": 9.457026713124273e-07,
      "loss": 0.2373,
      "step": 4480
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.440133037694013e-07,
      "loss": 0.34,
      "step": 4544
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.423239362263751e-07,
      "loss": 0.2398,
      "step": 4608
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.406345686833491e-07,
      "loss": 0.2493,
      "step": 4672
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.389452011403231e-07,
      "loss": 0.2398,
      "step": 4736
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.37255833597297e-07,
      "loss": 0.2536,
      "step": 4800
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.355664660542709e-07,
      "loss": 0.3538,
      "step": 4864
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.338770985112448e-07,
      "loss": 0.2481,
      "step": 4928
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.321877309682187e-07,
      "loss": 0.2152,
      "step": 4992
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.304983634251927e-07,
      "loss": 0.3056,
      "step": 5056
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.288089958821665e-07,
      "loss": 0.2473,
      "step": 5120
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.271196283391405e-07,
      "loss": 0.234,
      "step": 5184
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.254302607961144e-07,
      "loss": 0.1578,
      "step": 5248
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.237672896209481e-07,
      "loss": 0.2566,
      "step": 5312
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.22077922077922e-07,
      "loss": 0.3794,
      "step": 5376
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.203885545348959e-07,
      "loss": 0.2453,
      "step": 5440
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.186991869918699e-07,
      "loss": 0.2361,
      "step": 5504
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.170098194488438e-07,
      "loss": 0.1734,
      "step": 5568
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.153204519058178e-07,
      "loss": 0.1762,
      "step": 5632
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.136310843627916e-07,
      "loss": 0.2933,
      "step": 5696
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.119417168197655e-07,
      "loss": 0.3152,
      "step": 5760
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.102523492767395e-07,
      "loss": 0.1966,
      "step": 5824
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.085629817337133e-07,
      "loss": 0.298,
      "step": 5888
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.068736141906873e-07,
      "loss": 0.2437,
      "step": 5952
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.051842466476613e-07,
      "loss": 0.305,
      "step": 6016
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.034948791046352e-07,
      "loss": 0.1959,
      "step": 6080
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.018055115616091e-07,
      "loss": 0.2465,
      "step": 6144
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.00116144018583e-07,
      "loss": 0.2624,
      "step": 6208
    },
    {
      "epoch": 0.47,
      "learning_rate": 8.984267764755569e-07,
      "loss": 0.2772,
      "step": 6272
    },
    {
      "epoch": 0.47,
      "learning_rate": 8.967374089325309e-07,
      "loss": 0.2208,
      "step": 6336
    },
    {
      "epoch": 0.48,
      "learning_rate": 8.950480413895047e-07,
      "loss": 0.2695,
      "step": 6400
    },
    {
      "epoch": 0.48,
      "learning_rate": 8.933586738464787e-07,
      "loss": 0.3235,
      "step": 6464
    },
    {
      "epoch": 0.49,
      "learning_rate": 8.916693063034526e-07,
      "loss": 0.2802,
      "step": 6528
    },
    {
      "epoch": 0.49,
      "learning_rate": 8.899799387604265e-07,
      "loss": 0.2383,
      "step": 6592
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.882905712174005e-07,
      "loss": 0.2238,
      "step": 6656
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.866012036743744e-07,
      "loss": 0.2847,
      "step": 6720
    },
    {
      "epoch": 0.5,
      "learning_rate": 8.849118361313483e-07,
      "loss": 0.2713,
      "step": 6784
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.832224685883221e-07,
      "loss": 0.2156,
      "step": 6848
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.815331010452961e-07,
      "loss": 0.2832,
      "step": 6912
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.798437335022701e-07,
      "loss": 0.2675,
      "step": 6976
    },
    {
      "epoch": 0.52,
      "learning_rate": 8.781543659592439e-07,
      "loss": 0.2506,
      "step": 7040
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.764649984162179e-07,
      "loss": 0.2807,
      "step": 7104
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.747756308731918e-07,
      "loss": 0.2802,
      "step": 7168
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.730862633301658e-07,
      "loss": 0.2125,
      "step": 7232
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.713968957871396e-07,
      "loss": 0.2739,
      "step": 7296
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.697075282441135e-07,
      "loss": 0.2341,
      "step": 7360
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.680181607010875e-07,
      "loss": 0.2267,
      "step": 7424
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.663287931580614e-07,
      "loss": 0.3504,
      "step": 7488
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.646394256150353e-07,
      "loss": 0.2675,
      "step": 7552
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.629500580720093e-07,
      "loss": 0.1694,
      "step": 7616
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.612606905289832e-07,
      "loss": 0.2865,
      "step": 7680
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.595713229859571e-07,
      "loss": 0.2737,
      "step": 7744
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.578819554429309e-07,
      "loss": 0.2201,
      "step": 7808
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.561925878999049e-07,
      "loss": 0.1701,
      "step": 7872
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.545032203568789e-07,
      "loss": 0.2566,
      "step": 7936
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.528138528138527e-07,
      "loss": 0.274,
      "step": 8000
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.511244852708267e-07,
      "loss": 0.3413,
      "step": 8064
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.494351177278007e-07,
      "loss": 0.2035,
      "step": 8128
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.477721465526343e-07,
      "loss": 0.2612,
      "step": 8192
    },
    {
      "epoch": 0.61,
      "learning_rate": 8.460827790096083e-07,
      "loss": 0.2322,
      "step": 8256
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.443934114665822e-07,
      "loss": 0.1929,
      "step": 8320
    },
    {
      "epoch": 0.62,
      "learning_rate": 8.427040439235561e-07,
      "loss": 0.2212,
      "step": 8384
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.4101467638053e-07,
      "loss": 0.1912,
      "step": 8448
    },
    {
      "epoch": 0.63,
      "learning_rate": 8.39325308837504e-07,
      "loss": 0.3325,
      "step": 8512
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.376359412944777e-07,
      "loss": 0.2341,
      "step": 8576
    },
    {
      "epoch": 0.64,
      "learning_rate": 8.359465737514517e-07,
      "loss": 0.248,
      "step": 8640
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.342572062084257e-07,
      "loss": 0.268,
      "step": 8704
    },
    {
      "epoch": 0.65,
      "learning_rate": 8.325678386653996e-07,
      "loss": 0.3279,
      "step": 8768
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.308784711223735e-07,
      "loss": 0.2175,
      "step": 8832
    },
    {
      "epoch": 0.66,
      "learning_rate": 8.291891035793475e-07,
      "loss": 0.2641,
      "step": 8896
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.274997360363214e-07,
      "loss": 0.2435,
      "step": 8960
    },
    {
      "epoch": 0.67,
      "learning_rate": 8.258103684932953e-07,
      "loss": 0.208,
      "step": 9024
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.241210009502691e-07,
      "loss": 0.2631,
      "step": 9088
    },
    {
      "epoch": 0.68,
      "learning_rate": 8.224316334072431e-07,
      "loss": 0.2774,
      "step": 9152
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.207422658642171e-07,
      "loss": 0.2279,
      "step": 9216
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.190528983211909e-07,
      "loss": 0.1885,
      "step": 9280
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.173635307781649e-07,
      "loss": 0.2396,
      "step": 9344
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.156741632351388e-07,
      "loss": 0.269,
      "step": 9408
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.139847956921128e-07,
      "loss": 0.315,
      "step": 9472
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.122954281490867e-07,
      "loss": 0.2165,
      "step": 9536
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.106060606060605e-07,
      "loss": 0.2035,
      "step": 9600
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.089166930630345e-07,
      "loss": 0.2472,
      "step": 9664
    },
    {
      "epoch": 0.72,
      "learning_rate": 8.072273255200083e-07,
      "loss": 0.2641,
      "step": 9728
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.055379579769823e-07,
      "loss": 0.1937,
      "step": 9792
    },
    {
      "epoch": 0.73,
      "learning_rate": 8.038485904339563e-07,
      "loss": 0.2108,
      "step": 9856
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.021592228909302e-07,
      "loss": 0.2866,
      "step": 9920
    },
    {
      "epoch": 0.74,
      "learning_rate": 8.004698553479041e-07,
      "loss": 0.2707,
      "step": 9984
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.98780487804878e-07,
      "loss": 0.2249,
      "step": 10048
    },
    {
      "epoch": 0.75,
      "learning_rate": 7.970911202618519e-07,
      "loss": 0.2183,
      "step": 10112
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.954017527188258e-07,
      "loss": 0.234,
      "step": 10176
    },
    {
      "epoch": 0.76,
      "learning_rate": 7.937123851757997e-07,
      "loss": 0.2504,
      "step": 10240
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.920230176327737e-07,
      "loss": 0.1973,
      "step": 10304
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.903336500897477e-07,
      "loss": 0.2003,
      "step": 10368
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.886442825467215e-07,
      "loss": 0.2558,
      "step": 10432
    },
    {
      "epoch": 0.78,
      "learning_rate": 7.869549150036955e-07,
      "loss": 0.2694,
      "step": 10496
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.852655474606694e-07,
      "loss": 0.2633,
      "step": 10560
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.835761799176432e-07,
      "loss": 0.2649,
      "step": 10624
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.818868123746172e-07,
      "loss": 0.2874,
      "step": 10688
    },
    {
      "epoch": 0.8,
      "learning_rate": 7.801974448315911e-07,
      "loss": 0.2393,
      "step": 10752
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.785080772885651e-07,
      "loss": 0.2303,
      "step": 10816
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.768451061133987e-07,
      "loss": 0.2417,
      "step": 10880
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.751557385703727e-07,
      "loss": 0.272,
      "step": 10944
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.734663710273465e-07,
      "loss": 0.2005,
      "step": 11008
    },
    {
      "epoch": 0.82,
      "learning_rate": 7.717770034843205e-07,
      "loss": 0.1505,
      "step": 11072
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.700876359412945e-07,
      "loss": 0.2839,
      "step": 11136
    },
    {
      "epoch": 0.83,
      "learning_rate": 7.683982683982684e-07,
      "loss": 0.2805,
      "step": 11200
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.667089008552423e-07,
      "loss": 0.2407,
      "step": 11264
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.650195333122162e-07,
      "loss": 0.1769,
      "step": 11328
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.633301657691901e-07,
      "loss": 0.2223,
      "step": 11392
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.61640798226164e-07,
      "loss": 0.2609,
      "step": 11456
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.599514306831379e-07,
      "loss": 0.3837,
      "step": 11520
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.582620631401119e-07,
      "loss": 0.2162,
      "step": 11584
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.565726955970858e-07,
      "loss": 0.2076,
      "step": 11648
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.548833280540597e-07,
      "loss": 0.266,
      "step": 11712
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.531939605110337e-07,
      "loss": 0.1652,
      "step": 11776
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.515045929680076e-07,
      "loss": 0.2416,
      "step": 11840
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.498152254249815e-07,
      "loss": 0.2471,
      "step": 11904
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.481258578819553e-07,
      "loss": 0.2285,
      "step": 11968
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.464364903389293e-07,
      "loss": 0.2368,
      "step": 12032
    },
    {
      "epoch": 0.9,
      "learning_rate": 7.447471227959033e-07,
      "loss": 0.1847,
      "step": 12096
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.430577552528771e-07,
      "loss": 0.2826,
      "step": 12160
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.413683877098511e-07,
      "loss": 0.2931,
      "step": 12224
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.396790201668251e-07,
      "loss": 0.1722,
      "step": 12288
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.37989652623799e-07,
      "loss": 0.2673,
      "step": 12352
    },
    {
      "epoch": 0.92,
      "learning_rate": 7.363002850807728e-07,
      "loss": 0.2244,
      "step": 12416
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.346109175377467e-07,
      "loss": 0.1532,
      "step": 12480
    },
    {
      "epoch": 0.93,
      "learning_rate": 7.329215499947207e-07,
      "loss": 0.2207,
      "step": 12544
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.312321824516946e-07,
      "loss": 0.2362,
      "step": 12608
    },
    {
      "epoch": 0.94,
      "learning_rate": 7.295428149086685e-07,
      "loss": 0.2222,
      "step": 12672
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.278534473656425e-07,
      "loss": 0.2664,
      "step": 12736
    },
    {
      "epoch": 0.95,
      "learning_rate": 7.261640798226164e-07,
      "loss": 0.1632,
      "step": 12800
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.244747122795903e-07,
      "loss": 0.2212,
      "step": 12864
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.227853447365642e-07,
      "loss": 0.2026,
      "step": 12928
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.210959771935381e-07,
      "loss": 0.2562,
      "step": 12992
    },
    {
      "epoch": 0.97,
      "learning_rate": 7.194066096505121e-07,
      "loss": 0.2038,
      "step": 13056
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.177436384753458e-07,
      "loss": 0.1829,
      "step": 13120
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.160542709323197e-07,
      "loss": 0.217,
      "step": 13184
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.143649033892935e-07,
      "loss": 0.2303,
      "step": 13248
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.126755358462675e-07,
      "loss": 0.1996,
      "step": 13312
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.109861683032415e-07,
      "loss": 0.206,
      "step": 13376
    },
    {
      "epoch": 1.0,
      "learning_rate": 7.092968007602153e-07,
      "loss": 0.2405,
      "step": 13440
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.076074332171893e-07,
      "loss": 0.1609,
      "step": 13504
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.059180656741632e-07,
      "loss": 0.1735,
      "step": 13568
    },
    {
      "epoch": 1.01,
      "learning_rate": 7.042286981311372e-07,
      "loss": 0.1477,
      "step": 13632
    },
    {
      "epoch": 1.02,
      "learning_rate": 7.02539330588111e-07,
      "loss": 0.2664,
      "step": 13696
    },
    {
      "epoch": 1.02,
      "learning_rate": 7.008499630450849e-07,
      "loss": 0.1506,
      "step": 13760
    },
    {
      "epoch": 1.03,
      "learning_rate": 6.991605955020589e-07,
      "loss": 0.1315,
      "step": 13824
    },
    {
      "epoch": 1.03,
      "learning_rate": 6.974712279590327e-07,
      "loss": 0.1494,
      "step": 13888
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.957818604160067e-07,
      "loss": 0.1959,
      "step": 13952
    },
    {
      "epoch": 1.04,
      "learning_rate": 6.940924928729807e-07,
      "loss": 0.1266,
      "step": 14016
    },
    {
      "epoch": 1.05,
      "learning_rate": 6.924031253299546e-07,
      "loss": 0.1257,
      "step": 14080
    },
    {
      "epoch": 1.05,
      "learning_rate": 6.907137577869285e-07,
      "loss": 0.1429,
      "step": 14144
    },
    {
      "epoch": 1.06,
      "learning_rate": 6.890243902439023e-07,
      "loss": 0.1911,
      "step": 14208
    },
    {
      "epoch": 1.06,
      "learning_rate": 6.873350227008763e-07,
      "loss": 0.1307,
      "step": 14272
    },
    {
      "epoch": 1.07,
      "learning_rate": 6.856456551578503e-07,
      "loss": 0.1199,
      "step": 14336
    },
    {
      "epoch": 1.07,
      "learning_rate": 6.839562876148241e-07,
      "loss": 0.1367,
      "step": 14400
    },
    {
      "epoch": 1.08,
      "learning_rate": 6.822669200717981e-07,
      "loss": 0.102,
      "step": 14464
    },
    {
      "epoch": 1.08,
      "learning_rate": 6.805775525287721e-07,
      "loss": 0.2138,
      "step": 14528
    },
    {
      "epoch": 1.09,
      "learning_rate": 6.788881849857459e-07,
      "loss": 0.1363,
      "step": 14592
    },
    {
      "epoch": 1.09,
      "learning_rate": 6.771988174427199e-07,
      "loss": 0.1177,
      "step": 14656
    },
    {
      "epoch": 1.1,
      "learning_rate": 6.755094498996937e-07,
      "loss": 0.1916,
      "step": 14720
    },
    {
      "epoch": 1.1,
      "learning_rate": 6.738200823566677e-07,
      "loss": 0.1557,
      "step": 14784
    },
    {
      "epoch": 1.11,
      "learning_rate": 6.721307148136416e-07,
      "loss": 0.1847,
      "step": 14848
    },
    {
      "epoch": 1.11,
      "learning_rate": 6.704413472706155e-07,
      "loss": 0.1381,
      "step": 14912
    },
    {
      "epoch": 1.11,
      "learning_rate": 6.687519797275895e-07,
      "loss": 0.1897,
      "step": 14976
    },
    {
      "epoch": 1.12,
      "learning_rate": 6.670626121845634e-07,
      "loss": 0.1866,
      "step": 15040
    },
    {
      "epoch": 1.12,
      "learning_rate": 6.653732446415373e-07,
      "loss": 0.143,
      "step": 15104
    },
    {
      "epoch": 1.13,
      "learning_rate": 6.636838770985113e-07,
      "loss": 0.1729,
      "step": 15168
    },
    {
      "epoch": 1.13,
      "learning_rate": 6.619945095554851e-07,
      "loss": 0.2373,
      "step": 15232
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.60305142012459e-07,
      "loss": 0.1464,
      "step": 15296
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.586157744694329e-07,
      "loss": 0.1707,
      "step": 15360
    },
    {
      "epoch": 1.15,
      "learning_rate": 6.569264069264069e-07,
      "loss": 0.1757,
      "step": 15424
    },
    {
      "epoch": 1.15,
      "learning_rate": 6.552370393833809e-07,
      "loss": 0.2099,
      "step": 15488
    },
    {
      "epoch": 1.16,
      "learning_rate": 6.535476718403547e-07,
      "loss": 0.2021,
      "step": 15552
    },
    {
      "epoch": 1.16,
      "learning_rate": 6.518583042973287e-07,
      "loss": 0.1288,
      "step": 15616
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.501689367543026e-07,
      "loss": 0.1461,
      "step": 15680
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.484795692112764e-07,
      "loss": 0.1831,
      "step": 15744
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.467902016682504e-07,
      "loss": 0.0907,
      "step": 15808
    },
    {
      "epoch": 1.18,
      "learning_rate": 6.451008341252243e-07,
      "loss": 0.1144,
      "step": 15872
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.434114665821983e-07,
      "loss": 0.1916,
      "step": 15936
    },
    {
      "epoch": 1.19,
      "learning_rate": 6.417220990391721e-07,
      "loss": 0.138,
      "step": 16000
    },
    {
      "epoch": 1.2,
      "learning_rate": 6.400327314961461e-07,
      "loss": 0.1923,
      "step": 16064
    },
    {
      "epoch": 1.2,
      "learning_rate": 6.383433639531201e-07,
      "loss": 0.1353,
      "step": 16128
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.36653996410094e-07,
      "loss": 0.2265,
      "step": 16192
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.349646288670678e-07,
      "loss": 0.1781,
      "step": 16256
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.332752613240417e-07,
      "loss": 0.1241,
      "step": 16320
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.315858937810157e-07,
      "loss": 0.1227,
      "step": 16384
    },
    {
      "epoch": 1.22,
      "learning_rate": 6.298965262379896e-07,
      "loss": 0.1769,
      "step": 16448
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.282071586949635e-07,
      "loss": 0.1482,
      "step": 16512
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.265177911519375e-07,
      "loss": 0.1909,
      "step": 16576
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.248284236089115e-07,
      "loss": 0.1083,
      "step": 16640
    },
    {
      "epoch": 1.24,
      "learning_rate": 6.231390560658853e-07,
      "loss": 0.1837,
      "step": 16704
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.214496885228592e-07,
      "loss": 0.1213,
      "step": 16768
    },
    {
      "epoch": 1.25,
      "learning_rate": 6.197603209798331e-07,
      "loss": 0.1744,
      "step": 16832
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.18070953436807e-07,
      "loss": 0.1638,
      "step": 16896
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.16381585893781e-07,
      "loss": 0.1712,
      "step": 16960
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.146922183507549e-07,
      "loss": 0.1842,
      "step": 17024
    },
    {
      "epoch": 1.27,
      "learning_rate": 6.130028508077289e-07,
      "loss": 0.2725,
      "step": 17088
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.113398796325625e-07,
      "loss": 0.1689,
      "step": 17152
    },
    {
      "epoch": 1.28,
      "learning_rate": 6.096505120895365e-07,
      "loss": 0.1041,
      "step": 17216
    },
    {
      "epoch": 1.29,
      "learning_rate": 6.079611445465103e-07,
      "loss": 0.1797,
      "step": 17280
    },
    {
      "epoch": 1.29,
      "learning_rate": 6.062717770034843e-07,
      "loss": 0.2498,
      "step": 17344
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.045824094604583e-07,
      "loss": 0.1451,
      "step": 17408
    },
    {
      "epoch": 1.3,
      "learning_rate": 6.028930419174322e-07,
      "loss": 0.1862,
      "step": 17472
    },
    {
      "epoch": 1.31,
      "learning_rate": 6.01203674374406e-07,
      "loss": 0.1495,
      "step": 17536
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.995143068313799e-07,
      "loss": 0.1119,
      "step": 17600
    },
    {
      "epoch": 1.31,
      "learning_rate": 5.978249392883539e-07,
      "loss": 0.2057,
      "step": 17664
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.961355717453278e-07,
      "loss": 0.1437,
      "step": 17728
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.944462042023017e-07,
      "loss": 0.1174,
      "step": 17792
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.927568366592757e-07,
      "loss": 0.1908,
      "step": 17856
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.910674691162496e-07,
      "loss": 0.1652,
      "step": 17920
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.893781015732235e-07,
      "loss": 0.1413,
      "step": 17984
    },
    {
      "epoch": 1.34,
      "learning_rate": 5.876887340301974e-07,
      "loss": 0.1883,
      "step": 18048
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.859993664871713e-07,
      "loss": 0.1673,
      "step": 18112
    },
    {
      "epoch": 1.35,
      "learning_rate": 5.843099989441452e-07,
      "loss": 0.1304,
      "step": 18176
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.826206314011191e-07,
      "loss": 0.1971,
      "step": 18240
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.809312638580931e-07,
      "loss": 0.1318,
      "step": 18304
    },
    {
      "epoch": 1.37,
      "learning_rate": 5.792418963150671e-07,
      "loss": 0.1817,
      "step": 18368
    },
    {
      "epoch": 1.37,
      "learning_rate": 5.775525287720409e-07,
      "loss": 0.1825,
      "step": 18432
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.758895575968747e-07,
      "loss": 0.1423,
      "step": 18496
    },
    {
      "epoch": 1.38,
      "learning_rate": 5.742001900538485e-07,
      "loss": 0.1742,
      "step": 18560
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.725108225108225e-07,
      "loss": 0.1447,
      "step": 18624
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.708214549677965e-07,
      "loss": 0.1451,
      "step": 18688
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.691320874247704e-07,
      "loss": 0.188,
      "step": 18752
    },
    {
      "epoch": 1.4,
      "learning_rate": 5.674427198817442e-07,
      "loss": 0.1554,
      "step": 18816
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.657533523387181e-07,
      "loss": 0.0989,
      "step": 18880
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.640639847956921e-07,
      "loss": 0.184,
      "step": 18944
    },
    {
      "epoch": 1.41,
      "learning_rate": 5.62374617252666e-07,
      "loss": 0.0934,
      "step": 19008
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.606852497096399e-07,
      "loss": 0.173,
      "step": 19072
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.589958821666139e-07,
      "loss": 0.1898,
      "step": 19136
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.573065146235878e-07,
      "loss": 0.1446,
      "step": 19200
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.556171470805617e-07,
      "loss": 0.1758,
      "step": 19264
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.539277795375356e-07,
      "loss": 0.1509,
      "step": 19328
    },
    {
      "epoch": 1.44,
      "learning_rate": 5.522384119945095e-07,
      "loss": 0.301,
      "step": 19392
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.505490444514834e-07,
      "loss": 0.107,
      "step": 19456
    },
    {
      "epoch": 1.45,
      "learning_rate": 5.488596769084573e-07,
      "loss": 0.1417,
      "step": 19520
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.471703093654313e-07,
      "loss": 0.152,
      "step": 19584
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.454809418224053e-07,
      "loss": 0.1762,
      "step": 19648
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.437915742793791e-07,
      "loss": 0.1559,
      "step": 19712
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.421022067363531e-07,
      "loss": 0.1965,
      "step": 19776
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.404128391933269e-07,
      "loss": 0.1924,
      "step": 19840
    },
    {
      "epoch": 1.48,
      "learning_rate": 5.387234716503009e-07,
      "loss": 0.1208,
      "step": 19904
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.370341041072748e-07,
      "loss": 0.1676,
      "step": 19968
    },
    {
      "epoch": 1.49,
      "learning_rate": 5.353447365642487e-07,
      "loss": 0.1461,
      "step": 20032
    },
    {
      "epoch": 1.5,
      "learning_rate": 5.336553690212227e-07,
      "loss": 0.2259,
      "step": 20096
    },
    {
      "epoch": 1.5,
      "learning_rate": 5.319660014781965e-07,
      "loss": 0.1377,
      "step": 20160
    },
    {
      "epoch": 1.51,
      "learning_rate": 5.302766339351705e-07,
      "loss": 0.1578,
      "step": 20224
    },
    {
      "epoch": 1.51,
      "learning_rate": 5.285872663921445e-07,
      "loss": 0.1253,
      "step": 20288
    },
    {
      "epoch": 1.51,
      "learning_rate": 5.268978988491183e-07,
      "loss": 0.2099,
      "step": 20352
    },
    {
      "epoch": 1.52,
      "learning_rate": 5.252085313060922e-07,
      "loss": 0.1226,
      "step": 20416
    },
    {
      "epoch": 1.52,
      "learning_rate": 5.235191637630661e-07,
      "loss": 0.1869,
      "step": 20480
    },
    {
      "epoch": 1.53,
      "learning_rate": 5.218297962200401e-07,
      "loss": 0.1255,
      "step": 20544
    },
    {
      "epoch": 1.53,
      "learning_rate": 5.201668250448737e-07,
      "loss": 0.1905,
      "step": 20608
    },
    {
      "epoch": 1.54,
      "learning_rate": 5.184774575018477e-07,
      "loss": 0.1708,
      "step": 20672
    },
    {
      "epoch": 1.54,
      "learning_rate": 5.167880899588216e-07,
      "loss": 0.163,
      "step": 20736
    },
    {
      "epoch": 1.55,
      "learning_rate": 5.150987224157955e-07,
      "loss": 0.1716,
      "step": 20800
    },
    {
      "epoch": 1.55,
      "learning_rate": 5.134093548727695e-07,
      "loss": 0.1274,
      "step": 20864
    },
    {
      "epoch": 1.56,
      "learning_rate": 5.117199873297435e-07,
      "loss": 0.1403,
      "step": 20928
    },
    {
      "epoch": 1.56,
      "learning_rate": 5.100306197867173e-07,
      "loss": 0.1995,
      "step": 20992
    },
    {
      "epoch": 1.57,
      "learning_rate": 5.083412522436913e-07,
      "loss": 0.1767,
      "step": 21056
    },
    {
      "epoch": 1.57,
      "learning_rate": 5.066518847006651e-07,
      "loss": 0.1903,
      "step": 21120
    },
    {
      "epoch": 1.58,
      "learning_rate": 5.049625171576391e-07,
      "loss": 0.142,
      "step": 21184
    },
    {
      "epoch": 1.58,
      "learning_rate": 5.03273149614613e-07,
      "loss": 0.1716,
      "step": 21248
    },
    {
      "epoch": 1.59,
      "learning_rate": 5.015837820715869e-07,
      "loss": 0.1786,
      "step": 21312
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.998944145285609e-07,
      "loss": 0.2228,
      "step": 21376
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.982050469855347e-07,
      "loss": 0.1619,
      "step": 21440
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.965156794425087e-07,
      "loss": 0.1718,
      "step": 21504
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.948263118994826e-07,
      "loss": 0.1151,
      "step": 21568
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.931369443564565e-07,
      "loss": 0.1673,
      "step": 21632
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.914475768134305e-07,
      "loss": 0.2063,
      "step": 21696
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.897582092704043e-07,
      "loss": 0.1335,
      "step": 21760
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.880688417273783e-07,
      "loss": 0.1217,
      "step": 21824
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.863794741843523e-07,
      "loss": 0.2009,
      "step": 21888
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.846901066413261e-07,
      "loss": 0.2075,
      "step": 21952
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.830007390983e-07,
      "loss": 0.2211,
      "step": 22016
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.813113715552739e-07,
      "loss": 0.1547,
      "step": 22080
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.796220040122479e-07,
      "loss": 0.2471,
      "step": 22144
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.779326364692219e-07,
      "loss": 0.131,
      "step": 22208
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.762432689261957e-07,
      "loss": 0.1717,
      "step": 22272
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.7455390138316963e-07,
      "loss": 0.1274,
      "step": 22336
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.728645338401436e-07,
      "loss": 0.1374,
      "step": 22400
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.711751662971175e-07,
      "loss": 0.1467,
      "step": 22464
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.6948579875409136e-07,
      "loss": 0.2168,
      "step": 22528
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.6779643121106533e-07,
      "loss": 0.1264,
      "step": 22592
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.6610706366803924e-07,
      "loss": 0.1222,
      "step": 22656
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.64444092492873e-07,
      "loss": 0.1715,
      "step": 22720
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.6275472494984683e-07,
      "loss": 0.1394,
      "step": 22784
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.610653574068208e-07,
      "loss": 0.1648,
      "step": 22848
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.593759898637947e-07,
      "loss": 0.1854,
      "step": 22912
    },
    {
      "epoch": 1.71,
      "learning_rate": 4.5768662232076867e-07,
      "loss": 0.1098,
      "step": 22976
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.5599725477774253e-07,
      "loss": 0.1099,
      "step": 23040
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.543078872347165e-07,
      "loss": 0.1653,
      "step": 23104
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.526185196916904e-07,
      "loss": 0.1917,
      "step": 23168
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.5092915214866436e-07,
      "loss": 0.2046,
      "step": 23232
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.492397846056382e-07,
      "loss": 0.1353,
      "step": 23296
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.4755041706261213e-07,
      "loss": 0.152,
      "step": 23360
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.458610495195861e-07,
      "loss": 0.178,
      "step": 23424
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.4417168197656e-07,
      "loss": 0.1666,
      "step": 23488
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.424823144335339e-07,
      "loss": 0.1452,
      "step": 23552
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.407929468905078e-07,
      "loss": 0.1384,
      "step": 23616
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.391035793474818e-07,
      "loss": 0.171,
      "step": 23680
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.374142118044557e-07,
      "loss": 0.2005,
      "step": 23744
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.3572484426142955e-07,
      "loss": 0.1804,
      "step": 23808
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.340354767184035e-07,
      "loss": 0.1528,
      "step": 23872
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.3234610917537743e-07,
      "loss": 0.1233,
      "step": 23936
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.306567416323514e-07,
      "loss": 0.1556,
      "step": 24000
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.2896737408932525e-07,
      "loss": 0.1198,
      "step": 24064
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.272780065462992e-07,
      "loss": 0.2046,
      "step": 24128
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.255886390032731e-07,
      "loss": 0.1274,
      "step": 24192
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.238992714602471e-07,
      "loss": 0.2104,
      "step": 24256
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.2220990391722094e-07,
      "loss": 0.1954,
      "step": 24320
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.205205363741949e-07,
      "loss": 0.1826,
      "step": 24384
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.188311688311688e-07,
      "loss": 0.1573,
      "step": 24448
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.171418012881427e-07,
      "loss": 0.0717,
      "step": 24512
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.1545243374511664e-07,
      "loss": 0.1276,
      "step": 24576
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.1376306620209055e-07,
      "loss": 0.1869,
      "step": 24640
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.120736986590645e-07,
      "loss": 0.1593,
      "step": 24704
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.103843311160384e-07,
      "loss": 0.159,
      "step": 24768
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.0869496357301233e-07,
      "loss": 0.1226,
      "step": 24832
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.0700559602998624e-07,
      "loss": 0.1372,
      "step": 24896
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.053162284869602e-07,
      "loss": 0.1391,
      "step": 24960
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.036268609439341e-07,
      "loss": 0.1583,
      "step": 25024
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.0193749340090797e-07,
      "loss": 0.1139,
      "step": 25088
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.0024812585788193e-07,
      "loss": 0.151,
      "step": 25152
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.9855875831485584e-07,
      "loss": 0.0999,
      "step": 25216
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.968693907718298e-07,
      "loss": 0.1652,
      "step": 25280
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.9518002322880366e-07,
      "loss": 0.2249,
      "step": 25344
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.934906556857776e-07,
      "loss": 0.1898,
      "step": 25408
    },
    {
      "epoch": 1.9,
      "learning_rate": 3.9180128814275154e-07,
      "loss": 0.1548,
      "step": 25472
    },
    {
      "epoch": 1.9,
      "learning_rate": 3.901383169675853e-07,
      "loss": 0.1457,
      "step": 25536
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.8844894942455913e-07,
      "loss": 0.1548,
      "step": 25600
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.867595818815331e-07,
      "loss": 0.1829,
      "step": 25664
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.85070214338507e-07,
      "loss": 0.1119,
      "step": 25728
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.833808467954809e-07,
      "loss": 0.1695,
      "step": 25792
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.816914792524548e-07,
      "loss": 0.1507,
      "step": 25856
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.8000211170942874e-07,
      "loss": 0.1416,
      "step": 25920
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.783127441664027e-07,
      "loss": 0.0902,
      "step": 25984
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.766233766233766e-07,
      "loss": 0.1472,
      "step": 26048
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.749340090803505e-07,
      "loss": 0.1624,
      "step": 26112
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.7324464153732443e-07,
      "loss": 0.1903,
      "step": 26176
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.715552739942984e-07,
      "loss": 0.1224,
      "step": 26240
    },
    {
      "epoch": 1.96,
      "learning_rate": 3.698659064512723e-07,
      "loss": 0.16,
      "step": 26304
    },
    {
      "epoch": 1.96,
      "learning_rate": 3.6817653890824616e-07,
      "loss": 0.2316,
      "step": 26368
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.664871713652201e-07,
      "loss": 0.2141,
      "step": 26432
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.6479780382219403e-07,
      "loss": 0.1727,
      "step": 26496
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.63108436279168e-07,
      "loss": 0.232,
      "step": 26560
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.6141906873614185e-07,
      "loss": 0.1381,
      "step": 26624
    },
    {
      "epoch": 1.99,
      "learning_rate": 3.597297011931158e-07,
      "loss": 0.142,
      "step": 26688
    },
    {
      "epoch": 1.99,
      "learning_rate": 3.580403336500897e-07,
      "loss": 0.1338,
      "step": 26752
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.563509661070637e-07,
      "loss": 0.1643,
      "step": 26816
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.5466159856403755e-07,
      "loss": 0.0738,
      "step": 26880
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.5297223102101146e-07,
      "loss": 0.1029,
      "step": 26944
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.512828634779854e-07,
      "loss": 0.1062,
      "step": 27008
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.4959349593495933e-07,
      "loss": 0.1157,
      "step": 27072
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.4790412839193324e-07,
      "loss": 0.1691,
      "step": 27136
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.4621476084890715e-07,
      "loss": 0.1523,
      "step": 27200
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.445253933058811e-07,
      "loss": 0.0866,
      "step": 27264
    },
    {
      "epoch": 2.03,
      "learning_rate": 3.42836025762855e-07,
      "loss": 0.121,
      "step": 27328
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.4114665821982893e-07,
      "loss": 0.1268,
      "step": 27392
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.3945729067680284e-07,
      "loss": 0.0935,
      "step": 27456
    },
    {
      "epoch": 2.05,
      "learning_rate": 3.3776792313377675e-07,
      "loss": 0.0949,
      "step": 27520
    },
    {
      "epoch": 2.05,
      "learning_rate": 3.361049519586105e-07,
      "loss": 0.0734,
      "step": 27584
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.3441558441558435e-07,
      "loss": 0.114,
      "step": 27648
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.327262168725583e-07,
      "loss": 0.0864,
      "step": 27712
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.310368493295322e-07,
      "loss": 0.1461,
      "step": 27776
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.293474817865062e-07,
      "loss": 0.1451,
      "step": 27840
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.2765811424348004e-07,
      "loss": 0.1097,
      "step": 27904
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.25968746700454e-07,
      "loss": 0.1424,
      "step": 27968
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.242793791574279e-07,
      "loss": 0.0722,
      "step": 28032
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.225900116144019e-07,
      "loss": 0.0862,
      "step": 28096
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.2090064407137574e-07,
      "loss": 0.1058,
      "step": 28160
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.1921127652834965e-07,
      "loss": 0.0926,
      "step": 28224
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.175219089853236e-07,
      "loss": 0.1422,
      "step": 28288
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.158325414422975e-07,
      "loss": 0.1014,
      "step": 28352
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.1414317389927143e-07,
      "loss": 0.0833,
      "step": 28416
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.1245380635624534e-07,
      "loss": 0.13,
      "step": 28480
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.107644388132193e-07,
      "loss": 0.1471,
      "step": 28544
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.090750712701932e-07,
      "loss": 0.0807,
      "step": 28608
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.073857037271671e-07,
      "loss": 0.0988,
      "step": 28672
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.0569633618414103e-07,
      "loss": 0.1179,
      "step": 28736
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.0400696864111494e-07,
      "loss": 0.1278,
      "step": 28800
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.023176010980889e-07,
      "loss": 0.0852,
      "step": 28864
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.0062823355506276e-07,
      "loss": 0.0817,
      "step": 28928
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.9893886601203673e-07,
      "loss": 0.0794,
      "step": 28992
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.9724949846901064e-07,
      "loss": 0.0682,
      "step": 29056
    },
    {
      "epoch": 2.17,
      "learning_rate": 2.955601309259846e-07,
      "loss": 0.0776,
      "step": 29120
    },
    {
      "epoch": 2.17,
      "learning_rate": 2.9387076338295846e-07,
      "loss": 0.1251,
      "step": 29184
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.921813958399324e-07,
      "loss": 0.1066,
      "step": 29248
    },
    {
      "epoch": 2.18,
      "learning_rate": 2.9049202829690633e-07,
      "loss": 0.1278,
      "step": 29312
    },
    {
      "epoch": 2.19,
      "learning_rate": 2.888026607538803e-07,
      "loss": 0.148,
      "step": 29376
    },
    {
      "epoch": 2.19,
      "learning_rate": 2.8711329321085415e-07,
      "loss": 0.0931,
      "step": 29440
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.8542392566782806e-07,
      "loss": 0.1033,
      "step": 29504
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.83734558124802e-07,
      "loss": 0.0973,
      "step": 29568
    },
    {
      "epoch": 2.21,
      "learning_rate": 2.8204519058177593e-07,
      "loss": 0.0643,
      "step": 29632
    },
    {
      "epoch": 2.21,
      "learning_rate": 2.8035582303874984e-07,
      "loss": 0.0787,
      "step": 29696
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.7866645549572375e-07,
      "loss": 0.1271,
      "step": 29760
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.770034843205575e-07,
      "loss": 0.078,
      "step": 29824
    },
    {
      "epoch": 2.22,
      "learning_rate": 2.753141167775314e-07,
      "loss": 0.1415,
      "step": 29888
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.736247492345053e-07,
      "loss": 0.0868,
      "step": 29952
    },
    {
      "epoch": 2.23,
      "learning_rate": 2.719353816914792e-07,
      "loss": 0.1619,
      "step": 30016
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.702460141484532e-07,
      "loss": 0.0987,
      "step": 30080
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.685566466054271e-07,
      "loss": 0.1622,
      "step": 30144
    },
    {
      "epoch": 2.25,
      "learning_rate": 2.6686727906240095e-07,
      "loss": 0.0879,
      "step": 30208
    },
    {
      "epoch": 2.25,
      "learning_rate": 2.651779115193749e-07,
      "loss": 0.0993,
      "step": 30272
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.6348854397634883e-07,
      "loss": 0.0721,
      "step": 30336
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.617991764333228e-07,
      "loss": 0.1047,
      "step": 30400
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.6010980889029665e-07,
      "loss": 0.1116,
      "step": 30464
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.584204413472706e-07,
      "loss": 0.1064,
      "step": 30528
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.567310738042445e-07,
      "loss": 0.1814,
      "step": 30592
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.550417062612185e-07,
      "loss": 0.1228,
      "step": 30656
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.5335233871819234e-07,
      "loss": 0.0903,
      "step": 30720
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.5166297117516625e-07,
      "loss": 0.098,
      "step": 30784
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.499736036321402e-07,
      "loss": 0.0897,
      "step": 30848
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.482842360891141e-07,
      "loss": 0.1095,
      "step": 30912
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.4659486854608803e-07,
      "loss": 0.0821,
      "step": 30976
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.4490550100306194e-07,
      "loss": 0.0947,
      "step": 31040
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.432161334600359e-07,
      "loss": 0.0744,
      "step": 31104
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.415267659170098e-07,
      "loss": 0.112,
      "step": 31168
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.3983739837398373e-07,
      "loss": 0.0845,
      "step": 31232
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.3814803083095764e-07,
      "loss": 0.1089,
      "step": 31296
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.3645866328793157e-07,
      "loss": 0.0861,
      "step": 31360
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.3476929574490548e-07,
      "loss": 0.1337,
      "step": 31424
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.3307992820187942e-07,
      "loss": 0.1439,
      "step": 31488
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.313905606588533e-07,
      "loss": 0.1117,
      "step": 31552
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.2970119311582724e-07,
      "loss": 0.1389,
      "step": 31616
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.2801182557280115e-07,
      "loss": 0.1078,
      "step": 31680
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.263224580297751e-07,
      "loss": 0.0763,
      "step": 31744
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.24633090486749e-07,
      "loss": 0.1198,
      "step": 31808
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.2294372294372293e-07,
      "loss": 0.0634,
      "step": 31872
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.2125435540069684e-07,
      "loss": 0.1114,
      "step": 31936
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.1956498785767078e-07,
      "loss": 0.1049,
      "step": 32000
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.178756203146447e-07,
      "loss": 0.0953,
      "step": 32064
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.162126491394784e-07,
      "loss": 0.135,
      "step": 32128
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.1452328159645231e-07,
      "loss": 0.1524,
      "step": 32192
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.1283391405342625e-07,
      "loss": 0.1692,
      "step": 32256
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.1114454651040013e-07,
      "loss": 0.1011,
      "step": 32320
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.0945517896737407e-07,
      "loss": 0.125,
      "step": 32384
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.0776581142434798e-07,
      "loss": 0.0962,
      "step": 32448
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.0607644388132192e-07,
      "loss": 0.1273,
      "step": 32512
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.0438707633829583e-07,
      "loss": 0.086,
      "step": 32576
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.0269770879526976e-07,
      "loss": 0.0813,
      "step": 32640
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.0100834125224367e-07,
      "loss": 0.1316,
      "step": 32704
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.993189737092176e-07,
      "loss": 0.1399,
      "step": 32768
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.9762960616619152e-07,
      "loss": 0.0931,
      "step": 32832
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.9594023862316543e-07,
      "loss": 0.0858,
      "step": 32896
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.9425087108013934e-07,
      "loss": 0.093,
      "step": 32960
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.9256150353711328e-07,
      "loss": 0.0952,
      "step": 33024
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.908721359940872e-07,
      "loss": 0.1085,
      "step": 33088
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.8918276845106112e-07,
      "loss": 0.0892,
      "step": 33152
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.8749340090803503e-07,
      "loss": 0.0802,
      "step": 33216
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.8580403336500897e-07,
      "loss": 0.0923,
      "step": 33280
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.8411466582198288e-07,
      "loss": 0.0691,
      "step": 33344
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.8242529827895682e-07,
      "loss": 0.1072,
      "step": 33408
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.807359307359307e-07,
      "loss": 0.0763,
      "step": 33472
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.7904656319290464e-07,
      "loss": 0.096,
      "step": 33536
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.7735719564987855e-07,
      "loss": 0.1385,
      "step": 33600
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.7566782810685248e-07,
      "loss": 0.1036,
      "step": 33664
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.739784605638264e-07,
      "loss": 0.0914,
      "step": 33728
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.7228909302080033e-07,
      "loss": 0.1478,
      "step": 33792
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.7059972547777424e-07,
      "loss": 0.1637,
      "step": 33856
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.6891035793474818e-07,
      "loss": 0.1009,
      "step": 33920
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.672209903917221e-07, |
|
"loss": 0.1195, |
|
"step": 33984 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.6553162284869603e-07, |
|
"loss": 0.0979, |
|
"step": 34048 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.638422553056699e-07, |
|
"loss": 0.0697, |
|
"step": 34112 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.6215288776264385e-07, |
|
"loss": 0.0639, |
|
"step": 34176 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.6046352021961776e-07, |
|
"loss": 0.0864, |
|
"step": 34240 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.587741526765917e-07, |
|
"loss": 0.0731, |
|
"step": 34304 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.570847851335656e-07, |
|
"loss": 0.2053, |
|
"step": 34368 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.5539541759053954e-07, |
|
"loss": 0.0951, |
|
"step": 34432 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.5370605004751345e-07, |
|
"loss": 0.1149, |
|
"step": 34496 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.5201668250448739e-07, |
|
"loss": 0.093, |
|
"step": 34560 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.5032731496146127e-07, |
|
"loss": 0.1261, |
|
"step": 34624 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.486379474184352e-07, |
|
"loss": 0.1402, |
|
"step": 34688 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.4694857987540912e-07, |
|
"loss": 0.0892, |
|
"step": 34752 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.4525921233238305e-07, |
|
"loss": 0.0735, |
|
"step": 34816 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.4356984478935696e-07, |
|
"loss": 0.0898, |
|
"step": 34880 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.4190687361419067e-07, |
|
"loss": 0.1032, |
|
"step": 34944 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.402175060711646e-07, |
|
"loss": 0.0752, |
|
"step": 35008 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.3852813852813852e-07, |
|
"loss": 0.1409, |
|
"step": 35072 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.3683877098511246e-07, |
|
"loss": 0.1379, |
|
"step": 35136 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.3514940344208637e-07, |
|
"loss": 0.1302, |
|
"step": 35200 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.334600358990603e-07, |
|
"loss": 0.1093, |
|
"step": 35264 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.3177066835603421e-07, |
|
"loss": 0.1072, |
|
"step": 35328 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.3008130081300813e-07, |
|
"loss": 0.0894, |
|
"step": 35392 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.2839193326998204e-07, |
|
"loss": 0.0913, |
|
"step": 35456 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.2670256572695597e-07, |
|
"loss": 0.1304, |
|
"step": 35520 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.2501319818392988e-07, |
|
"loss": 0.1343, |
|
"step": 35584 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.2332383064090382e-07, |
|
"loss": 0.1177, |
|
"step": 35648 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.2163446309787773e-07, |
|
"loss": 0.1414, |
|
"step": 35712 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.1994509555485164e-07, |
|
"loss": 0.0879, |
|
"step": 35776 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.1825572801182556e-07, |
|
"loss": 0.0914, |
|
"step": 35840 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.1656636046879949e-07, |
|
"loss": 0.0834, |
|
"step": 35904 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.1487699292577341e-07, |
|
"loss": 0.0865, |
|
"step": 35968 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.1318762538274733e-07, |
|
"loss": 0.1522, |
|
"step": 36032 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.1149825783972126e-07, |
|
"loss": 0.116, |
|
"step": 36096 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.0980889029669517e-07, |
|
"loss": 0.1102, |
|
"step": 36160 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.0811952275366909e-07, |
|
"loss": 0.0826, |
|
"step": 36224 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.0643015521064301e-07, |
|
"loss": 0.1233, |
|
"step": 36288 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.0474078766761694e-07, |
|
"loss": 0.046, |
|
"step": 36352 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.0305142012459085e-07, |
|
"loss": 0.08, |
|
"step": 36416 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.0136205258156477e-07, |
|
"loss": 0.1432, |
|
"step": 36480 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.967268503853869e-08, |
|
"loss": 0.1198, |
|
"step": 36544 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.798331749551262e-08, |
|
"loss": 0.0608, |
|
"step": 36608 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.629394995248654e-08, |
|
"loss": 0.0904, |
|
"step": 36672 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.460458240946045e-08, |
|
"loss": 0.1108, |
|
"step": 36736 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.291521486643437e-08, |
|
"loss": 0.1117, |
|
"step": 36800 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.12258473234083e-08, |
|
"loss": 0.1341, |
|
"step": 36864 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.953647978038222e-08, |
|
"loss": 0.0781, |
|
"step": 36928 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.784711223735613e-08, |
|
"loss": 0.1685, |
|
"step": 36992 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.615774469433005e-08, |
|
"loss": 0.0998, |
|
"step": 37056 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 8.446837715130398e-08, |
|
"loss": 0.1055, |
|
"step": 37120 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.27790096082779e-08, |
|
"loss": 0.0375, |
|
"step": 37184 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 8.11160384331116e-08, |
|
"loss": 0.0493, |
|
"step": 37248 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.942667089008552e-08, |
|
"loss": 0.0584, |
|
"step": 37312 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.773730334705945e-08, |
|
"loss": 0.1123, |
|
"step": 37376 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 7.604793580403336e-08, |
|
"loss": 0.0626, |
|
"step": 37440 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 7.435856826100728e-08, |
|
"loss": 0.1379, |
|
"step": 37504 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 7.26692007179812e-08, |
|
"loss": 0.0661, |
|
"step": 37568 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 7.097983317495513e-08, |
|
"loss": 0.0792, |
|
"step": 37632 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.929046563192905e-08, |
|
"loss": 0.1278, |
|
"step": 37696 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 6.760109808890296e-08, |
|
"loss": 0.0835, |
|
"step": 37760 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.591173054587688e-08, |
|
"loss": 0.1209, |
|
"step": 37824 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 6.42223630028508e-08, |
|
"loss": 0.1063, |
|
"step": 37888 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 6.253299545982473e-08, |
|
"loss": 0.1668, |
|
"step": 37952 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 6.084362791679864e-08, |
|
"loss": 0.076, |
|
"step": 38016 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 5.915426037377256e-08, |
|
"loss": 0.0798, |
|
"step": 38080 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.7464892830746486e-08, |
|
"loss": 0.0793, |
|
"step": 38144 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 5.577552528772041e-08, |
|
"loss": 0.0684, |
|
"step": 38208 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.4086157744694326e-08, |
|
"loss": 0.0792, |
|
"step": 38272 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 5.239679020166825e-08, |
|
"loss": 0.121, |
|
"step": 38336 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 5.0707422658642167e-08, |
|
"loss": 0.091, |
|
"step": 38400 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.901805511561609e-08, |
|
"loss": 0.1091, |
|
"step": 38464 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.732868757259001e-08, |
|
"loss": 0.0961, |
|
"step": 38528 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.563932002956393e-08, |
|
"loss": 0.125, |
|
"step": 38592 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.394995248653785e-08, |
|
"loss": 0.0568, |
|
"step": 38656 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.226058494351177e-08, |
|
"loss": 0.0913, |
|
"step": 38720 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.0571217400485694e-08, |
|
"loss": 0.159, |
|
"step": 38784 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.888184985745961e-08, |
|
"loss": 0.0627, |
|
"step": 38848 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.7192482314433534e-08, |
|
"loss": 0.0557, |
|
"step": 38912 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.550311477140745e-08, |
|
"loss": 0.1531, |
|
"step": 38976 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.3813747228381374e-08, |
|
"loss": 0.0658, |
|
"step": 39040 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.212437968535529e-08, |
|
"loss": 0.1024, |
|
"step": 39104 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.0435012142329214e-08, |
|
"loss": 0.0946, |
|
"step": 39168 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.8745644599303134e-08, |
|
"loss": 0.0615, |
|
"step": 39232 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.7056277056277054e-08, |
|
"loss": 0.0845, |
|
"step": 39296 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.5366909513250974e-08, |
|
"loss": 0.0585, |
|
"step": 39360 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.3677541970224894e-08, |
|
"loss": 0.0982, |
|
"step": 39424 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.1988174427198818e-08, |
|
"loss": 0.1524, |
|
"step": 39488 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.0298806884172738e-08, |
|
"loss": 0.1244, |
|
"step": 39552 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.8609439341146658e-08, |
|
"loss": 0.0925, |
|
"step": 39616 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.6920071798120578e-08, |
|
"loss": 0.1284, |
|
"step": 39680 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.5230704255094498e-08, |
|
"loss": 0.1328, |
|
"step": 39744 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.3541336712068418e-08, |
|
"loss": 0.0868, |
|
"step": 39808 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.185196916904234e-08, |
|
"loss": 0.1753, |
|
"step": 39872 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.016260162601626e-08, |
|
"loss": 0.0568, |
|
"step": 39936 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 8.47323408299018e-09, |
|
"loss": 0.1291, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 6.783866539964101e-09, |
|
"loss": 0.1, |
|
"step": 40064 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 5.094498996938021e-09, |
|
"loss": 0.0706, |
|
"step": 40128 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.4051314539119417e-09, |
|
"loss": 0.065, |
|
"step": 40192 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.715763910885862e-09, |
|
"loss": 0.1014, |
|
"step": 40256 |
|
} |
|
], |
|
"logging_steps": 64, |
|
"max_steps": 40302, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 1.3692597675098112e+20, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|