{
  "best_metric": 0.9585338620875524,
  "best_model_checkpoint": "vit-msn-small-wbc-classifier-mono-V-all/checkpoint-1040",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1040,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04807692307692308,
      "grad_norm": 4.224180221557617,
      "learning_rate": 4.807692307692308e-06,
      "loss": 0.5899,
      "step": 10
    },
    {
      "epoch": 0.09615384615384616,
      "grad_norm": 1.716335654258728,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.2795,
      "step": 20
    },
    {
      "epoch": 0.14423076923076922,
      "grad_norm": 1.9335592985153198,
      "learning_rate": 1.4423076923076923e-05,
      "loss": 0.2528,
      "step": 30
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 16.045499801635742,
      "learning_rate": 1.923076923076923e-05,
      "loss": 0.248,
      "step": 40
    },
    {
      "epoch": 0.2403846153846154,
      "grad_norm": 3.4744293689727783,
      "learning_rate": 2.4038461538461542e-05,
      "loss": 0.2172,
      "step": 50
    },
    {
      "epoch": 0.28846153846153844,
      "grad_norm": 7.248939514160156,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 0.1973,
      "step": 60
    },
    {
      "epoch": 0.33653846153846156,
      "grad_norm": 13.428326606750488,
      "learning_rate": 3.365384615384616e-05,
      "loss": 0.245,
      "step": 70
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 14.730627059936523,
      "learning_rate": 3.846153846153846e-05,
      "loss": 0.23,
      "step": 80
    },
    {
      "epoch": 0.4326923076923077,
      "grad_norm": 6.623648643493652,
      "learning_rate": 4.326923076923077e-05,
      "loss": 0.1977,
      "step": 90
    },
    {
      "epoch": 0.4807692307692308,
      "grad_norm": 3.5982465744018555,
      "learning_rate": 4.8076923076923084e-05,
      "loss": 0.1912,
      "step": 100
    },
    {
      "epoch": 0.5288461538461539,
      "grad_norm": 8.891210556030273,
      "learning_rate": 4.9679487179487185e-05,
      "loss": 0.1757,
      "step": 110
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 16.044160842895508,
      "learning_rate": 4.9145299145299147e-05,
      "loss": 0.2119,
      "step": 120
    },
    {
      "epoch": 0.625,
      "grad_norm": 4.071866989135742,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 0.1954,
      "step": 130
    },
    {
      "epoch": 0.6730769230769231,
      "grad_norm": 5.397825241088867,
      "learning_rate": 4.8076923076923084e-05,
      "loss": 0.1927,
      "step": 140
    },
    {
      "epoch": 0.7211538461538461,
      "grad_norm": 2.4581189155578613,
      "learning_rate": 4.7542735042735045e-05,
      "loss": 0.1921,
      "step": 150
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 4.148955345153809,
      "learning_rate": 4.700854700854701e-05,
      "loss": 0.1945,
      "step": 160
    },
    {
      "epoch": 0.8173076923076923,
      "grad_norm": 7.619889259338379,
      "learning_rate": 4.6474358974358976e-05,
      "loss": 0.1904,
      "step": 170
    },
    {
      "epoch": 0.8653846153846154,
      "grad_norm": 8.802661895751953,
      "learning_rate": 4.594017094017094e-05,
      "loss": 0.1801,
      "step": 180
    },
    {
      "epoch": 0.9134615384615384,
      "grad_norm": 1.7985179424285889,
      "learning_rate": 4.5405982905982906e-05,
      "loss": 0.1564,
      "step": 190
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 7.868402481079102,
      "learning_rate": 4.4871794871794874e-05,
      "loss": 0.1974,
      "step": 200
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9371655180379626,
      "eval_loss": 0.16416136920452118,
      "eval_runtime": 85.3977,
      "eval_samples_per_second": 304.142,
      "eval_steps_per_second": 4.754,
      "step": 208
    },
    {
      "epoch": 1.0096153846153846,
      "grad_norm": 4.705746650695801,
      "learning_rate": 4.4337606837606836e-05,
      "loss": 0.203,
      "step": 210
    },
    {
      "epoch": 1.0576923076923077,
      "grad_norm": 3.6939737796783447,
      "learning_rate": 4.3803418803418805e-05,
      "loss": 0.1752,
      "step": 220
    },
    {
      "epoch": 1.1057692307692308,
      "grad_norm": 3.589677333831787,
      "learning_rate": 4.326923076923077e-05,
      "loss": 0.1858,
      "step": 230
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 6.519199848175049,
      "learning_rate": 4.2735042735042735e-05,
      "loss": 0.1814,
      "step": 240
    },
    {
      "epoch": 1.2019230769230769,
      "grad_norm": 8.503952026367188,
      "learning_rate": 4.2200854700854704e-05,
      "loss": 0.1783,
      "step": 250
    },
    {
      "epoch": 1.25,
      "grad_norm": 2.7003586292266846,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.1611,
      "step": 260
    },
    {
      "epoch": 1.2980769230769231,
      "grad_norm": 2.9637396335601807,
      "learning_rate": 4.1132478632478634e-05,
      "loss": 0.1807,
      "step": 270
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 2.4124629497528076,
      "learning_rate": 4.05982905982906e-05,
      "loss": 0.1669,
      "step": 280
    },
    {
      "epoch": 1.3942307692307692,
      "grad_norm": 5.0713791847229,
      "learning_rate": 4.006410256410257e-05,
      "loss": 0.1743,
      "step": 290
    },
    {
      "epoch": 1.4423076923076923,
      "grad_norm": 2.7951672077178955,
      "learning_rate": 3.952991452991453e-05,
      "loss": 0.1717,
      "step": 300
    },
    {
      "epoch": 1.4903846153846154,
      "grad_norm": 4.324550628662109,
      "learning_rate": 3.89957264957265e-05,
      "loss": 0.188,
      "step": 310
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 2.242941379547119,
      "learning_rate": 3.846153846153846e-05,
      "loss": 0.1914,
      "step": 320
    },
    {
      "epoch": 1.5865384615384617,
      "grad_norm": 2.958832025527954,
      "learning_rate": 3.7927350427350425e-05,
      "loss": 0.1527,
      "step": 330
    },
    {
      "epoch": 1.6346153846153846,
      "grad_norm": 3.1927058696746826,
      "learning_rate": 3.739316239316239e-05,
      "loss": 0.1822,
      "step": 340
    },
    {
      "epoch": 1.6826923076923077,
      "grad_norm": 5.284717082977295,
      "learning_rate": 3.685897435897436e-05,
      "loss": 0.1587,
      "step": 350
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 2.2974815368652344,
      "learning_rate": 3.6324786324786323e-05,
      "loss": 0.1494,
      "step": 360
    },
    {
      "epoch": 1.7788461538461537,
      "grad_norm": 2.8491830825805664,
      "learning_rate": 3.579059829059829e-05,
      "loss": 0.1556,
      "step": 370
    },
    {
      "epoch": 1.8269230769230769,
      "grad_norm": 2.7790327072143555,
      "learning_rate": 3.525641025641026e-05,
      "loss": 0.1552,
      "step": 380
    },
    {
      "epoch": 1.875,
      "grad_norm": 2.0398130416870117,
      "learning_rate": 3.472222222222222e-05,
      "loss": 0.1704,
      "step": 390
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 2.2376208305358887,
      "learning_rate": 3.418803418803419e-05,
      "loss": 0.1697,
      "step": 400
    },
    {
      "epoch": 1.9711538461538463,
      "grad_norm": 2.9672436714172363,
      "learning_rate": 3.365384615384616e-05,
      "loss": 0.1589,
      "step": 410
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.950833557925538,
      "eval_loss": 0.1333734393119812,
      "eval_runtime": 85.0927,
      "eval_samples_per_second": 305.232,
      "eval_steps_per_second": 4.771,
      "step": 416
    },
    {
      "epoch": 2.019230769230769,
      "grad_norm": 1.8962812423706055,
      "learning_rate": 3.311965811965812e-05,
      "loss": 0.1603,
      "step": 420
    },
    {
      "epoch": 2.0673076923076925,
      "grad_norm": 3.2972991466522217,
      "learning_rate": 3.258547008547009e-05,
      "loss": 0.1524,
      "step": 430
    },
    {
      "epoch": 2.1153846153846154,
      "grad_norm": 2.4130704402923584,
      "learning_rate": 3.205128205128206e-05,
      "loss": 0.1507,
      "step": 440
    },
    {
      "epoch": 2.1634615384615383,
      "grad_norm": 4.791253089904785,
      "learning_rate": 3.151709401709402e-05,
      "loss": 0.1717,
      "step": 450
    },
    {
      "epoch": 2.2115384615384617,
      "grad_norm": 3.113145112991333,
      "learning_rate": 3.098290598290599e-05,
      "loss": 0.1484,
      "step": 460
    },
    {
      "epoch": 2.2596153846153846,
      "grad_norm": 2.3643486499786377,
      "learning_rate": 3.0448717948717947e-05,
      "loss": 0.1717,
      "step": 470
    },
    {
      "epoch": 2.3076923076923075,
      "grad_norm": 3.9917261600494385,
      "learning_rate": 2.9914529914529915e-05,
      "loss": 0.1647,
      "step": 480
    },
    {
      "epoch": 2.355769230769231,
      "grad_norm": 7.72503662109375,
      "learning_rate": 2.9380341880341884e-05,
      "loss": 0.1506,
      "step": 490
    },
    {
      "epoch": 2.4038461538461537,
      "grad_norm": 2.822436809539795,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 0.161,
      "step": 500
    },
    {
      "epoch": 2.451923076923077,
      "grad_norm": 2.8079020977020264,
      "learning_rate": 2.8311965811965814e-05,
      "loss": 0.132,
      "step": 510
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.609222650527954,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.1531,
      "step": 520
    },
    {
      "epoch": 2.5480769230769234,
      "grad_norm": 4.114030361175537,
      "learning_rate": 2.724358974358974e-05,
      "loss": 0.137,
      "step": 530
    },
    {
      "epoch": 2.5961538461538463,
      "grad_norm": 2.093318223953247,
      "learning_rate": 2.670940170940171e-05,
      "loss": 0.1394,
      "step": 540
    },
    {
      "epoch": 2.644230769230769,
      "grad_norm": 2.208742618560791,
      "learning_rate": 2.6175213675213678e-05,
      "loss": 0.1536,
      "step": 550
    },
    {
      "epoch": 2.6923076923076925,
      "grad_norm": 2.9571166038513184,
      "learning_rate": 2.564102564102564e-05,
      "loss": 0.1576,
      "step": 560
    },
    {
      "epoch": 2.7403846153846154,
      "grad_norm": 2.523430824279785,
      "learning_rate": 2.5106837606837608e-05,
      "loss": 0.1486,
      "step": 570
    },
    {
      "epoch": 2.7884615384615383,
      "grad_norm": 3.496368885040283,
      "learning_rate": 2.4572649572649573e-05,
      "loss": 0.1463,
      "step": 580
    },
    {
      "epoch": 2.8365384615384617,
      "grad_norm": 2.219247579574585,
      "learning_rate": 2.4038461538461542e-05,
      "loss": 0.1464,
      "step": 590
    },
    {
      "epoch": 2.8846153846153846,
      "grad_norm": 2.0809261798858643,
      "learning_rate": 2.3504273504273504e-05,
      "loss": 0.1467,
      "step": 600
    },
    {
      "epoch": 2.9326923076923075,
      "grad_norm": 2.1210248470306396,
      "learning_rate": 2.297008547008547e-05,
      "loss": 0.1557,
      "step": 610
    },
    {
      "epoch": 2.980769230769231,
      "grad_norm": 1.880336046218872,
      "learning_rate": 2.2435897435897437e-05,
      "loss": 0.134,
      "step": 620
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9431332537635236,
      "eval_loss": 0.14658282697200775,
      "eval_runtime": 85.5913,
      "eval_samples_per_second": 303.454,
      "eval_steps_per_second": 4.743,
      "step": 624
    },
    {
      "epoch": 3.0288461538461537,
      "grad_norm": 1.8079543113708496,
      "learning_rate": 2.1901709401709402e-05,
      "loss": 0.1553,
      "step": 630
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 1.4903086423873901,
      "learning_rate": 2.1367521367521368e-05,
      "loss": 0.1485,
      "step": 640
    },
    {
      "epoch": 3.125,
      "grad_norm": 2.690070629119873,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 0.1423,
      "step": 650
    },
    {
      "epoch": 3.173076923076923,
      "grad_norm": 5.739427089691162,
      "learning_rate": 2.02991452991453e-05,
      "loss": 0.1471,
      "step": 660
    },
    {
      "epoch": 3.2211538461538463,
      "grad_norm": 2.2277045249938965,
      "learning_rate": 1.9764957264957266e-05,
      "loss": 0.1476,
      "step": 670
    },
    {
      "epoch": 3.269230769230769,
      "grad_norm": 1.629521369934082,
      "learning_rate": 1.923076923076923e-05,
      "loss": 0.1465,
      "step": 680
    },
    {
      "epoch": 3.3173076923076925,
      "grad_norm": 1.9700013399124146,
      "learning_rate": 1.8696581196581197e-05,
      "loss": 0.1317,
      "step": 690
    },
    {
      "epoch": 3.3653846153846154,
      "grad_norm": 4.258269309997559,
      "learning_rate": 1.8162393162393162e-05,
      "loss": 0.1329,
      "step": 700
    },
    {
      "epoch": 3.4134615384615383,
      "grad_norm": 6.2838664054870605,
      "learning_rate": 1.762820512820513e-05,
      "loss": 0.1686,
      "step": 710
    },
    {
      "epoch": 3.4615384615384617,
      "grad_norm": 4.847503185272217,
      "learning_rate": 1.7094017094017095e-05,
      "loss": 0.1422,
      "step": 720
    },
    {
      "epoch": 3.5096153846153846,
      "grad_norm": 2.352872371673584,
      "learning_rate": 1.655982905982906e-05,
      "loss": 0.1517,
      "step": 730
    },
    {
      "epoch": 3.5576923076923075,
      "grad_norm": 2.504417896270752,
      "learning_rate": 1.602564102564103e-05,
      "loss": 0.1391,
      "step": 740
    },
    {
      "epoch": 3.605769230769231,
      "grad_norm": 2.708151340484619,
      "learning_rate": 1.5491452991452994e-05,
      "loss": 0.118,
      "step": 750
    },
    {
      "epoch": 3.6538461538461537,
      "grad_norm": 2.629040002822876,
      "learning_rate": 1.4957264957264958e-05,
      "loss": 0.146,
      "step": 760
    },
    {
      "epoch": 3.7019230769230766,
      "grad_norm": 3.0288608074188232,
      "learning_rate": 1.4423076923076923e-05,
      "loss": 0.1261,
      "step": 770
    },
    {
      "epoch": 3.75,
      "grad_norm": 2.5658912658691406,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.1533,
      "step": 780
    },
    {
      "epoch": 3.7980769230769234,
      "grad_norm": 2.7179951667785645,
      "learning_rate": 1.3354700854700855e-05,
      "loss": 0.1413,
      "step": 790
    },
    {
      "epoch": 3.8461538461538463,
      "grad_norm": 1.7115960121154785,
      "learning_rate": 1.282051282051282e-05,
      "loss": 0.1417,
      "step": 800
    },
    {
      "epoch": 3.894230769230769,
      "grad_norm": 2.939439535140991,
      "learning_rate": 1.2286324786324787e-05,
      "loss": 0.1311,
      "step": 810
    },
    {
      "epoch": 3.9423076923076925,
      "grad_norm": 3.047938585281372,
      "learning_rate": 1.1752136752136752e-05,
      "loss": 0.1247,
      "step": 820
    },
    {
      "epoch": 3.9903846153846154,
      "grad_norm": 2.0042459964752197,
      "learning_rate": 1.1217948717948719e-05,
      "loss": 0.1488,
      "step": 830
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9566472875678589,
      "eval_loss": 0.11553934961557388,
      "eval_runtime": 85.0472,
      "eval_samples_per_second": 305.395,
      "eval_steps_per_second": 4.774,
      "step": 832
    },
    {
      "epoch": 4.038461538461538,
      "grad_norm": 2.6015453338623047,
      "learning_rate": 1.0683760683760684e-05,
      "loss": 0.1375,
      "step": 840
    },
    {
      "epoch": 4.086538461538462,
      "grad_norm": 2.7020106315612793,
      "learning_rate": 1.014957264957265e-05,
      "loss": 0.1389,
      "step": 850
    },
    {
      "epoch": 4.134615384615385,
      "grad_norm": 2.7265429496765137,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.1327,
      "step": 860
    },
    {
      "epoch": 4.1826923076923075,
      "grad_norm": 5.0143866539001465,
      "learning_rate": 9.081196581196581e-06,
      "loss": 0.1436,
      "step": 870
    },
    {
      "epoch": 4.230769230769231,
      "grad_norm": 3.4509687423706055,
      "learning_rate": 8.547008547008548e-06,
      "loss": 0.1238,
      "step": 880
    },
    {
      "epoch": 4.278846153846154,
      "grad_norm": 2.778099536895752,
      "learning_rate": 8.012820512820515e-06,
      "loss": 0.1219,
      "step": 890
    },
    {
      "epoch": 4.326923076923077,
      "grad_norm": 2.5686230659484863,
      "learning_rate": 7.478632478632479e-06,
      "loss": 0.1291,
      "step": 900
    },
    {
      "epoch": 4.375,
      "grad_norm": 2.001837730407715,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.1234,
      "step": 910
    },
    {
      "epoch": 4.423076923076923,
      "grad_norm": 2.5673305988311768,
      "learning_rate": 6.41025641025641e-06,
      "loss": 0.1279,
      "step": 920
    },
    {
      "epoch": 4.471153846153846,
      "grad_norm": 3.475268602371216,
      "learning_rate": 5.876068376068376e-06,
      "loss": 0.1272,
      "step": 930
    },
    {
      "epoch": 4.519230769230769,
      "grad_norm": 2.018739700317383,
      "learning_rate": 5.341880341880342e-06,
      "loss": 0.135,
      "step": 940
    },
    {
      "epoch": 4.5673076923076925,
      "grad_norm": 3.1651690006256104,
      "learning_rate": 4.807692307692308e-06,
      "loss": 0.1429,
      "step": 950
    },
    {
      "epoch": 4.615384615384615,
      "grad_norm": 1.8769733905792236,
      "learning_rate": 4.273504273504274e-06,
      "loss": 0.1254,
      "step": 960
    },
    {
      "epoch": 4.663461538461538,
      "grad_norm": 2.2759199142456055,
      "learning_rate": 3.7393162393162394e-06,
      "loss": 0.1255,
      "step": 970
    },
    {
      "epoch": 4.711538461538462,
      "grad_norm": 2.8174450397491455,
      "learning_rate": 3.205128205128205e-06,
      "loss": 0.1325,
      "step": 980
    },
    {
      "epoch": 4.759615384615385,
      "grad_norm": 3.362974166870117,
      "learning_rate": 2.670940170940171e-06,
      "loss": 0.1334,
      "step": 990
    },
    {
      "epoch": 4.8076923076923075,
      "grad_norm": 2.34413743019104,
      "learning_rate": 2.136752136752137e-06,
      "loss": 0.125,
      "step": 1000
    },
    {
      "epoch": 4.855769230769231,
      "grad_norm": 3.007368326187134,
      "learning_rate": 1.6025641025641025e-06,
      "loss": 0.1288,
      "step": 1010
    },
    {
      "epoch": 4.903846153846154,
      "grad_norm": 3.8937089443206787,
      "learning_rate": 1.0683760683760685e-06,
      "loss": 0.1272,
      "step": 1020
    },
    {
      "epoch": 4.951923076923077,
      "grad_norm": 2.394737958908081,
      "learning_rate": 5.341880341880342e-07,
      "loss": 0.1322,
      "step": 1030
    },
    {
      "epoch": 5.0,
      "grad_norm": 1.8422698974609375,
      "learning_rate": 0.0,
      "loss": 0.1169,
      "step": 1040
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9585338620875524,
      "eval_loss": 0.11395391821861267,
      "eval_runtime": 84.4488,
      "eval_samples_per_second": 307.559,
      "eval_steps_per_second": 4.808,
      "step": 1040
    },
    {
      "epoch": 5.0,
      "step": 1040,
      "total_flos": 5.2079419237154e+18,
      "train_loss": 0.16386900280530636,
      "train_runtime": 2488.4676,
      "train_samples_per_second": 106.949,
      "train_steps_per_second": 0.418
    }
  ],
  "logging_steps": 10,
  "max_steps": 1040,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.2079419237154e+18,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}