|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 9.98120873539868, |
|
"eval_steps": 500, |
|
"global_step": 4920, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02031488065007618, |
|
"grad_norm": 7.584718704223633, |
|
"learning_rate": 1.999990002577244e-05, |
|
"loss": 3.2828, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04062976130015236, |
|
"grad_norm": 6.282063007354736, |
|
"learning_rate": 1.999941036089858e-05, |
|
"loss": 3.084, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06094464195022854, |
|
"grad_norm": 4.432021617889404, |
|
"learning_rate": 1.9998512662721364e-05, |
|
"loss": 2.9074, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08125952260030472, |
|
"grad_norm": 4.4949116706848145, |
|
"learning_rate": 1.9997206967872025e-05, |
|
"loss": 2.7432, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.1015744032503809, |
|
"grad_norm": 3.794994592666626, |
|
"learning_rate": 1.99954933296304e-05, |
|
"loss": 2.5308, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.12188928390045708, |
|
"grad_norm": 3.7512996196746826, |
|
"learning_rate": 1.999337181792276e-05, |
|
"loss": 2.3957, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.14220416455053325, |
|
"grad_norm": 3.3922431468963623, |
|
"learning_rate": 1.999084251931896e-05, |
|
"loss": 2.2018, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.16251904520060945, |
|
"grad_norm": 2.835357666015625, |
|
"learning_rate": 1.998790553702888e-05, |
|
"loss": 2.0505, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1828339258506856, |
|
"grad_norm": 2.4077184200286865, |
|
"learning_rate": 1.998456099089825e-05, |
|
"loss": 1.8802, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.2031488065007618, |
|
"grad_norm": 2.9254958629608154, |
|
"learning_rate": 1.998080901740374e-05, |
|
"loss": 1.772, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.22346368715083798, |
|
"grad_norm": 2.102215051651001, |
|
"learning_rate": 1.9976649769647392e-05, |
|
"loss": 1.7375, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.24377856780091417, |
|
"grad_norm": 2.1416189670562744, |
|
"learning_rate": 1.997208341735037e-05, |
|
"loss": 1.6127, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.26409344845099036, |
|
"grad_norm": 1.9121668338775635, |
|
"learning_rate": 1.9967110146846044e-05, |
|
"loss": 1.5663, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2844083291010665, |
|
"grad_norm": 1.7665735483169556, |
|
"learning_rate": 1.9961730161072383e-05, |
|
"loss": 1.5384, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3047232097511427, |
|
"grad_norm": 2.695359468460083, |
|
"learning_rate": 1.9955943679563655e-05, |
|
"loss": 1.4752, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3250380904012189, |
|
"grad_norm": 1.7461923360824585, |
|
"learning_rate": 1.9949750938441517e-05, |
|
"loss": 1.4581, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.3453529710512951, |
|
"grad_norm": 1.856467366218567, |
|
"learning_rate": 1.994315219040532e-05, |
|
"loss": 1.4243, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3656678517013712, |
|
"grad_norm": 1.6817877292633057, |
|
"learning_rate": 1.9936147704721836e-05, |
|
"loss": 1.4437, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3859827323514474, |
|
"grad_norm": 1.6875474452972412, |
|
"learning_rate": 1.9928737767214267e-05, |
|
"loss": 1.4146, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.4062976130015236, |
|
"grad_norm": 2.079127311706543, |
|
"learning_rate": 1.992092268025057e-05, |
|
"loss": 1.3857, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.4266124936515998, |
|
"grad_norm": 1.4948805570602417, |
|
"learning_rate": 1.9912702762731118e-05, |
|
"loss": 1.3748, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.44692737430167595, |
|
"grad_norm": 1.6643149852752686, |
|
"learning_rate": 1.9904078350075703e-05, |
|
"loss": 1.3904, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.46724225495175215, |
|
"grad_norm": 2.044997215270996, |
|
"learning_rate": 1.9895049794209834e-05, |
|
"loss": 1.3519, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.48755713560182834, |
|
"grad_norm": 1.9969228506088257, |
|
"learning_rate": 1.9885617463550376e-05, |
|
"loss": 1.3637, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5078720162519045, |
|
"grad_norm": 2.5120513439178467, |
|
"learning_rate": 1.987678345628239e-05, |
|
"loss": 1.3516, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5281868969019807, |
|
"grad_norm": 2.049866199493408, |
|
"learning_rate": 1.9866585027500926e-05, |
|
"loss": 1.3149, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5485017775520569, |
|
"grad_norm": 2.5176775455474854, |
|
"learning_rate": 1.9855983985451647e-05, |
|
"loss": 1.2744, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.568816658202133, |
|
"grad_norm": 2.2515692710876465, |
|
"learning_rate": 1.9844980762717877e-05, |
|
"loss": 1.2986, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5891315388522093, |
|
"grad_norm": 1.9790077209472656, |
|
"learning_rate": 1.983357580829422e-05, |
|
"loss": 1.2556, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6094464195022854, |
|
"grad_norm": 2.0562939643859863, |
|
"learning_rate": 1.982176958756825e-05, |
|
"loss": 1.2565, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6297613001523616, |
|
"grad_norm": 2.895200252532959, |
|
"learning_rate": 1.9809562582301487e-05, |
|
"loss": 1.2217, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6500761808024378, |
|
"grad_norm": 2.379382371902466, |
|
"learning_rate": 1.9796955290609775e-05, |
|
"loss": 1.2186, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6703910614525139, |
|
"grad_norm": 1.5008665323257446, |
|
"learning_rate": 1.9783948226942936e-05, |
|
"loss": 1.2093, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6907059421025902, |
|
"grad_norm": 2.294449806213379, |
|
"learning_rate": 1.9770541922063778e-05, |
|
"loss": 1.228, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7110208227526663, |
|
"grad_norm": 1.5949811935424805, |
|
"learning_rate": 1.975673692302645e-05, |
|
"loss": 1.211, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7313357034027425, |
|
"grad_norm": 2.0285587310791016, |
|
"learning_rate": 1.97425337931541e-05, |
|
"loss": 1.2224, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7516505840528187, |
|
"grad_norm": 1.7120383977890015, |
|
"learning_rate": 1.97279331120159e-05, |
|
"loss": 1.2181, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7719654647028948, |
|
"grad_norm": 2.0460448265075684, |
|
"learning_rate": 1.97129354754034e-05, |
|
"loss": 1.206, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7922803453529711, |
|
"grad_norm": 2.479123592376709, |
|
"learning_rate": 1.96975414953062e-05, |
|
"loss": 1.1987, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8125952260030472, |
|
"grad_norm": 2.1566030979156494, |
|
"learning_rate": 1.9681751799887e-05, |
|
"loss": 1.2081, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8329101066531234, |
|
"grad_norm": 1.7905540466308594, |
|
"learning_rate": 1.9665567033455948e-05, |
|
"loss": 1.205, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8532249873031996, |
|
"grad_norm": 1.5104587078094482, |
|
"learning_rate": 1.9648987856444356e-05, |
|
"loss": 1.1808, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8735398679532758, |
|
"grad_norm": 1.601035714149475, |
|
"learning_rate": 1.9632014945377748e-05, |
|
"loss": 1.2039, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8938547486033519, |
|
"grad_norm": 1.4743677377700806, |
|
"learning_rate": 1.9614648992848265e-05, |
|
"loss": 1.1923, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9141696292534282, |
|
"grad_norm": 2.2900314331054688, |
|
"learning_rate": 1.959689070748638e-05, |
|
"loss": 1.19, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9344845099035043, |
|
"grad_norm": 2.500079393386841, |
|
"learning_rate": 1.9578740813932017e-05, |
|
"loss": 1.1826, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9547993905535805, |
|
"grad_norm": 1.704733967781067, |
|
"learning_rate": 1.9560200052804935e-05, |
|
"loss": 1.1811, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.9751142712036567, |
|
"grad_norm": 2.3521337509155273, |
|
"learning_rate": 1.954126918067455e-05, |
|
"loss": 1.1868, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9954291518537328, |
|
"grad_norm": 2.1910970211029053, |
|
"learning_rate": 1.9521948970029045e-05, |
|
"loss": 1.2032, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.1905763149261475, |
|
"eval_runtime": 57.9944, |
|
"eval_samples_per_second": 33.969, |
|
"eval_steps_per_second": 8.501, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.0142204164550532, |
|
"grad_norm": 1.6047745943069458, |
|
"learning_rate": 1.9502240209243834e-05, |
|
"loss": 1.1649, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.0345352971051296, |
|
"grad_norm": 2.1489083766937256, |
|
"learning_rate": 1.948214370254942e-05, |
|
"loss": 1.1811, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.0548501777552057, |
|
"grad_norm": 1.674709677696228, |
|
"learning_rate": 1.9461660269998556e-05, |
|
"loss": 1.1681, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.0751650584052819, |
|
"grad_norm": 1.839033603668213, |
|
"learning_rate": 1.9440790747432797e-05, |
|
"loss": 1.1683, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.095479939055358, |
|
"grad_norm": 2.0029451847076416, |
|
"learning_rate": 1.941953598644837e-05, |
|
"loss": 1.1471, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.1157948197054341, |
|
"grad_norm": 2.1408133506774902, |
|
"learning_rate": 1.9397896854361454e-05, |
|
"loss": 1.1865, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.1361097003555103, |
|
"grad_norm": 1.6449344158172607, |
|
"learning_rate": 1.937587423417277e-05, |
|
"loss": 1.1616, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.1564245810055866, |
|
"grad_norm": 2.530738592147827, |
|
"learning_rate": 1.935346902453155e-05, |
|
"loss": 1.1847, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.1767394616556628, |
|
"grad_norm": 1.7173138856887817, |
|
"learning_rate": 1.9330682139698872e-05, |
|
"loss": 1.1836, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.197054342305739, |
|
"grad_norm": 1.9412487745285034, |
|
"learning_rate": 1.9307514509510358e-05, |
|
"loss": 1.1745, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.217369222955815, |
|
"grad_norm": 1.9713300466537476, |
|
"learning_rate": 1.928396707933821e-05, |
|
"loss": 1.1498, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.2376841036058912, |
|
"grad_norm": 2.253971815109253, |
|
"learning_rate": 1.926004081005266e-05, |
|
"loss": 1.1554, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.2579989842559676, |
|
"grad_norm": 1.9793624877929688, |
|
"learning_rate": 1.9235736677982747e-05, |
|
"loss": 1.1445, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.2783138649060437, |
|
"grad_norm": 2.292801856994629, |
|
"learning_rate": 1.9211055674876475e-05, |
|
"loss": 1.1373, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.2986287455561198, |
|
"grad_norm": 1.8089720010757446, |
|
"learning_rate": 1.9185998807860357e-05, |
|
"loss": 1.1324, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.318943626206196, |
|
"grad_norm": 2.003289222717285, |
|
"learning_rate": 1.9160567099398294e-05, |
|
"loss": 1.173, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.3392585068562721, |
|
"grad_norm": 2.1809918880462646, |
|
"learning_rate": 1.9134761587249886e-05, |
|
"loss": 1.1408, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.3595733875063485, |
|
"grad_norm": 2.0070571899414062, |
|
"learning_rate": 1.9108583324428054e-05, |
|
"loss": 1.1811, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.3798882681564246, |
|
"grad_norm": 1.6615877151489258, |
|
"learning_rate": 1.9082033379156096e-05, |
|
"loss": 1.164, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.4002031488065008, |
|
"grad_norm": 2.217325210571289, |
|
"learning_rate": 1.9055112834824075e-05, |
|
"loss": 1.1534, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.420518029456577, |
|
"grad_norm": 1.8769993782043457, |
|
"learning_rate": 1.9027822789944625e-05, |
|
"loss": 1.1476, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.440832910106653, |
|
"grad_norm": 1.8755251169204712, |
|
"learning_rate": 1.900016435810813e-05, |
|
"loss": 1.1773, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.4611477907567294, |
|
"grad_norm": 2.059292793273926, |
|
"learning_rate": 1.8972138667937268e-05, |
|
"loss": 1.1512, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.4814626714068055, |
|
"grad_norm": 2.2438113689422607, |
|
"learning_rate": 1.894374686304096e-05, |
|
"loss": 1.1313, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.5017775520568817, |
|
"grad_norm": 1.6689549684524536, |
|
"learning_rate": 1.8914990101967717e-05, |
|
"loss": 1.1506, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.5220924327069578, |
|
"grad_norm": 1.9220091104507446, |
|
"learning_rate": 1.8885869558158348e-05, |
|
"loss": 1.1389, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.542407313357034, |
|
"grad_norm": 1.970219373703003, |
|
"learning_rate": 1.885638641989809e-05, |
|
"loss": 1.1535, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.5627221940071103, |
|
"grad_norm": 2.3392066955566406, |
|
"learning_rate": 1.882654189026811e-05, |
|
"loss": 1.1467, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.5830370746571862, |
|
"grad_norm": 1.9110764265060425, |
|
"learning_rate": 1.8796337187096415e-05, |
|
"loss": 1.1457, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.6033519553072626, |
|
"grad_norm": 3.4612278938293457, |
|
"learning_rate": 1.8765773542908165e-05, |
|
"loss": 1.1443, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.6236668359573387, |
|
"grad_norm": 1.8444197177886963, |
|
"learning_rate": 1.8734852204875363e-05, |
|
"loss": 1.1375, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.6439817166074149, |
|
"grad_norm": 1.7353184223175049, |
|
"learning_rate": 1.8703574434765986e-05, |
|
"loss": 1.1802, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.6642965972574912, |
|
"grad_norm": 2.283703327178955, |
|
"learning_rate": 1.8671941508892467e-05, |
|
"loss": 1.1457, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.6846114779075672, |
|
"grad_norm": 2.3982231616973877, |
|
"learning_rate": 1.8639954718059647e-05, |
|
"loss": 1.134, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.7049263585576435, |
|
"grad_norm": 2.6923394203186035, |
|
"learning_rate": 1.8607615367512077e-05, |
|
"loss": 1.1387, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.7252412392077197, |
|
"grad_norm": 1.870726466178894, |
|
"learning_rate": 1.8574924776880777e-05, |
|
"loss": 1.1479, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.7455561198577958, |
|
"grad_norm": 2.014890432357788, |
|
"learning_rate": 1.8541884280129367e-05, |
|
"loss": 1.1724, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.7658710005078722, |
|
"grad_norm": 1.8836543560028076, |
|
"learning_rate": 1.8508495225499643e-05, |
|
"loss": 1.1483, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.786185881157948, |
|
"grad_norm": 1.8277486562728882, |
|
"learning_rate": 1.8474758975456576e-05, |
|
"loss": 1.121, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.8065007618080244, |
|
"grad_norm": 1.8098642826080322, |
|
"learning_rate": 1.8440676906632688e-05, |
|
"loss": 1.1404, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.8268156424581006, |
|
"grad_norm": 2.0172536373138428, |
|
"learning_rate": 1.8406250409771894e-05, |
|
"loss": 1.1424, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.8471305231081767, |
|
"grad_norm": 2.2588918209075928, |
|
"learning_rate": 1.8371480889672756e-05, |
|
"loss": 1.1394, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.867445403758253, |
|
"grad_norm": 2.9289000034332275, |
|
"learning_rate": 1.8336369765131137e-05, |
|
"loss": 1.1273, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.887760284408329, |
|
"grad_norm": 1.9783216714859009, |
|
"learning_rate": 1.8300918468882337e-05, |
|
"loss": 1.1316, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.9080751650584054, |
|
"grad_norm": 2.6187262535095215, |
|
"learning_rate": 1.8265128447542606e-05, |
|
"loss": 1.1347, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.9283900457084815, |
|
"grad_norm": 1.7901383638381958, |
|
"learning_rate": 1.822900116155012e-05, |
|
"loss": 1.1419, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.9487049263585576, |
|
"grad_norm": 2.4337475299835205, |
|
"learning_rate": 1.819253808510538e-05, |
|
"loss": 1.1253, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.9690198070086338, |
|
"grad_norm": 1.88914155960083, |
|
"learning_rate": 1.8155740706111086e-05, |
|
"loss": 1.1278, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.98933468765871, |
|
"grad_norm": 2.087872266769409, |
|
"learning_rate": 1.811861052611137e-05, |
|
"loss": 1.1239, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.1426180601119995, |
|
"eval_runtime": 56.1438, |
|
"eval_samples_per_second": 35.088, |
|
"eval_steps_per_second": 8.781, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.0081259522600305, |
|
"grad_norm": 2.3936855792999268, |
|
"learning_rate": 1.8081149060230584e-05, |
|
"loss": 1.1307, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.0284408329101065, |
|
"grad_norm": 2.97658371925354, |
|
"learning_rate": 1.804335783711142e-05, |
|
"loss": 1.1115, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.048755713560183, |
|
"grad_norm": 1.7273203134536743, |
|
"learning_rate": 1.8005238398852567e-05, |
|
"loss": 1.1145, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.069070594210259, |
|
"grad_norm": 2.1559698581695557, |
|
"learning_rate": 1.7966792300945782e-05, |
|
"loss": 1.1333, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.089385474860335, |
|
"grad_norm": 1.8381649255752563, |
|
"learning_rate": 1.7928021112212397e-05, |
|
"loss": 1.0982, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.1097003555104115, |
|
"grad_norm": 2.330662727355957, |
|
"learning_rate": 1.788892641473932e-05, |
|
"loss": 1.1246, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.1300152361604874, |
|
"grad_norm": 2.1321730613708496, |
|
"learning_rate": 1.784950980381447e-05, |
|
"loss": 1.1182, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.1503301168105637, |
|
"grad_norm": 2.3504936695098877, |
|
"learning_rate": 1.7809772887861684e-05, |
|
"loss": 1.1449, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.17064499746064, |
|
"grad_norm": 1.9942519664764404, |
|
"learning_rate": 1.7769717288375078e-05, |
|
"loss": 1.1355, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.190959878110716, |
|
"grad_norm": 2.6634910106658936, |
|
"learning_rate": 1.7729344639852886e-05, |
|
"loss": 1.1317, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.2112747587607924, |
|
"grad_norm": 2.0692458152770996, |
|
"learning_rate": 1.768865658973076e-05, |
|
"loss": 1.1349, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.2315896394108683, |
|
"grad_norm": 2.0810651779174805, |
|
"learning_rate": 1.7647654798314554e-05, |
|
"loss": 1.1084, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.2519045200609447, |
|
"grad_norm": 1.9821966886520386, |
|
"learning_rate": 1.760634093871256e-05, |
|
"loss": 1.1274, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.2722194007110206, |
|
"grad_norm": 2.893826484680176, |
|
"learning_rate": 1.756471669676723e-05, |
|
"loss": 1.1262, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.292534281361097, |
|
"grad_norm": 1.9358024597167969, |
|
"learning_rate": 1.7522783770986425e-05, |
|
"loss": 1.1138, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.3128491620111733, |
|
"grad_norm": 1.746727466583252, |
|
"learning_rate": 1.748054387247405e-05, |
|
"loss": 1.1313, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.333164042661249, |
|
"grad_norm": 1.878308892250061, |
|
"learning_rate": 1.743799872486028e-05, |
|
"loss": 1.1313, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.3534789233113256, |
|
"grad_norm": 2.322291135787964, |
|
"learning_rate": 1.739515006423118e-05, |
|
"loss": 1.1173, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.3737938039614015, |
|
"grad_norm": 1.9874950647354126, |
|
"learning_rate": 1.7351999639057915e-05, |
|
"loss": 1.1023, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.394108684611478, |
|
"grad_norm": 2.070664882659912, |
|
"learning_rate": 1.7308549210125346e-05, |
|
"loss": 1.1208, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.414423565261554, |
|
"grad_norm": 1.8656115531921387, |
|
"learning_rate": 1.7264800550460235e-05, |
|
"loss": 1.1324, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.43473844591163, |
|
"grad_norm": 1.7181984186172485, |
|
"learning_rate": 1.7220755445258847e-05, |
|
"loss": 1.1299, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.4550533265617065, |
|
"grad_norm": 2.0644876956939697, |
|
"learning_rate": 1.717641569181414e-05, |
|
"loss": 1.1057, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.4753682072117824, |
|
"grad_norm": 2.54202938079834, |
|
"learning_rate": 1.7131783099442408e-05, |
|
"loss": 1.1371, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.4956830878618588, |
|
"grad_norm": 1.8502358198165894, |
|
"learning_rate": 1.708685948940944e-05, |
|
"loss": 1.0938, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.515997968511935, |
|
"grad_norm": 2.2226638793945312, |
|
"learning_rate": 1.7041646694856237e-05, |
|
"loss": 1.1306, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.536312849162011, |
|
"grad_norm": 2.0658068656921387, |
|
"learning_rate": 1.6996146560724163e-05, |
|
"loss": 1.1465, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.5566277298120874, |
|
"grad_norm": 2.187943458557129, |
|
"learning_rate": 1.6950360943679708e-05, |
|
"loss": 1.1071, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.5769426104621633, |
|
"grad_norm": 2.104330062866211, |
|
"learning_rate": 1.690429171203869e-05, |
|
"loss": 1.1247, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.5972574911122397, |
|
"grad_norm": 2.1676745414733887, |
|
"learning_rate": 1.685794074569003e-05, |
|
"loss": 1.112, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.617572371762316, |
|
"grad_norm": 1.8994890451431274, |
|
"learning_rate": 1.6811309936019034e-05, |
|
"loss": 1.1129, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.637887252412392, |
|
"grad_norm": 1.9885021448135376, |
|
"learning_rate": 1.6764401185830248e-05, |
|
"loss": 1.1074, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.6582021330624683, |
|
"grad_norm": 1.8062092065811157, |
|
"learning_rate": 1.6717216409269756e-05, |
|
"loss": 1.1083, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.6785170137125442, |
|
"grad_norm": 2.118276834487915, |
|
"learning_rate": 1.666975753174711e-05, |
|
"loss": 1.0967, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.6988318943626206, |
|
"grad_norm": 2.219618082046509, |
|
"learning_rate": 1.6622026489856767e-05, |
|
"loss": 1.1294, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.719146775012697, |
|
"grad_norm": 2.3651201725006104, |
|
"learning_rate": 1.657402523129903e-05, |
|
"loss": 1.1303, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.739461655662773, |
|
"grad_norm": 2.132640838623047, |
|
"learning_rate": 1.6525755714800605e-05, |
|
"loss": 1.0982, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.7597765363128492, |
|
"grad_norm": 2.0165741443634033, |
|
"learning_rate": 1.6477219910034662e-05, |
|
"loss": 1.1215, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.780091416962925, |
|
"grad_norm": 2.050801992416382, |
|
"learning_rate": 1.6428419797540455e-05, |
|
"loss": 1.0938, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.8004062976130015, |
|
"grad_norm": 2.0775163173675537, |
|
"learning_rate": 1.637935736864251e-05, |
|
"loss": 1.0976, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.820721178263078, |
|
"grad_norm": 1.9527651071548462, |
|
"learning_rate": 1.6330034625369366e-05, |
|
"loss": 1.108, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.841036058913154, |
|
"grad_norm": 2.5815951824188232, |
|
"learning_rate": 1.6280453580371887e-05, |
|
"loss": 1.1187, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.86135093956323, |
|
"grad_norm": 2.0379745960235596, |
|
"learning_rate": 1.6230616256841122e-05, |
|
"loss": 1.1263, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.881665820213306, |
|
"grad_norm": 2.2415878772735596, |
|
"learning_rate": 1.618052468842576e-05, |
|
"loss": 1.13, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.9019807008633824, |
|
"grad_norm": 1.951768159866333, |
|
"learning_rate": 1.6130180919149145e-05, |
|
"loss": 1.1179, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.922295581513459, |
|
"grad_norm": 2.3024680614471436, |
|
"learning_rate": 1.607958700332584e-05, |
|
"loss": 1.0993, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.9426104621635347, |
|
"grad_norm": 1.86294686794281, |
|
"learning_rate": 1.602874500547785e-05, |
|
"loss": 1.1071, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.962925342813611, |
|
"grad_norm": 2.225184202194214, |
|
"learning_rate": 1.5977657000250332e-05, |
|
"loss": 1.1189, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.983240223463687, |
|
"grad_norm": 2.0135252475738525, |
|
"learning_rate": 1.5926325072326963e-05, |
|
"loss": 1.1019, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.1209979057312012, |
|
"eval_runtime": 55.7989, |
|
"eval_samples_per_second": 35.305, |
|
"eval_steps_per_second": 8.835, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 3.0020314880650076, |
|
"grad_norm": 2.6400208473205566, |
|
"learning_rate": 1.587475131634486e-05, |
|
"loss": 1.1055, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.022346368715084, |
|
"grad_norm": 1.8572973012924194, |
|
"learning_rate": 1.582293783680912e-05, |
|
"loss": 1.1214, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.04266124936516, |
|
"grad_norm": 2.232374668121338, |
|
"learning_rate": 1.5770886748006925e-05, |
|
"loss": 1.1016, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.0629761300152363, |
|
"grad_norm": 1.8693956136703491, |
|
"learning_rate": 1.5718600173921287e-05, |
|
"loss": 1.1011, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.083291010665312, |
|
"grad_norm": 1.9302908182144165, |
|
"learning_rate": 1.5666080248144364e-05, |
|
"loss": 1.0913, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.1036058913153886, |
|
"grad_norm": 2.3789591789245605, |
|
"learning_rate": 1.5613329113790407e-05, |
|
"loss": 1.1153, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.123920771965465, |
|
"grad_norm": 2.4649009704589844, |
|
"learning_rate": 1.5560348923408297e-05, |
|
"loss": 1.1046, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.144235652615541, |
|
"grad_norm": 1.7256394624710083, |
|
"learning_rate": 1.5507141838893714e-05, |
|
"loss": 1.1111, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.164550533265617, |
|
"grad_norm": 2.8762755393981934, |
|
"learning_rate": 1.545371003140093e-05, |
|
"loss": 1.1196, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.184865413915693, |
|
"grad_norm": 1.8774479627609253, |
|
"learning_rate": 1.540005568125419e-05, |
|
"loss": 1.0758, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.2051802945657695, |
|
"grad_norm": 2.3003156185150146, |
|
"learning_rate": 1.5346180977858766e-05, |
|
"loss": 1.1024, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.225495175215846, |
|
"grad_norm": 1.9327248334884644, |
|
"learning_rate": 1.529208811961161e-05, |
|
"loss": 1.1217, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.2458100558659218, |
|
"grad_norm": 2.096296548843384, |
|
"learning_rate": 1.5237779313811622e-05, |
|
"loss": 1.0966, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.266124936515998, |
|
"grad_norm": 1.896620750427246, |
|
"learning_rate": 1.5183256776569625e-05, |
|
"loss": 1.1019, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.286439817166074, |
|
"grad_norm": 2.3992044925689697, |
|
"learning_rate": 1.5128522732717907e-05, |
|
"loss": 1.1048, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.3067546978161504, |
|
"grad_norm": 2.170178174972534, |
|
"learning_rate": 1.5073579415719422e-05, |
|
"loss": 1.0886, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.3270695784662268, |
|
"grad_norm": 2.299849510192871, |
|
"learning_rate": 1.5018429067576691e-05, |
|
"loss": 1.093, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.3473844591163027, |
|
"grad_norm": 2.0816946029663086, |
|
"learning_rate": 1.4963073938740276e-05, |
|
"loss": 1.1009, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.367699339766379, |
|
"grad_norm": 2.180161237716675, |
|
"learning_rate": 1.4907516288016972e-05, |
|
"loss": 1.1239, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.388014220416455, |
|
"grad_norm": 2.5566606521606445, |
|
"learning_rate": 1.4851758382477632e-05, |
|
"loss": 1.1168, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.4083291010665313, |
|
"grad_norm": 2.167325019836426, |
|
"learning_rate": 1.4795802497364655e-05, |
|
"loss": 1.0941, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.4286439817166072, |
|
"grad_norm": 2.132948875427246, |
|
"learning_rate": 1.4739650915999136e-05, |
|
"loss": 1.0935, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.4489588623666836, |
|
"grad_norm": 2.1108901500701904, |
|
"learning_rate": 1.46833059296877e-05, |
|
"loss": 1.0963, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.46927374301676, |
|
"grad_norm": 2.1430904865264893, |
|
"learning_rate": 1.4626769837629012e-05, |
|
"loss": 1.1058, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.489588623666836, |
|
"grad_norm": 2.2873406410217285, |
|
"learning_rate": 1.4570044946819937e-05, |
|
"loss": 1.0925, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.5099035043169122, |
|
"grad_norm": 2.4914746284484863, |
|
"learning_rate": 1.4513133571961415e-05, |
|
"loss": 1.1094, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.5302183849669886, |
|
"grad_norm": 2.074937582015991, |
|
"learning_rate": 1.4456038035364014e-05, |
|
"loss": 1.1123, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.5505332656170645, |
|
"grad_norm": 2.091817855834961, |
|
"learning_rate": 1.4398760666853143e-05, |
|
"loss": 1.0965, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.5708481462671404, |
|
"grad_norm": 1.8256065845489502, |
|
"learning_rate": 1.434130380367401e-05, |
|
"loss": 1.1179, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 3.591163026917217, |
|
"grad_norm": 2.202068567276001, |
|
"learning_rate": 1.428366979039622e-05, |
|
"loss": 1.1102, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 3.611477907567293, |
|
"grad_norm": 1.8671810626983643, |
|
"learning_rate": 1.4225860978818134e-05, |
|
"loss": 1.089, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 3.631792788217369, |
|
"grad_norm": 2.394472599029541, |
|
"learning_rate": 1.4167879727870873e-05, |
|
"loss": 1.0968, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 3.6521076688674454, |
|
"grad_norm": 1.918837308883667, |
|
"learning_rate": 1.4109728403522088e-05, |
|
"loss": 1.0971, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 3.6724225495175213, |
|
"grad_norm": 2.170398235321045, |
|
"learning_rate": 1.405140937867938e-05, |
|
"loss": 1.0953, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 3.6927374301675977, |
|
"grad_norm": 1.9695782661437988, |
|
"learning_rate": 1.3992925033093514e-05, |
|
"loss": 1.0954, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 3.713052310817674, |
|
"grad_norm": 2.0776660442352295, |
|
"learning_rate": 1.3934277753261274e-05, |
|
"loss": 1.0989, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 3.73336719146775, |
|
"grad_norm": 2.3341407775878906, |
|
"learning_rate": 1.3875469932328104e-05, |
|
"loss": 1.1108, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 3.7536820721178263, |
|
"grad_norm": 2.07391095161438, |
|
"learning_rate": 1.3816503969990444e-05, |
|
"loss": 1.1194, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.7739969527679023, |
|
"grad_norm": 1.9829392433166504, |
|
"learning_rate": 1.3757382272397805e-05, |
|
"loss": 1.0695, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 3.7943118334179786, |
|
"grad_norm": 2.744811534881592, |
|
"learning_rate": 1.3698107252054604e-05, |
|
"loss": 1.0825, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 3.814626714068055, |
|
"grad_norm": 2.293558120727539, |
|
"learning_rate": 1.3638681327721686e-05, |
|
"loss": 1.0942, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 3.834941594718131, |
|
"grad_norm": 2.2765190601348877, |
|
"learning_rate": 1.357910692431766e-05, |
|
"loss": 1.0907, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 3.8552564753682073, |
|
"grad_norm": 2.5928955078125, |
|
"learning_rate": 1.3519386472819927e-05, |
|
"loss": 1.0911, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 3.875571356018283, |
|
"grad_norm": 2.018359661102295, |
|
"learning_rate": 1.3459522410165488e-05, |
|
"loss": 1.0965, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 3.8958862366683595, |
|
"grad_norm": 3.01343035697937, |
|
"learning_rate": 1.3399517179151504e-05, |
|
"loss": 1.0946, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 3.916201117318436, |
|
"grad_norm": 2.592369794845581, |
|
"learning_rate": 1.3339373228335614e-05, |
|
"loss": 1.0901, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 3.936515997968512, |
|
"grad_norm": 2.4442763328552246, |
|
"learning_rate": 1.3279093011936021e-05, |
|
"loss": 1.0906, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 3.956830878618588, |
|
"grad_norm": 2.2360129356384277, |
|
"learning_rate": 1.3218678989731352e-05, |
|
"loss": 1.0988, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.977145759268664, |
|
"grad_norm": 2.1769418716430664, |
|
"learning_rate": 1.3158133626960261e-05, |
|
"loss": 1.0978, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 3.9974606399187405, |
|
"grad_norm": 2.4375362396240234, |
|
"learning_rate": 1.3097459394220867e-05, |
|
"loss": 1.0874, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 1.1085847616195679, |
|
"eval_runtime": 55.3659, |
|
"eval_samples_per_second": 35.581, |
|
"eval_steps_per_second": 8.904, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 4.016251904520061, |
|
"grad_norm": 2.620096445083618, |
|
"learning_rate": 1.3036658767369919e-05, |
|
"loss": 1.1006, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.0365667851701375, |
|
"grad_norm": 2.652712345123291, |
|
"learning_rate": 1.2975734227421768e-05, |
|
"loss": 1.0797, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.056881665820213, |
|
"grad_norm": 2.4038774967193604, |
|
"learning_rate": 1.291468826044713e-05, |
|
"loss": 1.1085, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.077196546470289, |
|
"grad_norm": 2.6778948307037354, |
|
"learning_rate": 1.2853523357471644e-05, |
|
"loss": 1.0818, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.097511427120366, |
|
"grad_norm": 2.2972545623779297, |
|
"learning_rate": 1.2792242014374216e-05, |
|
"loss": 1.0921, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.117826307770442, |
|
"grad_norm": 2.1259145736694336, |
|
"learning_rate": 1.273084673178518e-05, |
|
"loss": 1.0911, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.138141188420518, |
|
"grad_norm": 2.6703901290893555, |
|
"learning_rate": 1.2669340014984252e-05, |
|
"loss": 1.0939, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.158456069070594, |
|
"grad_norm": 2.7287003993988037, |
|
"learning_rate": 1.260772437379831e-05, |
|
"loss": 1.0896, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.17877094972067, |
|
"grad_norm": 2.5945608615875244, |
|
"learning_rate": 1.254600232249896e-05, |
|
"loss": 1.0973, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.199085830370747, |
|
"grad_norm": 2.0844831466674805, |
|
"learning_rate": 1.248417637969996e-05, |
|
"loss": 1.0883, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.219400711020823, |
|
"grad_norm": 2.237929105758667, |
|
"learning_rate": 1.2422249068254433e-05, |
|
"loss": 1.0963, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.239715591670899, |
|
"grad_norm": 2.147749662399292, |
|
"learning_rate": 1.2360222915151929e-05, |
|
"loss": 1.0939, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.260030472320975, |
|
"grad_norm": 2.0512821674346924, |
|
"learning_rate": 1.22981004514153e-05, |
|
"loss": 1.0984, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.280345352971051, |
|
"grad_norm": 2.3941662311553955, |
|
"learning_rate": 1.223588421199743e-05, |
|
"loss": 1.1019, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.3006602336211275, |
|
"grad_norm": 2.5357017517089844, |
|
"learning_rate": 1.2173576735677775e-05, |
|
"loss": 1.1074, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.320975114271204, |
|
"grad_norm": 2.3033499717712402, |
|
"learning_rate": 1.2111180564958796e-05, |
|
"loss": 1.0841, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.34128999492128, |
|
"grad_norm": 2.249016046524048, |
|
"learning_rate": 1.2048698245962184e-05, |
|
"loss": 1.0746, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.361604875571356, |
|
"grad_norm": 2.2466604709625244, |
|
"learning_rate": 1.1986132328324986e-05, |
|
"loss": 1.105, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.381919756221432, |
|
"grad_norm": 2.7963321208953857, |
|
"learning_rate": 1.192348536509554e-05, |
|
"loss": 1.0876, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.402234636871508, |
|
"grad_norm": 2.159446954727173, |
|
"learning_rate": 1.1860759912629311e-05, |
|
"loss": 1.1072, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.422549517521585, |
|
"grad_norm": 2.521923542022705, |
|
"learning_rate": 1.179795853048458e-05, |
|
"loss": 1.1047, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.442864398171661, |
|
"grad_norm": 2.2701151371002197, |
|
"learning_rate": 1.1735083781317992e-05, |
|
"loss": 1.0879, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.463179278821737, |
|
"grad_norm": 2.706636667251587, |
|
"learning_rate": 1.167213823077999e-05, |
|
"loss": 1.0793, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.483494159471813, |
|
"grad_norm": 2.425516128540039, |
|
"learning_rate": 1.1609124447410114e-05, |
|
"loss": 1.093, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.503809040121889, |
|
"grad_norm": 2.4822216033935547, |
|
"learning_rate": 1.1546045002532202e-05, |
|
"loss": 1.0961, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.524123920771966, |
|
"grad_norm": 2.3154852390289307, |
|
"learning_rate": 1.148290247014945e-05, |
|
"loss": 1.0861, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.544438801422041, |
|
"grad_norm": 2.016235589981079, |
|
"learning_rate": 1.1419699426839393e-05, |
|
"loss": 1.0913, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.5647536820721175, |
|
"grad_norm": 2.701549530029297, |
|
"learning_rate": 1.1356438451648754e-05, |
|
"loss": 1.0918, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.585068562722194, |
|
"grad_norm": 2.02785587310791, |
|
"learning_rate": 1.1293122125988211e-05, |
|
"loss": 1.1, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.60538344337227, |
|
"grad_norm": 2.196712017059326, |
|
"learning_rate": 1.122975303352706e-05, |
|
"loss": 1.0657, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.625698324022347, |
|
"grad_norm": 2.8437767028808594, |
|
"learning_rate": 1.1166333760087784e-05, |
|
"loss": 1.1043, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.646013204672423, |
|
"grad_norm": 2.5000803470611572, |
|
"learning_rate": 1.1102866893540537e-05, |
|
"loss": 1.0855, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.666328085322498, |
|
"grad_norm": 2.488802194595337, |
|
"learning_rate": 1.1039355023697547e-05, |
|
"loss": 1.0792, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.686642965972575, |
|
"grad_norm": 2.04819655418396, |
|
"learning_rate": 1.097580074220743e-05, |
|
"loss": 1.0746, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.706957846622651, |
|
"grad_norm": 2.619755506515503, |
|
"learning_rate": 1.091220664244945e-05, |
|
"loss": 1.0807, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.7272727272727275, |
|
"grad_norm": 2.009214401245117, |
|
"learning_rate": 1.0848575319427673e-05, |
|
"loss": 1.0881, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 4.747587607922803, |
|
"grad_norm": 2.155071258544922, |
|
"learning_rate": 1.0784909369665092e-05, |
|
"loss": 1.0879, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 4.767902488572879, |
|
"grad_norm": 2.4336414337158203, |
|
"learning_rate": 1.0721211391097675e-05, |
|
"loss": 1.0915, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 4.788217369222956, |
|
"grad_norm": 2.942394733428955, |
|
"learning_rate": 1.0657483982968341e-05, |
|
"loss": 1.0753, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 4.808532249873032, |
|
"grad_norm": 2.1195554733276367, |
|
"learning_rate": 1.0593729745720913e-05, |
|
"loss": 1.067, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 4.828847130523108, |
|
"grad_norm": 2.387273073196411, |
|
"learning_rate": 1.0529951280893986e-05, |
|
"loss": 1.0828, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 4.849162011173185, |
|
"grad_norm": 2.598560094833374, |
|
"learning_rate": 1.0466151191014784e-05, |
|
"loss": 1.0905, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 4.86947689182326, |
|
"grad_norm": 2.338967800140381, |
|
"learning_rate": 1.0402332079492959e-05, |
|
"loss": 1.0888, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 4.889791772473337, |
|
"grad_norm": 2.038918972015381, |
|
"learning_rate": 1.033849655051435e-05, |
|
"loss": 1.0713, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 4.910106653123413, |
|
"grad_norm": 2.3864147663116455, |
|
"learning_rate": 1.0274647208934732e-05, |
|
"loss": 1.0997, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 4.930421533773489, |
|
"grad_norm": 2.313906669616699, |
|
"learning_rate": 1.02107866601735e-05, |
|
"loss": 1.088, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 4.950736414423565, |
|
"grad_norm": 1.9792237281799316, |
|
"learning_rate": 1.0146917510107377e-05, |
|
"loss": 1.0792, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 4.971051295073641, |
|
"grad_norm": 2.2212326526641846, |
|
"learning_rate": 1.0083042364964062e-05, |
|
"loss": 1.0804, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 4.9913661757237175, |
|
"grad_norm": 2.190819501876831, |
|
"learning_rate": 1.0019163831215894e-05, |
|
"loss": 1.0815, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 1.1006184816360474, |
|
"eval_runtime": 54.9569, |
|
"eval_samples_per_second": 35.846, |
|
"eval_steps_per_second": 8.971, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 5.010157440325038, |
|
"grad_norm": 2.375826597213745, |
|
"learning_rate": 9.955284515473478e-06, |
|
"loss": 1.0816, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 5.0304723209751145, |
|
"grad_norm": 2.3722262382507324, |
|
"learning_rate": 9.89140702437934e-06, |
|
"loss": 1.081, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 5.05078720162519, |
|
"grad_norm": 2.717057943344116, |
|
"learning_rate": 9.827533964501537e-06, |
|
"loss": 1.1021, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 5.071102082275266, |
|
"grad_norm": 2.2786624431610107, |
|
"learning_rate": 9.763667942227323e-06, |
|
"loss": 1.0983, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 5.091416962925343, |
|
"grad_norm": 2.0201127529144287, |
|
"learning_rate": 9.699811563656758e-06, |
|
"loss": 1.0694, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 5.111731843575419, |
|
"grad_norm": 2.4694550037384033, |
|
"learning_rate": 9.635967434496396e-06, |
|
"loss": 1.0899, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 5.1320467242254955, |
|
"grad_norm": 2.8942248821258545, |
|
"learning_rate": 9.572138159952945e-06, |
|
"loss": 1.099, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 5.152361604875571, |
|
"grad_norm": 2.079746723175049, |
|
"learning_rate": 9.508326344626955e-06, |
|
"loss": 1.0789, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 5.172676485525647, |
|
"grad_norm": 2.178121566772461, |
|
"learning_rate": 9.444534592406541e-06, |
|
"loss": 1.0763, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 5.192991366175724, |
|
"grad_norm": 2.4157040119171143, |
|
"learning_rate": 9.380765506361132e-06, |
|
"loss": 1.0735, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 5.2133062468258, |
|
"grad_norm": 2.3019464015960693, |
|
"learning_rate": 9.317021688635236e-06, |
|
"loss": 1.0746, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 5.233621127475876, |
|
"grad_norm": 2.6220922470092773, |
|
"learning_rate": 9.253305740342287e-06, |
|
"loss": 1.0663, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 5.253936008125952, |
|
"grad_norm": 2.5131311416625977, |
|
"learning_rate": 9.189620261458467e-06, |
|
"loss": 1.0616, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 5.274250888776028, |
|
"grad_norm": 2.5169596672058105, |
|
"learning_rate": 9.125967850716648e-06, |
|
"loss": 1.0709, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 5.294565769426105, |
|
"grad_norm": 2.525052309036255, |
|
"learning_rate": 9.062351105500316e-06, |
|
"loss": 1.07, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 5.314880650076181, |
|
"grad_norm": 2.4557714462280273, |
|
"learning_rate": 8.998772621737609e-06, |
|
"loss": 1.0802, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 5.335195530726257, |
|
"grad_norm": 2.4241530895233154, |
|
"learning_rate": 8.935234993795378e-06, |
|
"loss": 1.0765, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 5.355510411376333, |
|
"grad_norm": 2.6684749126434326, |
|
"learning_rate": 8.87174081437332e-06, |
|
"loss": 1.0793, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 5.375825292026409, |
|
"grad_norm": 2.0009050369262695, |
|
"learning_rate": 8.808292674398186e-06, |
|
"loss": 1.0584, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 5.3961401726764855, |
|
"grad_norm": 2.189467668533325, |
|
"learning_rate": 8.744893162918045e-06, |
|
"loss": 1.1002, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 5.416455053326562, |
|
"grad_norm": 2.2791178226470947, |
|
"learning_rate": 8.681544866996657e-06, |
|
"loss": 1.0928, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 5.436769933976638, |
|
"grad_norm": 2.332437753677368, |
|
"learning_rate": 8.618250371607879e-06, |
|
"loss": 1.0715, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 5.457084814626714, |
|
"grad_norm": 2.518786907196045, |
|
"learning_rate": 8.555012259530215e-06, |
|
"loss": 1.074, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 5.47739969527679, |
|
"grad_norm": 2.394573211669922, |
|
"learning_rate": 8.491833111241392e-06, |
|
"loss": 1.0846, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 5.497714575926866, |
|
"grad_norm": 2.2103583812713623, |
|
"learning_rate": 8.428715504813084e-06, |
|
"loss": 1.0952, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 5.518029456576943, |
|
"grad_norm": 2.51759934425354, |
|
"learning_rate": 8.365662015805694e-06, |
|
"loss": 1.0918, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 5.538344337227019, |
|
"grad_norm": 2.085672616958618, |
|
"learning_rate": 8.302675217163279e-06, |
|
"loss": 1.0726, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 5.558659217877095, |
|
"grad_norm": 2.341189384460449, |
|
"learning_rate": 8.23975767910854e-06, |
|
"loss": 1.0762, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 5.578974098527171, |
|
"grad_norm": 2.3695602416992188, |
|
"learning_rate": 8.176911969037951e-06, |
|
"loss": 1.0794, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 5.599288979177247, |
|
"grad_norm": 2.8153765201568604, |
|
"learning_rate": 8.11414065141699e-06, |
|
"loss": 1.0823, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 5.619603859827324, |
|
"grad_norm": 2.3020527362823486, |
|
"learning_rate": 8.051446287675499e-06, |
|
"loss": 1.064, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 5.6399187404774, |
|
"grad_norm": 2.436936378479004, |
|
"learning_rate": 7.988831436103157e-06, |
|
"loss": 1.0911, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 5.6602336211274755, |
|
"grad_norm": 2.706698417663574, |
|
"learning_rate": 7.926298651745095e-06, |
|
"loss": 1.1105, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 5.680548501777552, |
|
"grad_norm": 2.290712594985962, |
|
"learning_rate": 7.863850486297622e-06, |
|
"loss": 1.0668, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 5.700863382427628, |
|
"grad_norm": 2.3723671436309814, |
|
"learning_rate": 7.801489488004124e-06, |
|
"loss": 1.0701, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 5.721178263077705, |
|
"grad_norm": 2.1805179119110107, |
|
"learning_rate": 7.739218201551054e-06, |
|
"loss": 1.0718, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 5.741493143727781, |
|
"grad_norm": 2.39038348197937, |
|
"learning_rate": 7.677039167964114e-06, |
|
"loss": 1.075, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 5.761808024377856, |
|
"grad_norm": 2.164767265319824, |
|
"learning_rate": 7.614954924504553e-06, |
|
"loss": 1.0857, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 5.782122905027933, |
|
"grad_norm": 2.3338029384613037, |
|
"learning_rate": 7.552968004565653e-06, |
|
"loss": 1.0692, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 5.802437785678009, |
|
"grad_norm": 2.186400890350342, |
|
"learning_rate": 7.491080937569321e-06, |
|
"loss": 1.0759, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 5.8227526663280855, |
|
"grad_norm": 2.548473834991455, |
|
"learning_rate": 7.429296248862907e-06, |
|
"loss": 1.0799, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 5.843067546978162, |
|
"grad_norm": 2.4101195335388184, |
|
"learning_rate": 7.367616459616127e-06, |
|
"loss": 1.0772, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 5.863382427628237, |
|
"grad_norm": 2.5707526206970215, |
|
"learning_rate": 7.306044086718207e-06, |
|
"loss": 1.0772, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 5.883697308278314, |
|
"grad_norm": 2.447829246520996, |
|
"learning_rate": 7.244581642675159e-06, |
|
"loss": 1.0823, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 5.90401218892839, |
|
"grad_norm": 2.3447105884552, |
|
"learning_rate": 7.1832316355072785e-06, |
|
"loss": 1.0927, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 5.924327069578466, |
|
"grad_norm": 2.4636731147766113, |
|
"learning_rate": 7.121996568646786e-06, |
|
"loss": 1.0963, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 5.944641950228542, |
|
"grad_norm": 2.2715587615966797, |
|
"learning_rate": 7.060878940835674e-06, |
|
"loss": 1.1112, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 5.964956830878618, |
|
"grad_norm": 2.37288236618042, |
|
"learning_rate": 6.999881246023754e-06, |
|
"loss": 1.0959, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 5.985271711528695, |
|
"grad_norm": 2.28794264793396, |
|
"learning_rate": 6.9390059732668835e-06, |
|
"loss": 1.0642, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 1.0949872732162476, |
|
"eval_runtime": 54.7572, |
|
"eval_samples_per_second": 35.977, |
|
"eval_steps_per_second": 9.003, |
|
"step": 2958 |
|
}, |
|
{ |
|
"epoch": 6.004062976130015, |
|
"grad_norm": 2.5767059326171875, |
|
"learning_rate": 6.878255606625395e-06, |
|
"loss": 1.0996, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 6.024377856780092, |
|
"grad_norm": 2.55753755569458, |
|
"learning_rate": 6.817632625062735e-06, |
|
"loss": 1.0868, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 6.044692737430168, |
|
"grad_norm": 2.5268876552581787, |
|
"learning_rate": 6.757139502344312e-06, |
|
"loss": 1.0957, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 6.0650076180802435, |
|
"grad_norm": 2.596231698989868, |
|
"learning_rate": 6.696778706936542e-06, |
|
"loss": 1.078, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 6.08532249873032, |
|
"grad_norm": 2.4920766353607178, |
|
"learning_rate": 6.636552701906135e-06, |
|
"loss": 1.0653, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 6.105637379380396, |
|
"grad_norm": 2.75101900100708, |
|
"learning_rate": 6.576463944819576e-06, |
|
"loss": 1.1024, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 6.1259522600304726, |
|
"grad_norm": 2.3427553176879883, |
|
"learning_rate": 6.516514887642846e-06, |
|
"loss": 1.0864, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 6.146267140680549, |
|
"grad_norm": 2.1833336353302, |
|
"learning_rate": 6.456707976641368e-06, |
|
"loss": 1.066, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 6.166582021330624, |
|
"grad_norm": 2.4288933277130127, |
|
"learning_rate": 6.397045652280192e-06, |
|
"loss": 1.0787, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 6.186896901980701, |
|
"grad_norm": 2.3094112873077393, |
|
"learning_rate": 6.3375303491243925e-06, |
|
"loss": 1.0703, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 6.207211782630777, |
|
"grad_norm": 2.3378853797912598, |
|
"learning_rate": 6.278164495739749e-06, |
|
"loss": 1.1107, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 6.2275266632808535, |
|
"grad_norm": 2.429354667663574, |
|
"learning_rate": 6.21895051459362e-06, |
|
"loss": 1.0632, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 6.24784154393093, |
|
"grad_norm": 2.5030083656311035, |
|
"learning_rate": 6.159890821956118e-06, |
|
"loss": 1.0794, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 6.268156424581005, |
|
"grad_norm": 2.2454118728637695, |
|
"learning_rate": 6.100987827801486e-06, |
|
"loss": 1.0555, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 6.288471305231082, |
|
"grad_norm": 2.3601973056793213, |
|
"learning_rate": 6.042243935709782e-06, |
|
"loss": 1.0758, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 6.308786185881158, |
|
"grad_norm": 1.9250520467758179, |
|
"learning_rate": 5.983661542768777e-06, |
|
"loss": 1.066, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 6.329101066531234, |
|
"grad_norm": 2.4363834857940674, |
|
"learning_rate": 5.9252430394761625e-06, |
|
"loss": 1.089, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 6.34941594718131, |
|
"grad_norm": 2.1766016483306885, |
|
"learning_rate": 5.866990809641978e-06, |
|
"loss": 1.0695, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 6.369730827831386, |
|
"grad_norm": 2.575039863586426, |
|
"learning_rate": 5.808907230291359e-06, |
|
"loss": 1.0874, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 6.390045708481463, |
|
"grad_norm": 2.4749789237976074, |
|
"learning_rate": 5.75099467156754e-06, |
|
"loss": 1.1004, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 6.410360589131539, |
|
"grad_norm": 2.669945240020752, |
|
"learning_rate": 5.6932554966351185e-06, |
|
"loss": 1.0756, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 6.430675469781615, |
|
"grad_norm": 2.250049114227295, |
|
"learning_rate": 5.635692061583651e-06, |
|
"loss": 1.074, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 6.450990350431692, |
|
"grad_norm": 2.271474599838257, |
|
"learning_rate": 5.578306715331494e-06, |
|
"loss": 1.0881, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 6.471305231081767, |
|
"grad_norm": 2.2663955688476562, |
|
"learning_rate": 5.521101799529965e-06, |
|
"loss": 1.0601, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 6.4916201117318435, |
|
"grad_norm": 2.209956407546997, |
|
"learning_rate": 5.464079648467769e-06, |
|
"loss": 1.0569, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 6.51193499238192, |
|
"grad_norm": 2.395031213760376, |
|
"learning_rate": 5.407242588975777e-06, |
|
"loss": 1.0687, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 6.532249873031996, |
|
"grad_norm": 2.570211410522461, |
|
"learning_rate": 5.3505929403320575e-06, |
|
"loss": 1.064, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 6.552564753682072, |
|
"grad_norm": 2.569349765777588, |
|
"learning_rate": 5.294133014167242e-06, |
|
"loss": 1.0752, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 6.572879634332148, |
|
"grad_norm": 2.717036008834839, |
|
"learning_rate": 5.237865114370192e-06, |
|
"loss": 1.06, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 6.593194514982224, |
|
"grad_norm": 2.299722671508789, |
|
"learning_rate": 5.18179153699399e-06, |
|
"loss": 1.0828, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 6.613509395632301, |
|
"grad_norm": 2.2542483806610107, |
|
"learning_rate": 5.125914570162259e-06, |
|
"loss": 1.0829, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 6.633824276282377, |
|
"grad_norm": 2.26954984664917, |
|
"learning_rate": 5.070236493975777e-06, |
|
"loss": 1.0877, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 6.6541391569324535, |
|
"grad_norm": 2.2126009464263916, |
|
"learning_rate": 5.014759580419448e-06, |
|
"loss": 1.0676, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 6.674454037582529, |
|
"grad_norm": 2.658097505569458, |
|
"learning_rate": 4.959486093269577e-06, |
|
"loss": 1.0746, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 6.694768918232605, |
|
"grad_norm": 2.2925167083740234, |
|
"learning_rate": 4.904418288001511e-06, |
|
"loss": 1.0689, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 6.715083798882682, |
|
"grad_norm": 2.2561416625976562, |
|
"learning_rate": 4.849558411697604e-06, |
|
"loss": 1.0787, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 6.735398679532758, |
|
"grad_norm": 2.590914726257324, |
|
"learning_rate": 4.794908702955496e-06, |
|
"loss": 1.0673, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 6.7557135601828335, |
|
"grad_norm": 2.577042579650879, |
|
"learning_rate": 4.740471391796807e-06, |
|
"loss": 1.0841, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 6.77602844083291, |
|
"grad_norm": 2.53352689743042, |
|
"learning_rate": 4.686248699576098e-06, |
|
"loss": 1.0603, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 6.796343321482986, |
|
"grad_norm": 2.2674384117126465, |
|
"learning_rate": 4.632242838890257e-06, |
|
"loss": 1.0651, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 6.816658202133063, |
|
"grad_norm": 2.618176221847534, |
|
"learning_rate": 4.578456013488206e-06, |
|
"loss": 1.0714, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 6.836973082783139, |
|
"grad_norm": 2.610429286956787, |
|
"learning_rate": 4.524890418180954e-06, |
|
"loss": 1.1131, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 6.8572879634332145, |
|
"grad_norm": 2.254110813140869, |
|
"learning_rate": 4.471548238752068e-06, |
|
"loss": 1.0697, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 6.877602844083291, |
|
"grad_norm": 2.650230884552002, |
|
"learning_rate": 4.4184316518684625e-06, |
|
"loss": 1.0892, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 6.897917724733367, |
|
"grad_norm": 2.45845890045166, |
|
"learning_rate": 4.365542824991581e-06, |
|
"loss": 1.0798, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 6.9182326053834435, |
|
"grad_norm": 2.395923376083374, |
|
"learning_rate": 4.312883916288947e-06, |
|
"loss": 1.0596, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 6.93854748603352, |
|
"grad_norm": 2.744338274002075, |
|
"learning_rate": 4.26045707454611e-06, |
|
"loss": 1.0624, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 6.958862366683595, |
|
"grad_norm": 2.431194543838501, |
|
"learning_rate": 4.2082644390789565e-06, |
|
"loss": 1.0762, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 6.979177247333672, |
|
"grad_norm": 2.344907522201538, |
|
"learning_rate": 4.156308139646417e-06, |
|
"loss": 1.0716, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 6.999492127983748, |
|
"grad_norm": 2.289046049118042, |
|
"learning_rate": 4.1045902963635355e-06, |
|
"loss": 1.0749, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 1.0920889377593994, |
|
"eval_runtime": 54.5848, |
|
"eval_samples_per_second": 36.091, |
|
"eval_steps_per_second": 9.032, |
|
"step": 3451 |
|
}, |
|
{ |
|
"epoch": 7.018283392585069, |
|
"grad_norm": 2.9955337047576904, |
|
"learning_rate": 4.053113019615001e-06, |
|
"loss": 1.0685, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 7.038598273235145, |
|
"grad_norm": 2.43476939201355, |
|
"learning_rate": 4.001878409969e-06, |
|
"loss": 1.0741, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 7.058913153885221, |
|
"grad_norm": 2.4879932403564453, |
|
"learning_rate": 3.95088855809151e-06, |
|
"loss": 1.0906, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 7.079228034535297, |
|
"grad_norm": 2.2968192100524902, |
|
"learning_rate": 3.900145544660981e-06, |
|
"loss": 1.0646, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 7.099542915185373, |
|
"grad_norm": 2.6439547538757324, |
|
"learning_rate": 3.849651440283443e-06, |
|
"loss": 1.0802, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 7.11985779583545, |
|
"grad_norm": 2.3785431385040283, |
|
"learning_rate": 3.7994083054080123e-06, |
|
"loss": 1.067, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 7.140172676485526, |
|
"grad_norm": 2.7672765254974365, |
|
"learning_rate": 3.7494181902428094e-06, |
|
"loss": 1.082, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 7.1604875571356015, |
|
"grad_norm": 2.634470224380493, |
|
"learning_rate": 3.6996831346712892e-06, |
|
"loss": 1.0749, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 7.180802437785678, |
|
"grad_norm": 2.251033067703247, |
|
"learning_rate": 3.6502051681690266e-06, |
|
"loss": 1.0831, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 7.201117318435754, |
|
"grad_norm": 2.855700969696045, |
|
"learning_rate": 3.600986309720873e-06, |
|
"loss": 1.0765, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 7.221432199085831, |
|
"grad_norm": 2.3868894577026367, |
|
"learning_rate": 3.552028567738596e-06, |
|
"loss": 1.0614, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 7.241747079735907, |
|
"grad_norm": 2.3770251274108887, |
|
"learning_rate": 3.5033339399789125e-06, |
|
"loss": 1.0764, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 7.262061960385982, |
|
"grad_norm": 2.417048215866089, |
|
"learning_rate": 3.4549044134619603e-06, |
|
"loss": 1.0607, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 7.282376841036059, |
|
"grad_norm": 2.5006279945373535, |
|
"learning_rate": 3.4067419643902367e-06, |
|
"loss": 1.0481, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 7.302691721686135, |
|
"grad_norm": 2.4641923904418945, |
|
"learning_rate": 3.3588485580679465e-06, |
|
"loss": 1.0755, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 7.3230066023362115, |
|
"grad_norm": 2.6034882068634033, |
|
"learning_rate": 3.3112261488207962e-06, |
|
"loss": 1.068, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 7.343321482986288, |
|
"grad_norm": 2.483919858932495, |
|
"learning_rate": 3.263876679916267e-06, |
|
"loss": 1.0659, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 7.363636363636363, |
|
"grad_norm": 2.473473072052002, |
|
"learning_rate": 3.216802083484307e-06, |
|
"loss": 1.0953, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 7.38395124428644, |
|
"grad_norm": 2.57886004447937, |
|
"learning_rate": 3.1700042804384924e-06, |
|
"loss": 1.0537, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 7.404266124936516, |
|
"grad_norm": 2.4494612216949463, |
|
"learning_rate": 3.1234851803976352e-06, |
|
"loss": 1.0834, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 7.424581005586592, |
|
"grad_norm": 2.679572105407715, |
|
"learning_rate": 3.077246681607865e-06, |
|
"loss": 1.0538, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 7.444895886236669, |
|
"grad_norm": 2.955970287322998, |
|
"learning_rate": 3.03129067086518e-06, |
|
"loss": 1.0855, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 7.465210766886744, |
|
"grad_norm": 2.511077404022217, |
|
"learning_rate": 2.98561902343844e-06, |
|
"loss": 1.094, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 7.485525647536821, |
|
"grad_norm": 3.1838951110839844, |
|
"learning_rate": 2.9402336029928526e-06, |
|
"loss": 1.0813, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 7.505840528186897, |
|
"grad_norm": 2.397426128387451, |
|
"learning_rate": 2.8951362615139167e-06, |
|
"loss": 1.0666, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 7.526155408836973, |
|
"grad_norm": 2.655226230621338, |
|
"learning_rate": 2.8503288392318627e-06, |
|
"loss": 1.0586, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 7.546470289487049, |
|
"grad_norm": 2.5811400413513184, |
|
"learning_rate": 2.8058131645465546e-06, |
|
"loss": 1.0763, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 7.566785170137125, |
|
"grad_norm": 2.3530173301696777, |
|
"learning_rate": 2.7615910539528824e-06, |
|
"loss": 1.069, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 7.5871000507872015, |
|
"grad_norm": 2.2900733947753906, |
|
"learning_rate": 2.717664311966627e-06, |
|
"loss": 1.0621, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 7.607414931437278, |
|
"grad_norm": 2.464092493057251, |
|
"learning_rate": 2.6740347310508487e-06, |
|
"loss": 1.0622, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 7.627729812087354, |
|
"grad_norm": 2.5965607166290283, |
|
"learning_rate": 2.6307040915427196e-06, |
|
"loss": 1.0682, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 7.648044692737431, |
|
"grad_norm": 2.3087034225463867, |
|
"learning_rate": 2.5876741615809007e-06, |
|
"loss": 1.0653, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 7.668359573387506, |
|
"grad_norm": 2.2895405292510986, |
|
"learning_rate": 2.5449466970333658e-06, |
|
"loss": 1.0716, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 7.688674454037582, |
|
"grad_norm": 2.888216972351074, |
|
"learning_rate": 2.5025234414257717e-06, |
|
"loss": 1.0812, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 7.708989334687659, |
|
"grad_norm": 2.578557014465332, |
|
"learning_rate": 2.460406125870305e-06, |
|
"loss": 1.0694, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 7.729304215337735, |
|
"grad_norm": 2.9586007595062256, |
|
"learning_rate": 2.418596468995044e-06, |
|
"loss": 1.0819, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 7.749619095987811, |
|
"grad_norm": 2.469660520553589, |
|
"learning_rate": 2.3770961768738222e-06, |
|
"loss": 1.0815, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 7.769933976637887, |
|
"grad_norm": 2.530141592025757, |
|
"learning_rate": 2.3359069429566216e-06, |
|
"loss": 1.0876, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 7.790248857287963, |
|
"grad_norm": 2.477205514907837, |
|
"learning_rate": 2.295030448000466e-06, |
|
"loss": 1.0533, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 7.81056373793804, |
|
"grad_norm": 2.493910789489746, |
|
"learning_rate": 2.2544683600008345e-06, |
|
"loss": 1.0663, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 7.830878618588116, |
|
"grad_norm": 2.667234182357788, |
|
"learning_rate": 2.214222334123596e-06, |
|
"loss": 1.0802, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 7.851193499238192, |
|
"grad_norm": 2.600020408630371, |
|
"learning_rate": 2.17429401263747e-06, |
|
"loss": 1.1002, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 7.871508379888268, |
|
"grad_norm": 2.2591912746429443, |
|
"learning_rate": 2.134685024847023e-06, |
|
"loss": 1.0694, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 7.891823260538344, |
|
"grad_norm": 2.7257189750671387, |
|
"learning_rate": 2.0953969870261704e-06, |
|
"loss": 1.0684, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 7.912138141188421, |
|
"grad_norm": 2.2915289402008057, |
|
"learning_rate": 2.0564315023522218e-06, |
|
"loss": 1.0601, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 7.932453021838497, |
|
"grad_norm": 2.529210090637207, |
|
"learning_rate": 2.017790160840478e-06, |
|
"loss": 1.0902, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 7.9527679024885725, |
|
"grad_norm": 2.3367769718170166, |
|
"learning_rate": 1.9794745392793344e-06, |
|
"loss": 1.0629, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 7.973082783138649, |
|
"grad_norm": 2.5318503379821777, |
|
"learning_rate": 1.9414862011659462e-06, |
|
"loss": 1.0851, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 7.993397663788725, |
|
"grad_norm": 2.2982535362243652, |
|
"learning_rate": 1.9038266966424213e-06, |
|
"loss": 1.0725, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 1.0901824235916138, |
|
"eval_runtime": 55.0806, |
|
"eval_samples_per_second": 35.766, |
|
"eval_steps_per_second": 8.951, |
|
"step": 3944 |
|
}, |
|
{ |
|
"epoch": 8.012188928390046, |
|
"grad_norm": 2.5969467163085938, |
|
"learning_rate": 1.8664975624325743e-06, |
|
"loss": 1.0801, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 8.032503809040122, |
|
"grad_norm": 2.4681787490844727, |
|
"learning_rate": 1.8295003217792206e-06, |
|
"loss": 1.0732, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 8.052818689690199, |
|
"grad_norm": 2.256989002227783, |
|
"learning_rate": 1.7928364843820034e-06, |
|
"loss": 1.0615, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 8.073133570340275, |
|
"grad_norm": 2.6549007892608643, |
|
"learning_rate": 1.756507546335814e-06, |
|
"loss": 1.0692, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 8.093448450990351, |
|
"grad_norm": 2.311284065246582, |
|
"learning_rate": 1.7205149900697138e-06, |
|
"loss": 1.0669, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 8.113763331640426, |
|
"grad_norm": 2.461761236190796, |
|
"learning_rate": 1.6848602842864726e-06, |
|
"loss": 1.0617, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 8.134078212290502, |
|
"grad_norm": 2.347125291824341, |
|
"learning_rate": 1.6495448839026128e-06, |
|
"loss": 1.063, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 8.154393092940579, |
|
"grad_norm": 2.2828891277313232, |
|
"learning_rate": 1.6145702299890576e-06, |
|
"loss": 1.1011, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 8.174707973590655, |
|
"grad_norm": 2.536144256591797, |
|
"learning_rate": 1.5799377497123104e-06, |
|
"loss": 1.0696, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 8.195022854240731, |
|
"grad_norm": 2.7368884086608887, |
|
"learning_rate": 1.545648856276234e-06, |
|
"loss": 1.0463, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 8.215337734890808, |
|
"grad_norm": 2.709357976913452, |
|
"learning_rate": 1.5117049488643787e-06, |
|
"loss": 1.0894, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 8.235652615540884, |
|
"grad_norm": 2.5076632499694824, |
|
"learning_rate": 1.4781074125828799e-06, |
|
"loss": 1.0951, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 8.25596749619096, |
|
"grad_norm": 2.656967878341675, |
|
"learning_rate": 1.4448576184039442e-06, |
|
"loss": 1.0818, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 8.276282376841037, |
|
"grad_norm": 2.170839786529541, |
|
"learning_rate": 1.4119569231099107e-06, |
|
"loss": 1.0555, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 8.296597257491113, |
|
"grad_norm": 2.704313278198242, |
|
"learning_rate": 1.3794066692378793e-06, |
|
"loss": 1.0575, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 8.316912138141188, |
|
"grad_norm": 2.512399911880493, |
|
"learning_rate": 1.347208185024932e-06, |
|
"loss": 1.067, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 8.337227018791264, |
|
"grad_norm": 2.5209548473358154, |
|
"learning_rate": 1.315362784353923e-06, |
|
"loss": 1.0529, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 8.35754189944134, |
|
"grad_norm": 2.6369845867156982, |
|
"learning_rate": 1.2838717666998802e-06, |
|
"loss": 1.0723, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 8.377856780091417, |
|
"grad_norm": 3.272552013397217, |
|
"learning_rate": 1.2527364170769686e-06, |
|
"loss": 1.0733, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 8.398171660741493, |
|
"grad_norm": 2.4088523387908936, |
|
"learning_rate": 1.2219580059860602e-06, |
|
"loss": 1.0554, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 8.41848654139157, |
|
"grad_norm": 2.388200044631958, |
|
"learning_rate": 1.191537789362881e-06, |
|
"loss": 1.0691, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 8.438801422041646, |
|
"grad_norm": 2.4707677364349365, |
|
"learning_rate": 1.1614770085267724e-06, |
|
"loss": 1.0583, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 8.459116302691722, |
|
"grad_norm": 2.6969428062438965, |
|
"learning_rate": 1.1317768901300375e-06, |
|
"loss": 1.0633, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 8.479431183341799, |
|
"grad_norm": 2.359264850616455, |
|
"learning_rate": 1.1024386461078752e-06, |
|
"loss": 1.0503, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 8.499746063991875, |
|
"grad_norm": 2.645862579345703, |
|
"learning_rate": 1.073463473628935e-06, |
|
"loss": 1.0953, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 8.52006094464195, |
|
"grad_norm": 2.5734922885894775, |
|
"learning_rate": 1.0448525550464704e-06, |
|
"loss": 1.0529, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 8.540375825292026, |
|
"grad_norm": 2.237166404724121, |
|
"learning_rate": 1.0166070578500842e-06, |
|
"loss": 1.0684, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 8.560690705942102, |
|
"grad_norm": 2.320554733276367, |
|
"learning_rate": 9.887281346180888e-07, |
|
"loss": 1.0654, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 8.581005586592179, |
|
"grad_norm": 2.724879503250122, |
|
"learning_rate": 9.612169229704748e-07, |
|
"loss": 1.0667, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 8.601320467242255, |
|
"grad_norm": 2.951643705368042, |
|
"learning_rate": 9.340745455224943e-07, |
|
"loss": 1.0892, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 8.621635347892331, |
|
"grad_norm": 2.4628891944885254, |
|
"learning_rate": 9.073021098388468e-07, |
|
"loss": 1.0776, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 8.641950228542408, |
|
"grad_norm": 2.5746138095855713, |
|
"learning_rate": 8.809007083884868e-07, |
|
"loss": 1.0644, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 8.662265109192484, |
|
"grad_norm": 2.6229591369628906, |
|
"learning_rate": 8.548714185000429e-07, |
|
"loss": 1.0795, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 8.68257998984256, |
|
"grad_norm": 2.9902350902557373, |
|
"learning_rate": 8.292153023178517e-07, |
|
"loss": 1.0791, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 8.702894870492635, |
|
"grad_norm": 2.228269577026367, |
|
"learning_rate": 8.039334067586325e-07, |
|
"loss": 1.0862, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 8.723209751142711, |
|
"grad_norm": 2.6434853076934814, |
|
"learning_rate": 7.790267634687487e-07, |
|
"loss": 1.0752, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 8.743524631792788, |
|
"grad_norm": 2.551692008972168, |
|
"learning_rate": 7.544963887821233e-07, |
|
"loss": 1.0675, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 8.763839512442864, |
|
"grad_norm": 2.4782984256744385, |
|
"learning_rate": 7.303432836787572e-07, |
|
"loss": 1.0768, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 8.78415439309294, |
|
"grad_norm": 2.706620931625366, |
|
"learning_rate": 7.065684337438916e-07, |
|
"loss": 1.0881, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 8.804469273743017, |
|
"grad_norm": 2.345564365386963, |
|
"learning_rate": 6.831728091277867e-07, |
|
"loss": 1.0771, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 8.824784154393093, |
|
"grad_norm": 2.543198347091675, |
|
"learning_rate": 6.601573645061321e-07, |
|
"loss": 1.049, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 8.84509903504317, |
|
"grad_norm": 2.7005691528320312, |
|
"learning_rate": 6.375230390410947e-07, |
|
"loss": 1.0871, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 8.865413915693246, |
|
"grad_norm": 2.9565064907073975, |
|
"learning_rate": 6.152707563429949e-07, |
|
"loss": 1.0691, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 8.885728796343322, |
|
"grad_norm": 2.2274577617645264, |
|
"learning_rate": 5.934014244326114e-07, |
|
"loss": 1.0697, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 8.906043676993399, |
|
"grad_norm": 2.464477062225342, |
|
"learning_rate": 5.719159357041415e-07, |
|
"loss": 1.0937, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 8.926358557643473, |
|
"grad_norm": 2.860454797744751, |
|
"learning_rate": 5.508151668887696e-07, |
|
"loss": 1.065, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 8.94667343829355, |
|
"grad_norm": 2.2630953788757324, |
|
"learning_rate": 5.300999790189088e-07, |
|
"loss": 1.0709, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 8.966988318943626, |
|
"grad_norm": 2.171909809112549, |
|
"learning_rate": 5.11786680609313e-07, |
|
"loss": 1.0732, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 8.987303199593702, |
|
"grad_norm": 2.525136947631836, |
|
"learning_rate": 4.918064123013932e-07, |
|
"loss": 1.0703, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 1.0895577669143677, |
|
"eval_runtime": 54.6524, |
|
"eval_samples_per_second": 36.046, |
|
"eval_steps_per_second": 9.021, |
|
"step": 4437 |
|
}, |
|
{ |
|
"epoch": 9.006094464195023, |
|
"grad_norm": 2.3463656902313232, |
|
"learning_rate": 4.722141328346708e-07, |
|
"loss": 1.0563, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 9.0264093448451, |
|
"grad_norm": 2.3120319843292236, |
|
"learning_rate": 4.530106416865332e-07, |
|
"loss": 1.0576, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 9.046724225495176, |
|
"grad_norm": 2.431831121444702, |
|
"learning_rate": 4.3419672246958313e-07, |
|
"loss": 1.0689, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 9.067039106145252, |
|
"grad_norm": 2.210456609725952, |
|
"learning_rate": 4.1577314289964474e-07, |
|
"loss": 1.0576, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 9.087353986795328, |
|
"grad_norm": 2.559464931488037, |
|
"learning_rate": 3.977406547644513e-07, |
|
"loss": 1.0773, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 9.107668867445403, |
|
"grad_norm": 2.405012845993042, |
|
"learning_rate": 3.800999938929606e-07, |
|
"loss": 1.0851, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 9.12798374809548, |
|
"grad_norm": 2.8481688499450684, |
|
"learning_rate": 3.628518801253311e-07, |
|
"loss": 1.0486, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 9.148298628745556, |
|
"grad_norm": 2.405825138092041, |
|
"learning_rate": 3.4599701728354474e-07, |
|
"loss": 1.0776, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 9.168613509395632, |
|
"grad_norm": 2.5540664196014404, |
|
"learning_rate": 3.2953609314269163e-07, |
|
"loss": 1.0623, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 9.188928390045708, |
|
"grad_norm": 2.9651260375976562, |
|
"learning_rate": 3.1346977940290404e-07, |
|
"loss": 1.0583, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 9.209243270695785, |
|
"grad_norm": 2.4424266815185547, |
|
"learning_rate": 2.9779873166194686e-07, |
|
"loss": 1.0869, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 9.229558151345861, |
|
"grad_norm": 2.3093559741973877, |
|
"learning_rate": 2.8252358938846145e-07, |
|
"loss": 1.0667, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 9.249873031995937, |
|
"grad_norm": 2.28373646736145, |
|
"learning_rate": 2.6764497589587544e-07, |
|
"loss": 1.0634, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 9.270187912646014, |
|
"grad_norm": 2.4830288887023926, |
|
"learning_rate": 2.5316349831697175e-07, |
|
"loss": 1.0964, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 9.29050279329609, |
|
"grad_norm": 2.0313925743103027, |
|
"learning_rate": 2.3907974757910735e-07, |
|
"loss": 1.0625, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 9.310817673946165, |
|
"grad_norm": 2.334379196166992, |
|
"learning_rate": 2.253942983800994e-07, |
|
"loss": 1.0654, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 9.331132554596241, |
|
"grad_norm": 2.6533384323120117, |
|
"learning_rate": 2.1210770916478384e-07, |
|
"loss": 1.0769, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 9.351447435246317, |
|
"grad_norm": 2.715524435043335, |
|
"learning_rate": 1.9922052210221477e-07, |
|
"loss": 1.0784, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 9.371762315896394, |
|
"grad_norm": 2.238746404647827, |
|
"learning_rate": 1.8673326306355432e-07, |
|
"loss": 1.0694, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 9.39207719654647, |
|
"grad_norm": 2.3140628337860107, |
|
"learning_rate": 1.7464644160059885e-07, |
|
"loss": 1.0733, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 9.412392077196547, |
|
"grad_norm": 2.2057929039001465, |
|
"learning_rate": 1.6296055092500207e-07, |
|
"loss": 1.0837, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 9.432706957846623, |
|
"grad_norm": 2.212195873260498, |
|
"learning_rate": 1.5167606788813794e-07, |
|
"loss": 1.0748, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 9.4530218384967, |
|
"grad_norm": 2.6805107593536377, |
|
"learning_rate": 1.4079345296164504e-07, |
|
"loss": 1.0793, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 9.473336719146776, |
|
"grad_norm": 2.494554042816162, |
|
"learning_rate": 1.3031315021864054e-07, |
|
"loss": 1.0564, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 9.493651599796852, |
|
"grad_norm": 2.6305577754974365, |
|
"learning_rate": 1.2023558731559582e-07, |
|
"loss": 1.0756, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 9.513966480446927, |
|
"grad_norm": 2.3631041049957275, |
|
"learning_rate": 1.1056117547488588e-07, |
|
"loss": 1.0828, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 9.534281361097003, |
|
"grad_norm": 2.3476672172546387, |
|
"learning_rate": 1.0129030946801066e-07, |
|
"loss": 1.0885, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 9.55459624174708, |
|
"grad_norm": 2.513430595397949, |
|
"learning_rate": 9.242336759948457e-08, |
|
"loss": 1.0698, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 9.574911122397156, |
|
"grad_norm": 2.3962695598602295, |
|
"learning_rate": 8.396071169139986e-08, |
|
"loss": 1.082, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 9.595226003047232, |
|
"grad_norm": 2.5314340591430664, |
|
"learning_rate": 7.590268706866188e-08, |
|
"loss": 1.075, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 9.615540883697308, |
|
"grad_norm": 2.3048431873321533, |
|
"learning_rate": 6.824962254490141e-08, |
|
"loss": 1.0527, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 9.635855764347385, |
|
"grad_norm": 2.2387807369232178, |
|
"learning_rate": 6.100183040905095e-08, |
|
"loss": 1.0573, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 9.656170644997461, |
|
"grad_norm": 2.7502102851867676, |
|
"learning_rate": 5.415960641260487e-08, |
|
"loss": 1.0662, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 9.676485525647538, |
|
"grad_norm": 2.980741024017334, |
|
"learning_rate": 4.772322975755361e-08, |
|
"loss": 1.099, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 9.696800406297612, |
|
"grad_norm": 2.4962992668151855, |
|
"learning_rate": 4.169296308498494e-08, |
|
"loss": 1.065, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 9.717115286947688, |
|
"grad_norm": 2.1819846630096436, |
|
"learning_rate": 3.606905246437253e-08, |
|
"loss": 1.0641, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 9.737430167597765, |
|
"grad_norm": 2.336205244064331, |
|
"learning_rate": 3.0851727383529596e-08, |
|
"loss": 1.052, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 9.757745048247841, |
|
"grad_norm": 2.4870941638946533, |
|
"learning_rate": 2.604120073924965e-08, |
|
"loss": 1.0729, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 9.778059928897918, |
|
"grad_norm": 2.8149468898773193, |
|
"learning_rate": 2.16376688286124e-08, |
|
"loss": 1.0905, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 9.798374809547994, |
|
"grad_norm": 2.3567795753479004, |
|
"learning_rate": 1.7641311340982346e-08, |
|
"loss": 1.0628, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 9.81868969019807, |
|
"grad_norm": 2.4957797527313232, |
|
"learning_rate": 1.40522913506691e-08, |
|
"loss": 1.071, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 9.839004570848147, |
|
"grad_norm": 2.4365956783294678, |
|
"learning_rate": 1.0870755310276038e-08, |
|
"loss": 1.0788, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 9.859319451498223, |
|
"grad_norm": 2.4922902584075928, |
|
"learning_rate": 8.096833044723973e-09, |
|
"loss": 1.0908, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 9.8796343321483, |
|
"grad_norm": 2.4978227615356445, |
|
"learning_rate": 5.730637745954282e-09, |
|
"loss": 1.0725, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 9.899949212798376, |
|
"grad_norm": 2.3648996353149414, |
|
"learning_rate": 3.772265968309263e-09, |
|
"loss": 1.0646, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 9.92026409344845, |
|
"grad_norm": 2.4374351501464844, |
|
"learning_rate": 2.2217976245886286e-09, |
|
"loss": 1.0571, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 9.940578974098527, |
|
"grad_norm": 2.5737831592559814, |
|
"learning_rate": 1.0792959827987671e-09, |
|
"loss": 1.0577, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 9.960893854748603, |
|
"grad_norm": 2.2821946144104004, |
|
"learning_rate": 3.448076635570452e-10, |
|
"loss": 1.0729, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 9.98120873539868, |
|
"grad_norm": 2.4394965171813965, |
|
"learning_rate": 1.8362638203317517e-11, |
|
"loss": 1.068, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 9.98120873539868, |
|
"eval_loss": 1.0894742012023926, |
|
"eval_runtime": 55.0248, |
|
"eval_samples_per_second": 35.802, |
|
"eval_steps_per_second": 8.96, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 9.98120873539868, |
|
"step": 4920, |
|
"total_flos": 2.0218089447773184e+16, |
|
"train_loss": 1.1380672057469685, |
|
"train_runtime": 8182.6752, |
|
"train_samples_per_second": 9.625, |
|
"train_steps_per_second": 0.601 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4920, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.0218089447773184e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |