{ |
|
"best_metric": 0.32636332511901855, |
|
"best_model_checkpoint": "./output/checkpoint-3600", |
|
"epoch": 0.2544385389573674, |
|
"eval_steps": 150, |
|
"global_step": 4500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0005654189754608164, |
|
"grad_norm": 2.227701425552368, |
|
"learning_rate": 7.500000000000001e-07, |
|
"loss": 0.1995, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0011308379509216329, |
|
"grad_norm": 5.525130748748779, |
|
"learning_rate": 1.5000000000000002e-06, |
|
"loss": 0.2334, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0016962569263824494, |
|
"grad_norm": 1.4867887496948242, |
|
"learning_rate": 2.25e-06, |
|
"loss": 0.3133, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0022616759018432657, |
|
"grad_norm": 0.5477761626243591, |
|
"learning_rate": 3.0000000000000005e-06, |
|
"loss": 0.1692, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0028270948773040823, |
|
"grad_norm": 1.7914361953735352, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.3082, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.003392513852764899, |
|
"grad_norm": 0.40979117155075073, |
|
"learning_rate": 4.5e-06, |
|
"loss": 0.1468, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.003957932828225715, |
|
"grad_norm": 5.27268123626709, |
|
"learning_rate": 5.2500000000000006e-06, |
|
"loss": 0.2381, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0045233518036865315, |
|
"grad_norm": 8.342147827148438, |
|
"learning_rate": 6.000000000000001e-06, |
|
"loss": 0.2292, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0050887707791473485, |
|
"grad_norm": 4.8586745262146, |
|
"learning_rate": 6.7500000000000014e-06, |
|
"loss": 0.1351, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.005654189754608165, |
|
"grad_norm": 7.848427772521973, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.2967, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.006219608730068981, |
|
"grad_norm": 0.6273432374000549, |
|
"learning_rate": 7.499922926093874e-06, |
|
"loss": 0.1445, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.006785027705529798, |
|
"grad_norm": 0.21901638805866241, |
|
"learning_rate": 7.499691707543699e-06, |
|
"loss": 0.2228, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.007350446680990614, |
|
"grad_norm": 0.5449599027633667, |
|
"learning_rate": 7.499306353853963e-06, |
|
"loss": 0.2675, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.00791586565645143, |
|
"grad_norm": 0.5028505921363831, |
|
"learning_rate": 7.49876688086505e-06, |
|
"loss": 0.1786, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.008481284631912247, |
|
"grad_norm": 1.582517385482788, |
|
"learning_rate": 7.4980733107525805e-06, |
|
"loss": 0.2159, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.008481284631912247, |
|
"eval_loss": 0.4814591705799103, |
|
"eval_runtime": 39.6064, |
|
"eval_samples_per_second": 12.624, |
|
"eval_steps_per_second": 12.624, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.009046703607373063, |
|
"grad_norm": 1.0850478410720825, |
|
"learning_rate": 7.4972256720265044e-06, |
|
"loss": 0.213, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.009612122582833879, |
|
"grad_norm": 13.114879608154297, |
|
"learning_rate": 7.496223999529932e-06, |
|
"loss": 0.2911, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.010177541558294697, |
|
"grad_norm": 1.2058771848678589, |
|
"learning_rate": 7.4950683344376926e-06, |
|
"loss": 0.3511, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.010742960533755513, |
|
"grad_norm": 0.21950186789035797, |
|
"learning_rate": 7.4937587242546544e-06, |
|
"loss": 0.2356, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01130837950921633, |
|
"grad_norm": 3.904442548751831, |
|
"learning_rate": 7.492295222813762e-06, |
|
"loss": 0.1941, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.011873798484677145, |
|
"grad_norm": 8.762584686279297, |
|
"learning_rate": 7.490677890273828e-06, |
|
"loss": 0.2477, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.012439217460137961, |
|
"grad_norm": 2.0331380367279053, |
|
"learning_rate": 7.488906793117058e-06, |
|
"loss": 0.2963, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.01300463643559878, |
|
"grad_norm": 0.952492892742157, |
|
"learning_rate": 7.486982004146319e-06, |
|
"loss": 0.1924, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.013570055411059595, |
|
"grad_norm": 2.3315181732177734, |
|
"learning_rate": 7.484903602482148e-06, |
|
"loss": 0.2643, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.014135474386520411, |
|
"grad_norm": 4.806341648101807, |
|
"learning_rate": 7.4826716735594945e-06, |
|
"loss": 0.2789, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.014700893361981228, |
|
"grad_norm": 5.050877571105957, |
|
"learning_rate": 7.480286309124216e-06, |
|
"loss": 0.2581, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.015266312337442044, |
|
"grad_norm": 6.544984340667725, |
|
"learning_rate": 7.477747607229302e-06, |
|
"loss": 0.2921, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01583173131290286, |
|
"grad_norm": 6.630581378936768, |
|
"learning_rate": 7.475055672230844e-06, |
|
"loss": 0.254, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.016397150288363676, |
|
"grad_norm": 8.29354190826416, |
|
"learning_rate": 7.472210614783745e-06, |
|
"loss": 0.3359, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.016962569263824494, |
|
"grad_norm": 0.3115900158882141, |
|
"learning_rate": 7.469212551837173e-06, |
|
"loss": 0.1632, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.016962569263824494, |
|
"eval_loss": 0.4433949291706085, |
|
"eval_runtime": 39.7798, |
|
"eval_samples_per_second": 12.569, |
|
"eval_steps_per_second": 12.569, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.01752798823928531, |
|
"grad_norm": 0.5275546908378601, |
|
"learning_rate": 7.4660616066297565e-06, |
|
"loss": 0.2717, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.018093407214746126, |
|
"grad_norm": 10.666130065917969, |
|
"learning_rate": 7.462757908684509e-06, |
|
"loss": 0.2823, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.018658826190206944, |
|
"grad_norm": 6.284046173095703, |
|
"learning_rate": 7.459301593803512e-06, |
|
"loss": 0.1959, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.019224245165667758, |
|
"grad_norm": 0.3211212456226349, |
|
"learning_rate": 7.455692804062335e-06, |
|
"loss": 0.1922, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.019789664141128576, |
|
"grad_norm": 1.0060560703277588, |
|
"learning_rate": 7.451931687804189e-06, |
|
"loss": 0.3553, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.020355083116589394, |
|
"grad_norm": 10.29517650604248, |
|
"learning_rate": 7.448018399633831e-06, |
|
"loss": 0.2307, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02092050209205021, |
|
"grad_norm": 1.2054423093795776, |
|
"learning_rate": 7.443953100411214e-06, |
|
"loss": 0.1897, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.021485921067511026, |
|
"grad_norm": 7.133542537689209, |
|
"learning_rate": 7.439735957244862e-06, |
|
"loss": 0.2051, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02205134004297184, |
|
"grad_norm": 1.6390613317489624, |
|
"learning_rate": 7.435367143485015e-06, |
|
"loss": 0.2781, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.02261675901843266, |
|
"grad_norm": 0.9467723965644836, |
|
"learning_rate": 7.430846838716496e-06, |
|
"loss": 0.1886, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.023182177993893476, |
|
"grad_norm": 5.297723770141602, |
|
"learning_rate": 7.426175228751328e-06, |
|
"loss": 0.2828, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.02374759696935429, |
|
"grad_norm": 0.396626353263855, |
|
"learning_rate": 7.421352505621099e-06, |
|
"loss": 0.294, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.02431301594481511, |
|
"grad_norm": 0.4418993592262268, |
|
"learning_rate": 7.416378867569069e-06, |
|
"loss": 0.217, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.024878434920275923, |
|
"grad_norm": 0.9995683431625366, |
|
"learning_rate": 7.411254519042017e-06, |
|
"loss": 0.109, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.02544385389573674, |
|
"grad_norm": 2.574638843536377, |
|
"learning_rate": 7.4059796706818396e-06, |
|
"loss": 0.1732, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.02544385389573674, |
|
"eval_loss": 0.4305788278579712, |
|
"eval_runtime": 39.9497, |
|
"eval_samples_per_second": 12.516, |
|
"eval_steps_per_second": 12.516, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.02600927287119756, |
|
"grad_norm": 4.463107109069824, |
|
"learning_rate": 7.400554539316894e-06, |
|
"loss": 0.2522, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.026574691846658373, |
|
"grad_norm": 0.5263558030128479, |
|
"learning_rate": 7.394979347953081e-06, |
|
"loss": 0.3847, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.02714011082211919, |
|
"grad_norm": 0.4478251039981842, |
|
"learning_rate": 7.389254325764681e-06, |
|
"loss": 0.2012, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.027705529797580005, |
|
"grad_norm": 6.665970325469971, |
|
"learning_rate": 7.383379708084934e-06, |
|
"loss": 0.255, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.028270948773040823, |
|
"grad_norm": 0.7272936701774597, |
|
"learning_rate": 7.377355736396362e-06, |
|
"loss": 0.1486, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02883636774850164, |
|
"grad_norm": 0.3776552081108093, |
|
"learning_rate": 7.371182658320847e-06, |
|
"loss": 0.2023, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.029401786723962455, |
|
"grad_norm": 0.8645554184913635, |
|
"learning_rate": 7.36486072760945e-06, |
|
"loss": 0.1617, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.029967205699423273, |
|
"grad_norm": 6.2824387550354, |
|
"learning_rate": 7.358390204131984e-06, |
|
"loss": 0.3383, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.030532624674884087, |
|
"grad_norm": 0.8180729150772095, |
|
"learning_rate": 7.3517713538663235e-06, |
|
"loss": 0.2147, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.031098043650344905, |
|
"grad_norm": 0.5693274736404419, |
|
"learning_rate": 7.345004448887478e-06, |
|
"loss": 0.2348, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.03166346262580572, |
|
"grad_norm": 4.538175582885742, |
|
"learning_rate": 7.3380897673564085e-06, |
|
"loss": 0.3281, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.03222888160126654, |
|
"grad_norm": 4.282591819763184, |
|
"learning_rate": 7.33102759350859e-06, |
|
"loss": 0.2727, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.03279430057672735, |
|
"grad_norm": 0.8115090727806091, |
|
"learning_rate": 7.323818217642328e-06, |
|
"loss": 0.2479, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.03335971955218817, |
|
"grad_norm": 3.089582681655884, |
|
"learning_rate": 7.316461936106827e-06, |
|
"loss": 0.195, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.03392513852764899, |
|
"grad_norm": 10.888387680053711, |
|
"learning_rate": 7.3089590512900084e-06, |
|
"loss": 0.1643, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.03392513852764899, |
|
"eval_loss": 0.425952672958374, |
|
"eval_runtime": 39.7561, |
|
"eval_samples_per_second": 12.577, |
|
"eval_steps_per_second": 12.577, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.0344905575031098, |
|
"grad_norm": 6.550843715667725, |
|
"learning_rate": 7.301309871606081e-06, |
|
"loss": 0.3565, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.03505597647857062, |
|
"grad_norm": 1.943982720375061, |
|
"learning_rate": 7.293514711482861e-06, |
|
"loss": 0.2715, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.03562139545403144, |
|
"grad_norm": 4.950693607330322, |
|
"learning_rate": 7.285573891348849e-06, |
|
"loss": 0.2719, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.03618681442949225, |
|
"grad_norm": 7.106111526489258, |
|
"learning_rate": 7.27748773762006e-06, |
|
"loss": 0.287, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.03675223340495307, |
|
"grad_norm": 3.4435412883758545, |
|
"learning_rate": 7.269256582686603e-06, |
|
"loss": 0.2495, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.03731765238041389, |
|
"grad_norm": 3.9263601303100586, |
|
"learning_rate": 7.260880764899016e-06, |
|
"loss": 0.2317, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.0378830713558747, |
|
"grad_norm": 1.1376698017120361, |
|
"learning_rate": 7.252360628554363e-06, |
|
"loss": 0.138, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.038448490331335516, |
|
"grad_norm": 7.205196857452393, |
|
"learning_rate": 7.243696523882079e-06, |
|
"loss": 0.1982, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.03901390930679634, |
|
"grad_norm": 3.7006053924560547, |
|
"learning_rate": 7.2348888070295705e-06, |
|
"loss": 0.2156, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.03957932828225715, |
|
"grad_norm": 0.5304602384567261, |
|
"learning_rate": 7.225937840047583e-06, |
|
"loss": 0.3153, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.040144747257717966, |
|
"grad_norm": 14.555486679077148, |
|
"learning_rate": 7.216843990875307e-06, |
|
"loss": 0.3455, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.04071016623317879, |
|
"grad_norm": 20.35503578186035, |
|
"learning_rate": 7.207607633325266e-06, |
|
"loss": 0.2996, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.0412755852086396, |
|
"grad_norm": 0.4252071678638458, |
|
"learning_rate": 7.198229147067941e-06, |
|
"loss": 0.2781, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.04184100418410042, |
|
"grad_norm": 0.641488790512085, |
|
"learning_rate": 7.18870891761617e-06, |
|
"loss": 0.1364, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.04240642315956124, |
|
"grad_norm": 7.14177942276001, |
|
"learning_rate": 7.1790473363092974e-06, |
|
"loss": 0.2639, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04240642315956124, |
|
"eval_loss": 0.4108331799507141, |
|
"eval_runtime": 39.6561, |
|
"eval_samples_per_second": 12.608, |
|
"eval_steps_per_second": 12.608, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.04297184213502205, |
|
"grad_norm": 2.8823704719543457, |
|
"learning_rate": 7.169244800297089e-06, |
|
"loss": 0.2308, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.04353726111048287, |
|
"grad_norm": 0.27674323320388794, |
|
"learning_rate": 7.159301712523407e-06, |
|
"loss": 0.2142, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.04410268008594368, |
|
"grad_norm": 3.091693162918091, |
|
"learning_rate": 7.149218481709644e-06, |
|
"loss": 0.2864, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.0446680990614045, |
|
"grad_norm": 2.583334445953369, |
|
"learning_rate": 7.1389955223379266e-06, |
|
"loss": 0.1566, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.04523351803686532, |
|
"grad_norm": 1.4404966831207275, |
|
"learning_rate": 7.128633254634072e-06, |
|
"loss": 0.2206, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.04579893701232613, |
|
"grad_norm": 0.3449786603450775, |
|
"learning_rate": 7.118132104550322e-06, |
|
"loss": 0.0562, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.04636435598778695, |
|
"grad_norm": 1.9726862907409668, |
|
"learning_rate": 7.107492503747826e-06, |
|
"loss": 0.1539, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.04692977496324777, |
|
"grad_norm": 6.304052829742432, |
|
"learning_rate": 7.096714889578898e-06, |
|
"loss": 0.2936, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.04749519393870858, |
|
"grad_norm": 0.7741239070892334, |
|
"learning_rate": 7.085799705069046e-06, |
|
"loss": 0.3401, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.0480606129141694, |
|
"grad_norm": 2.764857053756714, |
|
"learning_rate": 7.0747473988987515e-06, |
|
"loss": 0.2143, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.04862603188963022, |
|
"grad_norm": 11.133625030517578, |
|
"learning_rate": 7.063558425385033e-06, |
|
"loss": 0.3402, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.04919145086509103, |
|
"grad_norm": 3.349154233932495, |
|
"learning_rate": 7.052233244462769e-06, |
|
"loss": 0.155, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.049756869840551846, |
|
"grad_norm": 5.708688735961914, |
|
"learning_rate": 7.040772321665788e-06, |
|
"loss": 0.1933, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.05032228881601267, |
|
"grad_norm": 10.529179573059082, |
|
"learning_rate": 7.029176128107734e-06, |
|
"loss": 0.2543, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.05088770779147348, |
|
"grad_norm": 0.6206080913543701, |
|
"learning_rate": 7.017445140462711e-06, |
|
"loss": 0.1855, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.05088770779147348, |
|
"eval_loss": 0.38634100556373596, |
|
"eval_runtime": 39.5281, |
|
"eval_samples_per_second": 12.649, |
|
"eval_steps_per_second": 12.649, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.051453126766934296, |
|
"grad_norm": 0.795102059841156, |
|
"learning_rate": 7.00557984094567e-06, |
|
"loss": 0.2033, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.05201854574239512, |
|
"grad_norm": 4.390031814575195, |
|
"learning_rate": 6.993580717292601e-06, |
|
"loss": 0.2188, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.05258396471785593, |
|
"grad_norm": 0.5030427575111389, |
|
"learning_rate": 6.981448262740483e-06, |
|
"loss": 0.1603, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.053149383693316746, |
|
"grad_norm": 5.4078288078308105, |
|
"learning_rate": 6.969182976006999e-06, |
|
"loss": 0.2024, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.05371480266877757, |
|
"grad_norm": 3.2166638374328613, |
|
"learning_rate": 6.95678536127005e-06, |
|
"loss": 0.3025, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.05428022164423838, |
|
"grad_norm": 1.07512629032135, |
|
"learning_rate": 6.944255928147017e-06, |
|
"loss": 0.1517, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.054845640619699196, |
|
"grad_norm": 13.33733081817627, |
|
"learning_rate": 6.931595191673823e-06, |
|
"loss": 0.262, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.05541105959516001, |
|
"grad_norm": 5.074169635772705, |
|
"learning_rate": 6.9188036722837555e-06, |
|
"loss": 0.2072, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.05597647857062083, |
|
"grad_norm": 2.5820138454437256, |
|
"learning_rate": 6.905881895786076e-06, |
|
"loss": 0.2312, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.056541897546081646, |
|
"grad_norm": 4.5758819580078125, |
|
"learning_rate": 6.892830393344403e-06, |
|
"loss": 0.2633, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.05710731652154246, |
|
"grad_norm": 0.38114678859710693, |
|
"learning_rate": 6.879649701454886e-06, |
|
"loss": 0.0931, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.05767273549700328, |
|
"grad_norm": 6.1940131187438965, |
|
"learning_rate": 6.866340361924141e-06, |
|
"loss": 0.2923, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.058238154472464096, |
|
"grad_norm": 0.355970174074173, |
|
"learning_rate": 6.852902921846988e-06, |
|
"loss": 0.257, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.05880357344792491, |
|
"grad_norm": 5.869740962982178, |
|
"learning_rate": 6.8393379335839565e-06, |
|
"loss": 0.2529, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.05936899242338573, |
|
"grad_norm": 4.137405872344971, |
|
"learning_rate": 6.825645954738586e-06, |
|
"loss": 0.1663, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.05936899242338573, |
|
"eval_loss": 0.3912532329559326, |
|
"eval_runtime": 39.5493, |
|
"eval_samples_per_second": 12.642, |
|
"eval_steps_per_second": 12.642, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.059934411398846546, |
|
"grad_norm": 0.35506558418273926, |
|
"learning_rate": 6.811827548134495e-06, |
|
"loss": 0.3231, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.06049983037430736, |
|
"grad_norm": 1.4487513303756714, |
|
"learning_rate": 6.797883281792261e-06, |
|
"loss": 0.2067, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.061065249349768175, |
|
"grad_norm": 2.257608652114868, |
|
"learning_rate": 6.783813728906054e-06, |
|
"loss": 0.2776, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.061630668325228996, |
|
"grad_norm": 2.917322874069214, |
|
"learning_rate": 6.769619467820086e-06, |
|
"loss": 0.2023, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.06219608730068981, |
|
"grad_norm": 1.0882151126861572, |
|
"learning_rate": 6.755301082004838e-06, |
|
"loss": 0.1002, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.06276150627615062, |
|
"grad_norm": 3.259073257446289, |
|
"learning_rate": 6.740859160033068e-06, |
|
"loss": 0.1888, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.06332692525161145, |
|
"grad_norm": 5.543813228607178, |
|
"learning_rate": 6.726294295555623e-06, |
|
"loss": 0.31, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.06389234422707227, |
|
"grad_norm": 5.321115970611572, |
|
"learning_rate": 6.711607087277034e-06, |
|
"loss": 0.1793, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.06445776320253307, |
|
"grad_norm": 5.510456562042236, |
|
"learning_rate": 6.69679813893091e-06, |
|
"loss": 0.2166, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.0650231821779939, |
|
"grad_norm": 0.46486374735832214, |
|
"learning_rate": 6.681868059255113e-06, |
|
"loss": 0.1986, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.0655886011534547, |
|
"grad_norm": 0.5120518803596497, |
|
"learning_rate": 6.666817461966741e-06, |
|
"loss": 0.3544, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.06615402012891552, |
|
"grad_norm": 2.512892484664917, |
|
"learning_rate": 6.651646965736902e-06, |
|
"loss": 0.2077, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.06671943910437635, |
|
"grad_norm": 8.5523099899292, |
|
"learning_rate": 6.636357194165274e-06, |
|
"loss": 0.299, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.06728485807983715, |
|
"grad_norm": 13.795504570007324, |
|
"learning_rate": 6.620948775754481e-06, |
|
"loss": 0.2292, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.06785027705529798, |
|
"grad_norm": 0.3113418519496918, |
|
"learning_rate": 6.605422343884255e-06, |
|
"loss": 0.117, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.06785027705529798, |
|
"eval_loss": 0.37112918496131897, |
|
"eval_runtime": 39.5153, |
|
"eval_samples_per_second": 12.653, |
|
"eval_steps_per_second": 12.653, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.0684156960307588, |
|
"grad_norm": 4.8856682777404785, |
|
"learning_rate": 6.589778536785396e-06, |
|
"loss": 0.2095, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.0689811150062196, |
|
"grad_norm": 2.4136312007904053, |
|
"learning_rate": 6.5740179975135426e-06, |
|
"loss": 0.2877, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.06954653398168043, |
|
"grad_norm": 4.938726902008057, |
|
"learning_rate": 6.5581413739227314e-06, |
|
"loss": 0.2253, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.07011195295714125, |
|
"grad_norm": 2.4916675090789795, |
|
"learning_rate": 6.542149318638777e-06, |
|
"loss": 0.2073, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.07067737193260205, |
|
"grad_norm": 0.27168160676956177, |
|
"learning_rate": 6.526042489032434e-06, |
|
"loss": 0.129, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.07124279090806288, |
|
"grad_norm": 0.6078025102615356, |
|
"learning_rate": 6.509821547192383e-06, |
|
"loss": 0.2505, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.0718082098835237, |
|
"grad_norm": 3.8309597969055176, |
|
"learning_rate": 6.493487159898006e-06, |
|
"loss": 0.1853, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.0723736288589845, |
|
"grad_norm": 0.6031014919281006, |
|
"learning_rate": 6.477039998591991e-06, |
|
"loss": 0.1812, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.07293904783444533, |
|
"grad_norm": 26.20022201538086, |
|
"learning_rate": 6.460480739352719e-06, |
|
"loss": 0.2584, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.07350446680990615, |
|
"grad_norm": 4.755608558654785, |
|
"learning_rate": 6.4438100628664795e-06, |
|
"loss": 0.2646, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.07406988578536695, |
|
"grad_norm": 4.010619163513184, |
|
"learning_rate": 6.4270286543994874e-06, |
|
"loss": 0.1157, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.07463530476082778, |
|
"grad_norm": 4.133307456970215, |
|
"learning_rate": 6.410137203769718e-06, |
|
"loss": 0.1249, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.0752007237362886, |
|
"grad_norm": 0.7277269959449768, |
|
"learning_rate": 6.393136405318545e-06, |
|
"loss": 0.17, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.0757661427117494, |
|
"grad_norm": 7.398675918579102, |
|
"learning_rate": 6.376026957882207e-06, |
|
"loss": 0.2289, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.07633156168721023, |
|
"grad_norm": 2.177061080932617, |
|
"learning_rate": 6.3588095647630754e-06, |
|
"loss": 0.2373, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.07633156168721023, |
|
"eval_loss": 0.3694673478603363, |
|
"eval_runtime": 39.5421, |
|
"eval_samples_per_second": 12.645, |
|
"eval_steps_per_second": 12.645, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.07689698066267103, |
|
"grad_norm": 4.6597371101379395, |
|
"learning_rate": 6.341484933700744e-06, |
|
"loss": 0.2264, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.07746239963813185, |
|
"grad_norm": 2.3831920623779297, |
|
"learning_rate": 6.32405377684294e-06, |
|
"loss": 0.2637, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.07802781861359268, |
|
"grad_norm": 6.223957538604736, |
|
"learning_rate": 6.306516810716249e-06, |
|
"loss": 0.262, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.07859323758905348, |
|
"grad_norm": 9.916725158691406, |
|
"learning_rate": 6.288874756196662e-06, |
|
"loss": 0.2729, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.0791586565645143, |
|
"grad_norm": 0.49764057993888855, |
|
"learning_rate": 6.271128338479939e-06, |
|
"loss": 0.2377, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.07972407553997513, |
|
"grad_norm": 2.9618566036224365, |
|
"learning_rate": 6.253278287051806e-06, |
|
"loss": 0.249, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.08028949451543593, |
|
"grad_norm": 2.368474006652832, |
|
"learning_rate": 6.235325335657962e-06, |
|
"loss": 0.124, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.08085491349089675, |
|
"grad_norm": 0.38136398792266846, |
|
"learning_rate": 6.217270222273923e-06, |
|
"loss": 0.2674, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.08142033246635758, |
|
"grad_norm": 4.6476898193359375, |
|
"learning_rate": 6.1991136890746825e-06, |
|
"loss": 0.1299, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.08198575144181838, |
|
"grad_norm": 1.214414119720459, |
|
"learning_rate": 6.180856482404208e-06, |
|
"loss": 0.2702, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.0825511704172792, |
|
"grad_norm": 7.3671464920043945, |
|
"learning_rate": 6.162499352744754e-06, |
|
"loss": 0.2172, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.08311658939274003, |
|
"grad_norm": 0.29375457763671875, |
|
"learning_rate": 6.144043054686022e-06, |
|
"loss": 0.1906, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.08368200836820083, |
|
"grad_norm": 1.2316617965698242, |
|
"learning_rate": 6.125488346894139e-06, |
|
"loss": 0.2524, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.08424742734366165, |
|
"grad_norm": 4.17201566696167, |
|
"learning_rate": 6.106835992080464e-06, |
|
"loss": 0.2358, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.08481284631912248, |
|
"grad_norm": 0.6424977779388428, |
|
"learning_rate": 6.088086756970252e-06, |
|
"loss": 0.2243, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.08481284631912248, |
|
"eval_loss": 0.3590245246887207, |
|
"eval_runtime": 39.5277, |
|
"eval_samples_per_second": 12.649, |
|
"eval_steps_per_second": 12.649, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.08537826529458328, |
|
"grad_norm": 5.551772117614746, |
|
"learning_rate": 6.0692414122711184e-06, |
|
"loss": 0.2982, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.0859436842700441, |
|
"grad_norm": 0.5617218613624573, |
|
"learning_rate": 6.050300732641376e-06, |
|
"loss": 0.2046, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.08650910324550493, |
|
"grad_norm": 0.30334511399269104, |
|
"learning_rate": 6.0312654966581755e-06, |
|
"loss": 0.2256, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.08707452222096573, |
|
"grad_norm": 4.384812355041504, |
|
"learning_rate": 6.012136486785512e-06, |
|
"loss": 0.2902, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.08763994119642655, |
|
"grad_norm": 0.2928411066532135, |
|
"learning_rate": 5.992914489342061e-06, |
|
"loss": 0.1868, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.08820536017188736, |
|
"grad_norm": 2.172215461730957, |
|
"learning_rate": 5.9736002944688474e-06, |
|
"loss": 0.1724, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.08877077914734818, |
|
"grad_norm": 0.31219416856765747, |
|
"learning_rate": 5.954194696096775e-06, |
|
"loss": 0.1851, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.089336198122809, |
|
"grad_norm": 0.2729625403881073, |
|
"learning_rate": 5.9346984919139865e-06, |
|
"loss": 0.1647, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.08990161709826981, |
|
"grad_norm": 4.8005852699279785, |
|
"learning_rate": 5.9151124833330745e-06, |
|
"loss": 0.2142, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.09046703607373063, |
|
"grad_norm": 4.540345668792725, |
|
"learning_rate": 5.895437475458137e-06, |
|
"loss": 0.251, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.09103245504919145, |
|
"grad_norm": 8.689173698425293, |
|
"learning_rate": 5.875674277051688e-06, |
|
"loss": 0.3042, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.09159787402465226, |
|
"grad_norm": 0.9120452404022217, |
|
"learning_rate": 5.855823700501406e-06, |
|
"loss": 0.1492, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.09216329300011308, |
|
"grad_norm": 5.6351776123046875, |
|
"learning_rate": 5.835886561786744e-06, |
|
"loss": 0.1607, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.0927287119755739, |
|
"grad_norm": 0.8756939768791199, |
|
"learning_rate": 5.815863680445385e-06, |
|
"loss": 0.2282, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.09329413095103471, |
|
"grad_norm": 1.7489228248596191, |
|
"learning_rate": 5.795755879539558e-06, |
|
"loss": 0.2005, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.09329413095103471, |
|
"eval_loss": 0.3531251847743988, |
|
"eval_runtime": 39.8671, |
|
"eval_samples_per_second": 12.542, |
|
"eval_steps_per_second": 12.542, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.09385954992649553, |
|
"grad_norm": 0.37482306361198425, |
|
"learning_rate": 5.775563985622202e-06, |
|
"loss": 0.2869, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.09442496890195635, |
|
"grad_norm": 5.0887532234191895, |
|
"learning_rate": 5.755288828702987e-06, |
|
"loss": 0.1684, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.09499038787741716, |
|
"grad_norm": 12.987879753112793, |
|
"learning_rate": 5.734931242214204e-06, |
|
"loss": 0.2351, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.09555580685287798, |
|
"grad_norm": 1.7983970642089844, |
|
"learning_rate": 5.7144920629764955e-06, |
|
"loss": 0.2079, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.0961212258283388, |
|
"grad_norm": 0.2802395522594452, |
|
"learning_rate": 5.693972131164471e-06, |
|
"loss": 0.0819, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.09668664480379961, |
|
"grad_norm": 2.808706045150757, |
|
"learning_rate": 5.673372290272149e-06, |
|
"loss": 0.2285, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.09725206377926043, |
|
"grad_norm": 1.49234139919281, |
|
"learning_rate": 5.652693387078309e-06, |
|
"loss": 0.1787, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.09781748275472125, |
|
"grad_norm": 0.7509416341781616, |
|
"learning_rate": 5.631936271611667e-06, |
|
"loss": 0.1384, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.09838290173018206, |
|
"grad_norm": 3.4828104972839355, |
|
"learning_rate": 5.611101797115939e-06, |
|
"loss": 0.3529, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.09894832070564288, |
|
"grad_norm": 3.589022636413574, |
|
"learning_rate": 5.5901908200147685e-06, |
|
"loss": 0.184, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.09951373968110369, |
|
"grad_norm": 9.908434867858887, |
|
"learning_rate": 5.56920419987652e-06, |
|
"loss": 0.2994, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.10007915865656451, |
|
"grad_norm": 0.6579049229621887, |
|
"learning_rate": 5.5481427993789534e-06, |
|
"loss": 0.2197, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.10064457763202533, |
|
"grad_norm": 1.0579322576522827, |
|
"learning_rate": 5.527007484273746e-06, |
|
"loss": 0.2826, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.10120999660748614, |
|
"grad_norm": 0.5070655941963196, |
|
"learning_rate": 5.5057991233509225e-06, |
|
"loss": 0.1558, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.10177541558294696, |
|
"grad_norm": 0.28426429629325867, |
|
"learning_rate": 5.484518588403134e-06, |
|
"loss": 0.1975, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.10177541558294696, |
|
"eval_loss": 0.35644251108169556, |
|
"eval_runtime": 39.6469, |
|
"eval_samples_per_second": 12.611, |
|
"eval_steps_per_second": 12.611, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.10234083455840778, |
|
"grad_norm": 2.890162467956543, |
|
"learning_rate": 5.463166754189819e-06, |
|
"loss": 0.2279, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.10290625353386859, |
|
"grad_norm": 1.4932255744934082, |
|
"learning_rate": 5.441744498401255e-06, |
|
"loss": 0.1315, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.10347167250932941, |
|
"grad_norm": 5.322872161865234, |
|
"learning_rate": 5.4202527016224725e-06, |
|
"loss": 0.1946, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.10403709148479023, |
|
"grad_norm": 0.6986817717552185, |
|
"learning_rate": 5.398692247297059e-06, |
|
"loss": 0.1914, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.10460251046025104, |
|
"grad_norm": 0.5160526633262634, |
|
"learning_rate": 5.377064021690844e-06, |
|
"loss": 0.1465, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.10516792943571186, |
|
"grad_norm": 7.956513404846191, |
|
"learning_rate": 5.355368913855472e-06, |
|
"loss": 0.1872, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.10573334841117268, |
|
"grad_norm": 8.840923309326172, |
|
"learning_rate": 5.333607815591851e-06, |
|
"loss": 0.31, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.10629876738663349, |
|
"grad_norm": 0.5062395334243774, |
|
"learning_rate": 5.311781621413497e-06, |
|
"loss": 0.0946, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.10686418636209431, |
|
"grad_norm": 2.2639293670654297, |
|
"learning_rate": 5.289891228509769e-06, |
|
"loss": 0.1967, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.10742960533755513, |
|
"grad_norm": 3.102832555770874, |
|
"learning_rate": 5.267937536708977e-06, |
|
"loss": 0.2081, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.10799502431301594, |
|
"grad_norm": 0.5631608366966248, |
|
"learning_rate": 5.245921448441407e-06, |
|
"loss": 0.2371, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.10856044328847676, |
|
"grad_norm": 5.38816499710083, |
|
"learning_rate": 5.223843868702214e-06, |
|
"loss": 0.223, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.10912586226393758, |
|
"grad_norm": 4.507992267608643, |
|
"learning_rate": 5.201705705014231e-06, |
|
"loss": 0.1544, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.10969128123939839, |
|
"grad_norm": 3.72554612159729, |
|
"learning_rate": 5.1795078673906575e-06, |
|
"loss": 0.1786, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.11025670021485921, |
|
"grad_norm": 0.2608848512172699, |
|
"learning_rate": 5.1572512682976546e-06, |
|
"loss": 0.1554, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.11025670021485921, |
|
"eval_loss": 0.3513215184211731, |
|
"eval_runtime": 39.581, |
|
"eval_samples_per_second": 12.632, |
|
"eval_steps_per_second": 12.632, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.11082211919032002, |
|
"grad_norm": 4.061446666717529, |
|
"learning_rate": 5.134936822616837e-06, |
|
"loss": 0.2292, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.11138753816578084, |
|
"grad_norm": 2.5705180168151855, |
|
"learning_rate": 5.112565447607669e-06, |
|
"loss": 0.1872, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.11195295714124166, |
|
"grad_norm": 2.256411552429199, |
|
"learning_rate": 5.090138062869755e-06, |
|
"loss": 0.2593, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.11251837611670247, |
|
"grad_norm": 2.7512147426605225, |
|
"learning_rate": 5.067655590305036e-06, |
|
"loss": 0.1837, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.11308379509216329, |
|
"grad_norm": 2.080970048904419, |
|
"learning_rate": 5.045118954079904e-06, |
|
"loss": 0.1603, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.11364921406762411, |
|
"grad_norm": 0.7004780769348145, |
|
"learning_rate": 5.022529080587205e-06, |
|
"loss": 0.2083, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.11421463304308492, |
|
"grad_norm": 0.395233154296875, |
|
"learning_rate": 4.999886898408157e-06, |
|
"loss": 0.3176, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.11478005201854574, |
|
"grad_norm": 1.5686570405960083, |
|
"learning_rate": 4.977193338274189e-06, |
|
"loss": 0.1894, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.11534547099400656, |
|
"grad_norm": 2.3186402320861816, |
|
"learning_rate": 4.954449333028672e-06, |
|
"loss": 0.259, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.11591088996946737, |
|
"grad_norm": 3.282456159591675, |
|
"learning_rate": 4.931655817588579e-06, |
|
"loss": 0.331, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.11647630894492819, |
|
"grad_norm": 4.835910320281982, |
|
"learning_rate": 4.9088137289060535e-06, |
|
"loss": 0.1809, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.11704172792038901, |
|
"grad_norm": 5.888257026672363, |
|
"learning_rate": 4.885924005929896e-06, |
|
"loss": 0.1677, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.11760714689584982, |
|
"grad_norm": 0.332059383392334, |
|
"learning_rate": 4.862987589566965e-06, |
|
"loss": 0.1576, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.11817256587131064, |
|
"grad_norm": 9.200852394104004, |
|
"learning_rate": 4.840005422643503e-06, |
|
"loss": 0.1955, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.11873798484677146, |
|
"grad_norm": 5.58013916015625, |
|
"learning_rate": 4.816978449866372e-06, |
|
"loss": 0.2468, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.11873798484677146, |
|
"eval_loss": 0.3523203134536743, |
|
"eval_runtime": 39.5709, |
|
"eval_samples_per_second": 12.636, |
|
"eval_steps_per_second": 12.636, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.11930340382223227, |
|
"grad_norm": 2.2958157062530518, |
|
"learning_rate": 4.793907617784238e-06, |
|
"loss": 0.2361, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.11986882279769309, |
|
"grad_norm": 12.717740058898926, |
|
"learning_rate": 4.770793874748642e-06, |
|
"loss": 0.3083, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.12043424177315391, |
|
"grad_norm": 3.778834342956543, |
|
"learning_rate": 4.747638170875032e-06, |
|
"loss": 0.1325, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.12099966074861472, |
|
"grad_norm": 4.040988922119141, |
|
"learning_rate": 4.724441458003699e-06, |
|
"loss": 0.1954, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.12156507972407554, |
|
"grad_norm": 0.34643489122390747, |
|
"learning_rate": 4.701204689660653e-06, |
|
"loss": 0.1422, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.12213049869953635, |
|
"grad_norm": 4.3240532875061035, |
|
"learning_rate": 4.67792882101843e-06, |
|
"loss": 0.1082, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.12269591767499717, |
|
"grad_norm": 2.3450865745544434, |
|
"learning_rate": 4.654614808856823e-06, |
|
"loss": 0.1661, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.12326133665045799, |
|
"grad_norm": 6.436302185058594, |
|
"learning_rate": 4.631263611523557e-06, |
|
"loss": 0.2116, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.1238267556259188, |
|
"grad_norm": 3.144357681274414, |
|
"learning_rate": 4.607876188894896e-06, |
|
"loss": 0.25, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.12439217460137962, |
|
"grad_norm": 11.192508697509766, |
|
"learning_rate": 4.58445350233618e-06, |
|
"loss": 0.2223, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.12495759357684044, |
|
"grad_norm": 4.112863540649414, |
|
"learning_rate": 4.560996514662314e-06, |
|
"loss": 0.2854, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.12552301255230125, |
|
"grad_norm": 0.4032130539417267, |
|
"learning_rate": 4.5375061900981855e-06, |
|
"loss": 0.1555, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.12608843152776208, |
|
"grad_norm": 0.21966849267482758, |
|
"learning_rate": 4.513983494239034e-06, |
|
"loss": 0.2369, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.1266538505032229, |
|
"grad_norm": 4.995877265930176, |
|
"learning_rate": 4.490429394010752e-06, |
|
"loss": 0.3232, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.1272192694786837, |
|
"grad_norm": 4.551838397979736, |
|
"learning_rate": 4.466844857630147e-06, |
|
"loss": 0.1853, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.1272192694786837, |
|
"eval_loss": 0.3480963110923767, |
|
"eval_runtime": 39.5414, |
|
"eval_samples_per_second": 12.645, |
|
"eval_steps_per_second": 12.645, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.12778468845414453, |
|
"grad_norm": 5.477660179138184, |
|
"learning_rate": 4.443230854565133e-06, |
|
"loss": 0.2047, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.12835010742960534, |
|
"grad_norm": 2.2373340129852295, |
|
"learning_rate": 4.4195883554948885e-06, |
|
"loss": 0.1282, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.12891552640506615, |
|
"grad_norm": 8.20826530456543, |
|
"learning_rate": 4.3959183322699466e-06, |
|
"loss": 0.2477, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.12948094538052696, |
|
"grad_norm": 1.8365843296051025, |
|
"learning_rate": 4.372221757872255e-06, |
|
"loss": 0.1239, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.1300463643559878, |
|
"grad_norm": 2.880095958709717, |
|
"learning_rate": 4.3484996063751725e-06, |
|
"loss": 0.1884, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.1306117833314486, |
|
"grad_norm": 0.17532140016555786, |
|
"learning_rate": 4.324752852903435e-06, |
|
"loss": 0.139, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.1311772023069094, |
|
"grad_norm": 7.910481929779053, |
|
"learning_rate": 4.300982473593068e-06, |
|
"loss": 0.142, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.13174262128237024, |
|
"grad_norm": 5.037327766418457, |
|
"learning_rate": 4.277189445551261e-06, |
|
"loss": 0.1302, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.13230804025783105, |
|
"grad_norm": 2.0469155311584473, |
|
"learning_rate": 4.253374746816209e-06, |
|
"loss": 0.1594, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.13287345923329186, |
|
"grad_norm": 3.8150224685668945, |
|
"learning_rate": 4.229539356316898e-06, |
|
"loss": 0.2159, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.1334388782087527, |
|
"grad_norm": 5.029961585998535, |
|
"learning_rate": 4.205684253832877e-06, |
|
"loss": 0.2102, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.1340042971842135, |
|
"grad_norm": 0.1849166750907898, |
|
"learning_rate": 4.1818104199539735e-06, |
|
"loss": 0.2711, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.1345697161596743, |
|
"grad_norm": 4.861811637878418, |
|
"learning_rate": 4.1579188360399916e-06, |
|
"loss": 0.1932, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.13513513513513514, |
|
"grad_norm": 1.8959629535675049, |
|
"learning_rate": 4.134010484180368e-06, |
|
"loss": 0.16, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.13570055411059595, |
|
"grad_norm": 3.661323070526123, |
|
"learning_rate": 4.110086347153807e-06, |
|
"loss": 0.2053, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.13570055411059595, |
|
"eval_loss": 0.3416612446308136, |
|
"eval_runtime": 39.5846, |
|
"eval_samples_per_second": 12.631, |
|
"eval_steps_per_second": 12.631, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.13626597308605676, |
|
"grad_norm": 2.4910035133361816, |
|
"learning_rate": 4.0861474083878765e-06, |
|
"loss": 0.0915, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.1368313920615176, |
|
"grad_norm": 3.023247718811035, |
|
"learning_rate": 4.062194651918585e-06, |
|
"loss": 0.1348, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.1373968110369784, |
|
"grad_norm": 2.995408773422241, |
|
"learning_rate": 4.0382290623499384e-06, |
|
"loss": 0.226, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.1379622300124392, |
|
"grad_norm": 0.3714699149131775, |
|
"learning_rate": 4.014251624813453e-06, |
|
"loss": 0.1973, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.13852764898790004, |
|
"grad_norm": 3.3884501457214355, |
|
"learning_rate": 3.990263324927675e-06, |
|
"loss": 0.3278, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.13909306796336085, |
|
"grad_norm": 5.731414318084717, |
|
"learning_rate": 3.966265148757655e-06, |
|
"loss": 0.2329, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.13965848693882166, |
|
"grad_norm": 4.77826452255249, |
|
"learning_rate": 3.9422580827744224e-06, |
|
"loss": 0.2764, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.1402239059142825, |
|
"grad_norm": 0.8867257833480835, |
|
"learning_rate": 3.9182431138144315e-06, |
|
"loss": 0.274, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.1407893248897433, |
|
"grad_norm": 0.6366099715232849, |
|
"learning_rate": 3.894221229038995e-06, |
|
"loss": 0.2898, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.1413547438652041, |
|
"grad_norm": 8.020559310913086, |
|
"learning_rate": 3.870193415893709e-06, |
|
"loss": 0.2701, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.14192016284066494, |
|
"grad_norm": 0.9654809236526489, |
|
"learning_rate": 3.846160662067859e-06, |
|
"loss": 0.1817, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.14248558181612575, |
|
"grad_norm": 3.0796737670898438, |
|
"learning_rate": 3.8221239554538275e-06, |
|
"loss": 0.1611, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.14305100079158656, |
|
"grad_norm": 3.5335640907287598, |
|
"learning_rate": 3.798084284106478e-06, |
|
"loss": 0.2191, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.1436164197670474, |
|
"grad_norm": 7.279317378997803, |
|
"learning_rate": 3.7740426362025424e-06, |
|
"loss": 0.2094, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.1441818387425082, |
|
"grad_norm": 0.32002538442611694, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.1295, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.1441818387425082, |
|
"eval_loss": 0.35380080342292786, |
|
"eval_runtime": 39.6008, |
|
"eval_samples_per_second": 12.626, |
|
"eval_steps_per_second": 12.626, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.144747257717969, |
|
"grad_norm": 0.5581704378128052, |
|
"learning_rate": 3.7259573637974587e-06, |
|
"loss": 0.1194, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.14531267669342984, |
|
"grad_norm": 1.0721137523651123, |
|
"learning_rate": 3.701915715893523e-06, |
|
"loss": 0.2054, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.14587809566889065, |
|
"grad_norm": 0.565732479095459, |
|
"learning_rate": 3.677876044546174e-06, |
|
"loss": 0.2069, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.14644351464435146, |
|
"grad_norm": 7.235752105712891, |
|
"learning_rate": 3.6538393379321427e-06, |
|
"loss": 0.2537, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.1470089336198123, |
|
"grad_norm": 3.3759572505950928, |
|
"learning_rate": 3.6298065841062934e-06, |
|
"loss": 0.1357, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.1475743525952731, |
|
"grad_norm": 0.9213785529136658, |
|
"learning_rate": 3.6057787709610064e-06, |
|
"loss": 0.2335, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.1481397715707339, |
|
"grad_norm": 2.818834066390991, |
|
"learning_rate": 3.5817568861855708e-06, |
|
"loss": 0.2965, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.14870519054619474, |
|
"grad_norm": 1.4139299392700195, |
|
"learning_rate": 3.557741917225579e-06, |
|
"loss": 0.2562, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.14927060952165555, |
|
"grad_norm": 1.7301828861236572, |
|
"learning_rate": 3.5337348512423468e-06, |
|
"loss": 0.1434, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.14983602849711636, |
|
"grad_norm": 1.9895609617233276, |
|
"learning_rate": 3.5097366750723275e-06, |
|
"loss": 0.1388, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.1504014474725772, |
|
"grad_norm": 2.680075168609619, |
|
"learning_rate": 3.4857483751865478e-06, |
|
"loss": 0.1759, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.150966866448038, |
|
"grad_norm": 4.484714031219482, |
|
"learning_rate": 3.461770937650064e-06, |
|
"loss": 0.155, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.1515322854234988, |
|
"grad_norm": 1.2487350702285767, |
|
"learning_rate": 3.437805348081416e-06, |
|
"loss": 0.1466, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.15209770439895962, |
|
"grad_norm": 2.4353535175323486, |
|
"learning_rate": 3.413852591612125e-06, |
|
"loss": 0.1878, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.15266312337442045, |
|
"grad_norm": 4.543776035308838, |
|
"learning_rate": 3.389913652846194e-06, |
|
"loss": 0.2557, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.15266312337442045, |
|
"eval_loss": 0.35080721974372864, |
|
"eval_runtime": 39.7283, |
|
"eval_samples_per_second": 12.585, |
|
"eval_steps_per_second": 12.585, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.15322854234988126, |
|
"grad_norm": 4.765056610107422, |
|
"learning_rate": 3.365989515819633e-06, |
|
"loss": 0.2985, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.15379396132534207, |
|
"grad_norm": 0.7188260555267334, |
|
"learning_rate": 3.34208116396001e-06, |
|
"loss": 0.2191, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.1543593803008029, |
|
"grad_norm": 3.8512814044952393, |
|
"learning_rate": 3.318189580046028e-06, |
|
"loss": 0.2997, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.1549247992762637, |
|
"grad_norm": 4.674261093139648, |
|
"learning_rate": 3.294315746167124e-06, |
|
"loss": 0.2117, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.15549021825172452, |
|
"grad_norm": 3.1170215606689453, |
|
"learning_rate": 3.2704606436831023e-06, |
|
"loss": 0.2079, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.15605563722718535, |
|
"grad_norm": 10.292257308959961, |
|
"learning_rate": 3.2466252531837934e-06, |
|
"loss": 0.2417, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.15662105620264616, |
|
"grad_norm": 2.266889810562134, |
|
"learning_rate": 3.2228105544487405e-06, |
|
"loss": 0.1316, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.15718647517810697, |
|
"grad_norm": 0.4365326762199402, |
|
"learning_rate": 3.1990175264069333e-06, |
|
"loss": 0.2402, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.1577518941535678, |
|
"grad_norm": 2.5704283714294434, |
|
"learning_rate": 3.1752471470965653e-06, |
|
"loss": 0.1469, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.1583173131290286, |
|
"grad_norm": 3.8729872703552246, |
|
"learning_rate": 3.151500393624829e-06, |
|
"loss": 0.1652, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.15888273210448942, |
|
"grad_norm": 2.1594536304473877, |
|
"learning_rate": 3.127778242127747e-06, |
|
"loss": 0.1545, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.15944815107995025, |
|
"grad_norm": 4.568520545959473, |
|
"learning_rate": 3.104081667730055e-06, |
|
"loss": 0.1478, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.16001357005541106, |
|
"grad_norm": 5.309835433959961, |
|
"learning_rate": 3.0804116445051133e-06, |
|
"loss": 0.2298, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.16057898903087187, |
|
"grad_norm": 3.718083381652832, |
|
"learning_rate": 3.0567691454348674e-06, |
|
"loss": 0.229, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.1611444080063327, |
|
"grad_norm": 0.4385773837566376, |
|
"learning_rate": 3.033155142369855e-06, |
|
"loss": 0.1503, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.1611444080063327, |
|
"eval_loss": 0.3485355079174042, |
|
"eval_runtime": 39.9617, |
|
"eval_samples_per_second": 12.512, |
|
"eval_steps_per_second": 12.512, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.1617098269817935, |
|
"grad_norm": 2.906590223312378, |
|
"learning_rate": 3.009570605989249e-06, |
|
"loss": 0.2858, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.16227524595725432, |
|
"grad_norm": 0.3650115728378296, |
|
"learning_rate": 2.986016505760967e-06, |
|
"loss": 0.1283, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.16284066493271515, |
|
"grad_norm": 4.489029884338379, |
|
"learning_rate": 2.962493809901815e-06, |
|
"loss": 0.2333, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.16340608390817596, |
|
"grad_norm": 3.8139522075653076, |
|
"learning_rate": 2.9390034853376875e-06, |
|
"loss": 0.1783, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.16397150288363677, |
|
"grad_norm": 0.4647109806537628, |
|
"learning_rate": 2.9155464976638217e-06, |
|
"loss": 0.16, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.1645369218590976, |
|
"grad_norm": 1.2971251010894775, |
|
"learning_rate": 2.8921238111051057e-06, |
|
"loss": 0.1817, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.1651023408345584, |
|
"grad_norm": 3.0727622509002686, |
|
"learning_rate": 2.8687363884764434e-06, |
|
"loss": 0.1837, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.16566775981001922, |
|
"grad_norm": 1.7231451272964478, |
|
"learning_rate": 2.8453851911431783e-06, |
|
"loss": 0.2391, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.16623317878548005, |
|
"grad_norm": 4.894044876098633, |
|
"learning_rate": 2.822071178981572e-06, |
|
"loss": 0.179, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.16679859776094086, |
|
"grad_norm": 0.9752815365791321, |
|
"learning_rate": 2.7987953103393484e-06, |
|
"loss": 0.1575, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.16736401673640167, |
|
"grad_norm": 0.42615827918052673, |
|
"learning_rate": 2.7755585419963026e-06, |
|
"loss": 0.154, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.1679294357118625, |
|
"grad_norm": 2.923886299133301, |
|
"learning_rate": 2.7523618291249687e-06, |
|
"loss": 0.1766, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.1684948546873233, |
|
"grad_norm": 0.335527241230011, |
|
"learning_rate": 2.729206125251359e-06, |
|
"loss": 0.1634, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.16906027366278412, |
|
"grad_norm": 1.2382445335388184, |
|
"learning_rate": 2.7060923822157638e-06, |
|
"loss": 0.1298, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.16962569263824495, |
|
"grad_norm": 0.7978836297988892, |
|
"learning_rate": 2.6830215501336288e-06, |
|
"loss": 0.1611, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.16962569263824495, |
|
"eval_loss": 0.33967724442481995, |
|
"eval_runtime": 40.0485, |
|
"eval_samples_per_second": 12.485, |
|
"eval_steps_per_second": 12.485, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.17019111161370576, |
|
"grad_norm": 2.1920552253723145, |
|
"learning_rate": 2.6599945773564997e-06, |
|
"loss": 0.21, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.17075653058916657, |
|
"grad_norm": 14.545994758605957, |
|
"learning_rate": 2.6370124104330357e-06, |
|
"loss": 0.1796, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.1713219495646274, |
|
"grad_norm": 3.3877086639404297, |
|
"learning_rate": 2.614075994070105e-06, |
|
"loss": 0.0967, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.1718873685400882, |
|
"grad_norm": 3.582965612411499, |
|
"learning_rate": 2.591186271093948e-06, |
|
"loss": 0.2305, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.17245278751554902, |
|
"grad_norm": 2.486964225769043, |
|
"learning_rate": 2.568344182411423e-06, |
|
"loss": 0.1499, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.17301820649100985, |
|
"grad_norm": 6.171244144439697, |
|
"learning_rate": 2.5455506669713293e-06, |
|
"loss": 0.1886, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.17358362546647066, |
|
"grad_norm": 0.24228227138519287, |
|
"learning_rate": 2.522806661725812e-06, |
|
"loss": 0.156, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.17414904444193147, |
|
"grad_norm": 0.5226927995681763, |
|
"learning_rate": 2.5001131015918444e-06, |
|
"loss": 0.1678, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.17471446341739227, |
|
"grad_norm": 0.41159191727638245, |
|
"learning_rate": 2.4774709194127973e-06, |
|
"loss": 0.1768, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.1752798823928531, |
|
"grad_norm": 2.998814105987549, |
|
"learning_rate": 2.4548810459200973e-06, |
|
"loss": 0.2387, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.17584530136831392, |
|
"grad_norm": 22.550670623779297, |
|
"learning_rate": 2.4323444096949647e-06, |
|
"loss": 0.2979, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.17641072034377472, |
|
"grad_norm": 7.358113765716553, |
|
"learning_rate": 2.409861937130248e-06, |
|
"loss": 0.3022, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.17697613931923556, |
|
"grad_norm": 1.2710931301116943, |
|
"learning_rate": 2.3874345523923327e-06, |
|
"loss": 0.1331, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.17754155829469637, |
|
"grad_norm": 5.509647846221924, |
|
"learning_rate": 2.3650631773831644e-06, |
|
"loss": 0.1751, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.17810697727015717, |
|
"grad_norm": 0.6605579853057861, |
|
"learning_rate": 2.3427487317023477e-06, |
|
"loss": 0.178, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.17810697727015717, |
|
"eval_loss": 0.3411501944065094, |
|
"eval_runtime": 39.7859, |
|
"eval_samples_per_second": 12.567, |
|
"eval_steps_per_second": 12.567, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.178672396245618, |
|
"grad_norm": 0.5948871970176697, |
|
"learning_rate": 2.320492132609344e-06, |
|
"loss": 0.1548, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.17923781522107882, |
|
"grad_norm": 0.9561953544616699, |
|
"learning_rate": 2.2982942949857705e-06, |
|
"loss": 0.2065, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.17980323419653962, |
|
"grad_norm": 0.804959237575531, |
|
"learning_rate": 2.276156131297787e-06, |
|
"loss": 0.1109, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.18036865317200046, |
|
"grad_norm": 0.41093799471855164, |
|
"learning_rate": 2.254078551558594e-06, |
|
"loss": 0.1668, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.18093407214746127, |
|
"grad_norm": 2.7389471530914307, |
|
"learning_rate": 2.2320624632910232e-06, |
|
"loss": 0.1489, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.18149949112292207, |
|
"grad_norm": 0.720253050327301, |
|
"learning_rate": 2.210108771490233e-06, |
|
"loss": 0.281, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.1820649100983829, |
|
"grad_norm": 6.099893093109131, |
|
"learning_rate": 2.1882183785865047e-06, |
|
"loss": 0.153, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.18263032907384372, |
|
"grad_norm": 1.5762041807174683, |
|
"learning_rate": 2.166392184408152e-06, |
|
"loss": 0.1278, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.18319574804930452, |
|
"grad_norm": 0.46884018182754517, |
|
"learning_rate": 2.1446310861445306e-06, |
|
"loss": 0.1056, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.18376116702476536, |
|
"grad_norm": 0.27693289518356323, |
|
"learning_rate": 2.1229359783091576e-06, |
|
"loss": 0.2258, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.18432658600022617, |
|
"grad_norm": 2.051727056503296, |
|
"learning_rate": 2.1013077527029428e-06, |
|
"loss": 0.2151, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.18489200497568697, |
|
"grad_norm": 3.1207640171051025, |
|
"learning_rate": 2.079747298377528e-06, |
|
"loss": 0.201, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.1854574239511478, |
|
"grad_norm": 0.9300882816314697, |
|
"learning_rate": 2.058255501598745e-06, |
|
"loss": 0.1664, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.18602284292660862, |
|
"grad_norm": 0.8406446576118469, |
|
"learning_rate": 2.0368332458101814e-06, |
|
"loss": 0.1616, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.18658826190206942, |
|
"grad_norm": 0.4233115315437317, |
|
"learning_rate": 2.015481411596869e-06, |
|
"loss": 0.1332, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.18658826190206942, |
|
"eval_loss": 0.3380352258682251, |
|
"eval_runtime": 39.6159, |
|
"eval_samples_per_second": 12.621, |
|
"eval_steps_per_second": 12.621, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.18715368087753026, |
|
"grad_norm": 1.925206184387207, |
|
"learning_rate": 1.9942008766490793e-06, |
|
"loss": 0.2432, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.18771909985299107, |
|
"grad_norm": 3.6537692546844482, |
|
"learning_rate": 1.9729925157262554e-06, |
|
"loss": 0.1625, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.18828451882845187, |
|
"grad_norm": 5.3204145431518555, |
|
"learning_rate": 1.9518572006210484e-06, |
|
"loss": 0.1764, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.1888499378039127, |
|
"grad_norm": 0.5921250581741333, |
|
"learning_rate": 1.9307958001234794e-06, |
|
"loss": 0.2376, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.18941535677937352, |
|
"grad_norm": 0.35270601511001587, |
|
"learning_rate": 1.9098091799852347e-06, |
|
"loss": 0.1107, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.18998077575483432, |
|
"grad_norm": 0.6701639890670776, |
|
"learning_rate": 1.8888982028840636e-06, |
|
"loss": 0.118, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.19054619473029516, |
|
"grad_norm": 0.3764590919017792, |
|
"learning_rate": 1.8680637283883355e-06, |
|
"loss": 0.2243, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.19111161370575597, |
|
"grad_norm": 3.9748284816741943, |
|
"learning_rate": 1.8473066129216927e-06, |
|
"loss": 0.1741, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.19167703268121677, |
|
"grad_norm": 0.7545977830886841, |
|
"learning_rate": 1.8266277097278527e-06, |
|
"loss": 0.1912, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.1922424516566776, |
|
"grad_norm": 4.927014350891113, |
|
"learning_rate": 1.8060278688355313e-06, |
|
"loss": 0.1867, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.19280787063213842, |
|
"grad_norm": 0.6150132417678833, |
|
"learning_rate": 1.7855079370235043e-06, |
|
"loss": 0.1054, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.19337328960759922, |
|
"grad_norm": 0.36793455481529236, |
|
"learning_rate": 1.7650687577857972e-06, |
|
"loss": 0.1543, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.19393870858306006, |
|
"grad_norm": 3.976844310760498, |
|
"learning_rate": 1.7447111712970138e-06, |
|
"loss": 0.2848, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.19450412755852087, |
|
"grad_norm": 5.263996124267578, |
|
"learning_rate": 1.7244360143778004e-06, |
|
"loss": 0.1584, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.19506954653398167, |
|
"grad_norm": 3.9323816299438477, |
|
"learning_rate": 1.704244120460443e-06, |
|
"loss": 0.1874, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.19506954653398167, |
|
"eval_loss": 0.3385576903820038, |
|
"eval_runtime": 39.7276, |
|
"eval_samples_per_second": 12.586, |
|
"eval_steps_per_second": 12.586, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.1956349655094425, |
|
"grad_norm": 1.2136938571929932, |
|
"learning_rate": 1.6841363195546162e-06, |
|
"loss": 0.1443, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.19620038448490332, |
|
"grad_norm": 0.9482824206352234, |
|
"learning_rate": 1.6641134382132576e-06, |
|
"loss": 0.2659, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.19676580346036412, |
|
"grad_norm": 10.65390396118164, |
|
"learning_rate": 1.6441762994985947e-06, |
|
"loss": 0.2336, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.19733122243582496, |
|
"grad_norm": 2.970275402069092, |
|
"learning_rate": 1.6243257229483141e-06, |
|
"loss": 0.2652, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.19789664141128577, |
|
"grad_norm": 0.24105581641197205, |
|
"learning_rate": 1.6045625245418648e-06, |
|
"loss": 0.0749, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.19846206038674657, |
|
"grad_norm": 5.464949131011963, |
|
"learning_rate": 1.584887516666928e-06, |
|
"loss": 0.1836, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.19902747936220738, |
|
"grad_norm": 5.387618064880371, |
|
"learning_rate": 1.565301508086015e-06, |
|
"loss": 0.2545, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.19959289833766822, |
|
"grad_norm": 0.504027783870697, |
|
"learning_rate": 1.5458053039032263e-06, |
|
"loss": 0.2082, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.20015831731312902, |
|
"grad_norm": 4.017180919647217, |
|
"learning_rate": 1.5263997055311536e-06, |
|
"loss": 0.1656, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.20072373628858983, |
|
"grad_norm": 5.102208137512207, |
|
"learning_rate": 1.5070855106579404e-06, |
|
"loss": 0.2146, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.20128915526405067, |
|
"grad_norm": 2.9926159381866455, |
|
"learning_rate": 1.4878635132144885e-06, |
|
"loss": 0.1647, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.20185457423951148, |
|
"grad_norm": 2.709998369216919, |
|
"learning_rate": 1.4687345033418258e-06, |
|
"loss": 0.156, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.20241999321497228, |
|
"grad_norm": 2.5307137966156006, |
|
"learning_rate": 1.4496992673586262e-06, |
|
"loss": 0.2059, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.20298541219043312, |
|
"grad_norm": 2.756467342376709, |
|
"learning_rate": 1.4307585877288822e-06, |
|
"loss": 0.2409, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.20355083116589393, |
|
"grad_norm": 4.961981296539307, |
|
"learning_rate": 1.4119132430297496e-06, |
|
"loss": 0.2549, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.20355083116589393, |
|
"eval_loss": 0.32636332511901855, |
|
"eval_runtime": 39.6922, |
|
"eval_samples_per_second": 12.597, |
|
"eval_steps_per_second": 12.597, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.20411625014135473, |
|
"grad_norm": 1.8197344541549683, |
|
"learning_rate": 1.3931640079195365e-06, |
|
"loss": 0.2096, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.20468166911681557, |
|
"grad_norm": 5.955694198608398, |
|
"learning_rate": 1.3745116531058645e-06, |
|
"loss": 0.1543, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.20524708809227638, |
|
"grad_norm": 10.277007102966309, |
|
"learning_rate": 1.3559569453139797e-06, |
|
"loss": 0.2748, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.20581250706773718, |
|
"grad_norm": 0.30265048146247864, |
|
"learning_rate": 1.3375006472552483e-06, |
|
"loss": 0.1597, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.20637792604319802, |
|
"grad_norm": 6.251509666442871, |
|
"learning_rate": 1.3191435175957945e-06, |
|
"loss": 0.22, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.20694334501865883, |
|
"grad_norm": 3.4624879360198975, |
|
"learning_rate": 1.3008863109253174e-06, |
|
"loss": 0.1568, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.20750876399411963, |
|
"grad_norm": 2.0883748531341553, |
|
"learning_rate": 1.282729777726078e-06, |
|
"loss": 0.2017, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.20807418296958047, |
|
"grad_norm": 1.0840513706207275, |
|
"learning_rate": 1.2646746643420392e-06, |
|
"loss": 0.2098, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.20863960194504128, |
|
"grad_norm": 6.809708595275879, |
|
"learning_rate": 1.2467217129481952e-06, |
|
"loss": 0.1643, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.20920502092050208, |
|
"grad_norm": 7.467820167541504, |
|
"learning_rate": 1.2288716615200617e-06, |
|
"loss": 0.1661, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.20977043989596292, |
|
"grad_norm": 2.9000637531280518, |
|
"learning_rate": 1.2111252438033404e-06, |
|
"loss": 0.2398, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.21033585887142373, |
|
"grad_norm": 4.400672435760498, |
|
"learning_rate": 1.1934831892837524e-06, |
|
"loss": 0.1884, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.21090127784688453, |
|
"grad_norm": 4.535418510437012, |
|
"learning_rate": 1.1759462231570618e-06, |
|
"loss": 0.1855, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.21146669682234537, |
|
"grad_norm": 7.94601583480835, |
|
"learning_rate": 1.1585150662992578e-06, |
|
"loss": 0.1889, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.21203211579780618, |
|
"grad_norm": 0.3384474515914917, |
|
"learning_rate": 1.1411904352369262e-06, |
|
"loss": 0.1916, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.21203211579780618, |
|
"eval_loss": 0.33191370964050293, |
|
"eval_runtime": 39.8752, |
|
"eval_samples_per_second": 12.539, |
|
"eval_steps_per_second": 12.539, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.21259753477326698, |
|
"grad_norm": 2.803856611251831, |
|
"learning_rate": 1.1239730421177952e-06, |
|
"loss": 0.1466, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.21316295374872782, |
|
"grad_norm": 0.442586213350296, |
|
"learning_rate": 1.1068635946814569e-06, |
|
"loss": 0.2555, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.21372837272418863, |
|
"grad_norm": 2.499997615814209, |
|
"learning_rate": 1.0898627962302831e-06, |
|
"loss": 0.2612, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.21429379169964943, |
|
"grad_norm": 0.28989672660827637, |
|
"learning_rate": 1.072971345600513e-06, |
|
"loss": 0.2013, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.21485921067511027, |
|
"grad_norm": 8.835334777832031, |
|
"learning_rate": 1.056189937133522e-06, |
|
"loss": 0.2094, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.21542462965057108, |
|
"grad_norm": 4.539586544036865, |
|
"learning_rate": 1.0395192606472822e-06, |
|
"loss": 0.1693, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.21599004862603188, |
|
"grad_norm": 2.8156323432922363, |
|
"learning_rate": 1.0229600014080101e-06, |
|
"loss": 0.1936, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.21655546760149272, |
|
"grad_norm": 1.8551064729690552, |
|
"learning_rate": 1.006512840101995e-06, |
|
"loss": 0.1604, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.21712088657695353, |
|
"grad_norm": 10.510647773742676, |
|
"learning_rate": 9.90178452807619e-07, |
|
"loss": 0.2973, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.21768630555241433, |
|
"grad_norm": 0.41059374809265137, |
|
"learning_rate": 9.739575109675674e-07, |
|
"loss": 0.1143, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.21825172452787517, |
|
"grad_norm": 3.6061229705810547, |
|
"learning_rate": 9.578506813612243e-07, |
|
"loss": 0.2298, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.21881714350333598, |
|
"grad_norm": 5.259402751922607, |
|
"learning_rate": 9.418586260772695e-07, |
|
"loss": 0.2009, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.21938256247879678, |
|
"grad_norm": 3.3045120239257812, |
|
"learning_rate": 9.259820024864594e-07, |
|
"loss": 0.2024, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.21994798145425762, |
|
"grad_norm": 2.2371156215667725, |
|
"learning_rate": 9.102214632146059e-07, |
|
"loss": 0.6061, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.22051340042971843, |
|
"grad_norm": 4.4306416511535645, |
|
"learning_rate": 8.94577656115746e-07, |
|
"loss": 0.2206, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.22051340042971843, |
|
"eval_loss": 0.33228054642677307, |
|
"eval_runtime": 39.785, |
|
"eval_samples_per_second": 12.568, |
|
"eval_steps_per_second": 12.568, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.22107881940517923, |
|
"grad_norm": 0.37068045139312744, |
|
"learning_rate": 8.790512242455198e-07, |
|
"loss": 0.2174, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.22164423838064004, |
|
"grad_norm": 4.47023344039917, |
|
"learning_rate": 8.636428058347274e-07, |
|
"loss": 0.3149, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.22220965735610088, |
|
"grad_norm": 7.323469638824463, |
|
"learning_rate": 8.483530342630993e-07, |
|
"loss": 0.2612, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.22277507633156168, |
|
"grad_norm": 0.32100722193717957, |
|
"learning_rate": 8.331825380332599e-07, |
|
"loss": 0.2203, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2233404953070225, |
|
"grad_norm": 2.3755152225494385, |
|
"learning_rate": 8.181319407448884e-07, |
|
"loss": 0.2012, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.22390591428248333, |
|
"grad_norm": 9.208174705505371, |
|
"learning_rate": 8.032018610690914e-07, |
|
"loss": 0.2391, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.22447133325794413, |
|
"grad_norm": 0.4264732897281647, |
|
"learning_rate": 7.883929127229665e-07, |
|
"loss": 0.2285, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.22503675223340494, |
|
"grad_norm": 3.967883586883545, |
|
"learning_rate": 7.737057044443793e-07, |
|
"loss": 0.2079, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.22560217120886578, |
|
"grad_norm": 0.47608694434165955, |
|
"learning_rate": 7.591408399669337e-07, |
|
"loss": 0.1906, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.22616759018432658, |
|
"grad_norm": 0.6385930776596069, |
|
"learning_rate": 7.446989179951632e-07, |
|
"loss": 0.1014, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.2267330091597874, |
|
"grad_norm": 2.06697154045105, |
|
"learning_rate": 7.303805321799146e-07, |
|
"loss": 0.2238, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.22729842813524823, |
|
"grad_norm": 1.6443243026733398, |
|
"learning_rate": 7.161862710939476e-07, |
|
"loss": 0.1686, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.22786384711070903, |
|
"grad_norm": 2.0701982975006104, |
|
"learning_rate": 7.021167182077403e-07, |
|
"loss": 0.2322, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.22842926608616984, |
|
"grad_norm": 0.38208675384521484, |
|
"learning_rate": 6.881724518655049e-07, |
|
"loss": 0.2221, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.22899468506163068, |
|
"grad_norm": 3.5175411701202393, |
|
"learning_rate": 6.743540452614152e-07, |
|
"loss": 0.182, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.22899468506163068, |
|
"eval_loss": 0.32821282744407654, |
|
"eval_runtime": 39.6922, |
|
"eval_samples_per_second": 12.597, |
|
"eval_steps_per_second": 12.597, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.22956010403709148, |
|
"grad_norm": 4.386037349700928, |
|
"learning_rate": 6.606620664160438e-07, |
|
"loss": 0.1743, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.2301255230125523, |
|
"grad_norm": 0.43842968344688416, |
|
"learning_rate": 6.470970781530139e-07, |
|
"loss": 0.1721, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.23069094198801313, |
|
"grad_norm": 0.4264715611934662, |
|
"learning_rate": 6.336596380758604e-07, |
|
"loss": 0.1568, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.23125636096347393, |
|
"grad_norm": 0.2072145938873291, |
|
"learning_rate": 6.203502985451152e-07, |
|
"loss": 0.2589, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.23182177993893474, |
|
"grad_norm": 0.47133326530456543, |
|
"learning_rate": 6.071696066555978e-07, |
|
"loss": 0.2778, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.23238719891439558, |
|
"grad_norm": 9.583172798156738, |
|
"learning_rate": 5.941181042139258e-07, |
|
"loss": 0.322, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.23295261788985638, |
|
"grad_norm": 2.681870460510254, |
|
"learning_rate": 5.811963277162466e-07, |
|
"loss": 0.0906, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.2335180368653172, |
|
"grad_norm": 1.2123790979385376, |
|
"learning_rate": 5.684048083261789e-07, |
|
"loss": 0.1341, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.23408345584077803, |
|
"grad_norm": 0.409408837556839, |
|
"learning_rate": 5.557440718529848e-07, |
|
"loss": 0.0791, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.23464887481623883, |
|
"grad_norm": 5.215662002563477, |
|
"learning_rate": 5.432146387299522e-07, |
|
"loss": 0.2075, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.23521429379169964, |
|
"grad_norm": 10.576168060302734, |
|
"learning_rate": 5.308170239930022e-07, |
|
"loss": 0.2556, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.23577971276716048, |
|
"grad_norm": 0.5010024309158325, |
|
"learning_rate": 5.185517372595187e-07, |
|
"loss": 0.1953, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.23634513174262128, |
|
"grad_norm": 4.521603107452393, |
|
"learning_rate": 5.064192827073995e-07, |
|
"loss": 0.1843, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.2369105507180821, |
|
"grad_norm": 6.856910705566406, |
|
"learning_rate": 4.944201590543308e-07, |
|
"loss": 0.1933, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.23747596969354293, |
|
"grad_norm": 3.1912882328033447, |
|
"learning_rate": 4.825548595372898e-07, |
|
"loss": 0.1511, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.23747596969354293, |
|
"eval_loss": 0.32930639386177063, |
|
"eval_runtime": 39.7678, |
|
"eval_samples_per_second": 12.573, |
|
"eval_steps_per_second": 12.573, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.23804138866900373, |
|
"grad_norm": 3.913271188735962, |
|
"learning_rate": 4.7082387189226646e-07, |
|
"loss": 0.143, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.23860680764446454, |
|
"grad_norm": 7.505680084228516, |
|
"learning_rate": 4.5922767833421454e-07, |
|
"loss": 0.2034, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.23917222661992538, |
|
"grad_norm": 2.7881624698638916, |
|
"learning_rate": 4.477667555372326e-07, |
|
"loss": 0.2107, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.23973764559538618, |
|
"grad_norm": 8.819357872009277, |
|
"learning_rate": 4.364415746149678e-07, |
|
"loss": 0.2098, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.240303064570847, |
|
"grad_norm": 10.310869216918945, |
|
"learning_rate": 4.2525260110124964e-07, |
|
"loss": 0.1681, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.24086848354630783, |
|
"grad_norm": 0.3812723457813263, |
|
"learning_rate": 4.1420029493095623e-07, |
|
"loss": 0.1556, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.24143390252176863, |
|
"grad_norm": 0.6693885922431946, |
|
"learning_rate": 4.032851104211036e-07, |
|
"loss": 0.2379, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.24199932149722944, |
|
"grad_norm": 3.099388360977173, |
|
"learning_rate": 3.925074962521762e-07, |
|
"loss": 0.2339, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.24256474047269028, |
|
"grad_norm": 4.581945419311523, |
|
"learning_rate": 3.818678954496787e-07, |
|
"loss": 0.1822, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.24313015944815108, |
|
"grad_norm": 3.103391647338867, |
|
"learning_rate": 3.713667453659287e-07, |
|
"loss": 0.1837, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.2436955784236119, |
|
"grad_norm": 1.2889468669891357, |
|
"learning_rate": 3.6100447766207473e-07, |
|
"loss": 0.1532, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.2442609973990727, |
|
"grad_norm": 0.22819407284259796, |
|
"learning_rate": 3.5078151829035693e-07, |
|
"loss": 0.1405, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.24482641637453353, |
|
"grad_norm": 6.73429012298584, |
|
"learning_rate": 3.4069828747659405e-07, |
|
"loss": 0.2636, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.24539183534999434, |
|
"grad_norm": 2.3190746307373047, |
|
"learning_rate": 3.3075519970291144e-07, |
|
"loss": 0.3083, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.24595725432545515, |
|
"grad_norm": 0.4830774664878845, |
|
"learning_rate": 3.209526636907036e-07, |
|
"loss": 0.1964, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.24595725432545515, |
|
"eval_loss": 0.32908380031585693, |
|
"eval_runtime": 39.973, |
|
"eval_samples_per_second": 12.508, |
|
"eval_steps_per_second": 12.508, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.24652267330091598, |
|
"grad_norm": 2.5670671463012695, |
|
"learning_rate": 3.1129108238383095e-07, |
|
"loss": 0.2492, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.2470880922763768, |
|
"grad_norm": 3.091651678085327, |
|
"learning_rate": 3.017708529320604e-07, |
|
"loss": 0.25, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.2476535112518376, |
|
"grad_norm": 7.4671478271484375, |
|
"learning_rate": 2.923923666747357e-07, |
|
"loss": 0.2727, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.24821893022729843, |
|
"grad_norm": 1.166784644126892, |
|
"learning_rate": 2.8315600912469477e-07, |
|
"loss": 0.1832, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.24878434920275924, |
|
"grad_norm": 0.42220553755760193, |
|
"learning_rate": 2.740621599524189e-07, |
|
"loss": 0.0977, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.24934976817822005, |
|
"grad_norm": 0.39072978496551514, |
|
"learning_rate": 2.651111929704303e-07, |
|
"loss": 0.2116, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.24991518715368088, |
|
"grad_norm": 3.5217363834381104, |
|
"learning_rate": 2.563034761179223e-07, |
|
"loss": 0.3073, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.2504806061291417, |
|
"grad_norm": 11.77111530303955, |
|
"learning_rate": 2.476393714456384e-07, |
|
"loss": 0.2344, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.2510460251046025, |
|
"grad_norm": 3.195404052734375, |
|
"learning_rate": 2.391192351009855e-07, |
|
"loss": 0.1463, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.2516114440800633, |
|
"grad_norm": 0.4377438724040985, |
|
"learning_rate": 2.3074341731339837e-07, |
|
"loss": 0.2104, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.25217686305552417, |
|
"grad_norm": 0.9128416180610657, |
|
"learning_rate": 2.225122623799407e-07, |
|
"loss": 0.0992, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.252742282030985, |
|
"grad_norm": 0.349115788936615, |
|
"learning_rate": 2.1442610865115135e-07, |
|
"loss": 0.1147, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.2533077010064458, |
|
"grad_norm": 2.0376551151275635, |
|
"learning_rate": 2.0648528851714077e-07, |
|
"loss": 0.1576, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.2538731199819066, |
|
"grad_norm": 0.2783850133419037, |
|
"learning_rate": 1.9869012839392064e-07, |
|
"loss": 0.1495, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.2544385389573674, |
|
"grad_norm": 0.7911704778671265, |
|
"learning_rate": 1.9104094870999264e-07, |
|
"loss": 0.1709, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.2544385389573674, |
|
"eval_loss": 0.33011871576309204, |
|
"eval_runtime": 39.8147, |
|
"eval_samples_per_second": 12.558, |
|
"eval_steps_per_second": 12.558, |
|
"step": 4500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 150, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.5190440709750784e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
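A minimal sketch, assuming the object above was written by a Hugging Face Trainer as trainer_state.json inside a checkpoint directory (the path below is an assumption, not something the state file records), of how the "log_history" array can be parsed to recover the evaluation curve and the step with the lowest eval_loss:

import json

# Assumption: the state file lives at output/checkpoint-4500/trainer_state.json;
# adjust the path to wherever the Trainer wrote its checkpoints.
with open("output/checkpoint-4500/trainer_state.json") as f:
    state = json.load(f)

# Evaluation records are the log_history entries that carry "eval_loss";
# in this log they appear every 150 optimizer steps.
evals = [e for e in state["log_history"] if "eval_loss" in e]

best = min(evals, key=lambda e: e["eval_loss"])
print(f"lowest eval_loss {best['eval_loss']:.6f} at step {best['step']}")
# For the span logged above, the minimum (about 0.3264) falls at step 3600.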