{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.991437584497522,
  "eval_steps": 500,
  "global_step": 2770,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018026137899954935,
      "grad_norm": 21.723926544189453,
      "learning_rate": 4.981949458483755e-05,
      "loss": 0.8104,
      "step": 10
    },
    {
      "epoch": 0.03605227579990987,
      "grad_norm": 55.4254035949707,
      "learning_rate": 4.963898916967509e-05,
      "loss": 0.5178,
      "step": 20
    },
    {
      "epoch": 0.054078413699864804,
      "grad_norm": 41.4888801574707,
      "learning_rate": 4.945848375451264e-05,
      "loss": 0.5092,
      "step": 30
    },
    {
      "epoch": 0.07210455159981974,
      "grad_norm": 25.77608299255371,
      "learning_rate": 4.927797833935018e-05,
      "loss": 0.5581,
      "step": 40
    },
    {
      "epoch": 0.09013068949977468,
      "grad_norm": 61.5268669128418,
      "learning_rate": 4.909747292418773e-05,
      "loss": 0.5132,
      "step": 50
    },
    {
      "epoch": 0.10815682739972961,
      "grad_norm": 18.218244552612305,
      "learning_rate": 4.891696750902527e-05,
      "loss": 0.4614,
      "step": 60
    },
    {
      "epoch": 0.12618296529968454,
      "grad_norm": 30.51982879638672,
      "learning_rate": 4.873646209386282e-05,
      "loss": 0.4648,
      "step": 70
    },
    {
      "epoch": 0.14420910319963948,
      "grad_norm": 20.707260131835938,
      "learning_rate": 4.855595667870036e-05,
      "loss": 0.4317,
      "step": 80
    },
    {
      "epoch": 0.16223524109959442,
      "grad_norm": 21.699357986450195,
      "learning_rate": 4.837545126353791e-05,
      "loss": 0.4226,
      "step": 90
    },
    {
      "epoch": 0.18026137899954936,
      "grad_norm": 51.80731201171875,
      "learning_rate": 4.819494584837546e-05,
      "loss": 0.5002,
      "step": 100
    },
    {
      "epoch": 0.19828751689950427,
      "grad_norm": 44.694942474365234,
      "learning_rate": 4.8014440433213e-05,
      "loss": 0.4933,
      "step": 110
    },
    {
      "epoch": 0.21631365479945922,
      "grad_norm": 22.704530715942383,
      "learning_rate": 4.783393501805055e-05,
      "loss": 0.4399,
      "step": 120
    },
    {
      "epoch": 0.23433979269941416,
      "grad_norm": 26.04544448852539,
      "learning_rate": 4.765342960288809e-05,
      "loss": 0.438,
      "step": 130
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 14.642335891723633,
      "learning_rate": 4.747292418772563e-05,
      "loss": 0.4591,
      "step": 140
    },
    {
      "epoch": 0.270392068499324,
      "grad_norm": 16.13884162902832,
      "learning_rate": 4.7292418772563177e-05,
      "loss": 0.4546,
      "step": 150
    },
    {
      "epoch": 0.28841820639927895,
      "grad_norm": 29.33772087097168,
      "learning_rate": 4.711191335740072e-05,
      "loss": 0.4665,
      "step": 160
    },
    {
      "epoch": 0.3064443442992339,
      "grad_norm": 15.900433540344238,
      "learning_rate": 4.693140794223827e-05,
      "loss": 0.4703,
      "step": 170
    },
    {
      "epoch": 0.32447048219918884,
      "grad_norm": 17.246356964111328,
      "learning_rate": 4.675090252707581e-05,
      "loss": 0.426,
      "step": 180
    },
    {
      "epoch": 0.3424966200991438,
      "grad_norm": 25.457477569580078,
      "learning_rate": 4.657039711191336e-05,
      "loss": 0.4711,
      "step": 190
    },
    {
      "epoch": 0.3605227579990987,
      "grad_norm": 25.43963050842285,
      "learning_rate": 4.63898916967509e-05,
      "loss": 0.4547,
      "step": 200
    },
    {
      "epoch": 0.3785488958990536,
      "grad_norm": 32.49702072143555,
      "learning_rate": 4.620938628158845e-05,
      "loss": 0.4603,
      "step": 210
    },
    {
      "epoch": 0.39657503379900855,
      "grad_norm": 52.78520965576172,
      "learning_rate": 4.602888086642599e-05,
      "loss": 0.4471,
      "step": 220
    },
    {
      "epoch": 0.4146011716989635,
      "grad_norm": 16.70362663269043,
      "learning_rate": 4.584837545126354e-05,
      "loss": 0.4393,
      "step": 230
    },
    {
      "epoch": 0.43262730959891843,
      "grad_norm": 23.55246353149414,
      "learning_rate": 4.566787003610109e-05,
      "loss": 0.4256,
      "step": 240
    },
    {
      "epoch": 0.45065344749887337,
      "grad_norm": 18.868804931640625,
      "learning_rate": 4.548736462093863e-05,
      "loss": 0.4324,
      "step": 250
    },
    {
      "epoch": 0.4686795853988283,
      "grad_norm": 19.074827194213867,
      "learning_rate": 4.530685920577618e-05,
      "loss": 0.4419,
      "step": 260
    },
    {
      "epoch": 0.48670572329878325,
      "grad_norm": 29.232803344726562,
      "learning_rate": 4.5126353790613716e-05,
      "loss": 0.4179,
      "step": 270
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 17.780059814453125,
      "learning_rate": 4.494584837545127e-05,
      "loss": 0.3917,
      "step": 280
    },
    {
      "epoch": 0.5227579990986931,
      "grad_norm": 13.99399471282959,
      "learning_rate": 4.4765342960288806e-05,
      "loss": 0.4321,
      "step": 290
    },
    {
      "epoch": 0.540784136998648,
      "grad_norm": 14.700942039489746,
      "learning_rate": 4.458483754512636e-05,
      "loss": 0.4163,
      "step": 300
    },
    {
      "epoch": 0.558810274898603,
      "grad_norm": 17.43704605102539,
      "learning_rate": 4.44043321299639e-05,
      "loss": 0.4268,
      "step": 310
    },
    {
      "epoch": 0.5768364127985579,
      "grad_norm": 28.500301361083984,
      "learning_rate": 4.422382671480145e-05,
      "loss": 0.4229,
      "step": 320
    },
    {
      "epoch": 0.5948625506985128,
      "grad_norm": 36.14088439941406,
      "learning_rate": 4.404332129963899e-05,
      "loss": 0.4559,
      "step": 330
    },
    {
      "epoch": 0.6128886885984678,
      "grad_norm": 15.279230117797852,
      "learning_rate": 4.386281588447654e-05,
      "loss": 0.4067,
      "step": 340
    },
    {
      "epoch": 0.6309148264984227,
      "grad_norm": 21.569740295410156,
      "learning_rate": 4.368231046931408e-05,
      "loss": 0.4298,
      "step": 350
    },
    {
      "epoch": 0.6489409643983777,
      "grad_norm": 20.630115509033203,
      "learning_rate": 4.350180505415163e-05,
      "loss": 0.4173,
      "step": 360
    },
    {
      "epoch": 0.6669671022983326,
      "grad_norm": 15.515020370483398,
      "learning_rate": 4.332129963898917e-05,
      "loss": 0.4063,
      "step": 370
    },
    {
      "epoch": 0.6849932401982876,
      "grad_norm": 16.33180046081543,
      "learning_rate": 4.314079422382672e-05,
      "loss": 0.3651,
      "step": 380
    },
    {
      "epoch": 0.7030193780982424,
      "grad_norm": 16.77664566040039,
      "learning_rate": 4.296028880866426e-05,
      "loss": 0.3879,
      "step": 390
    },
    {
      "epoch": 0.7210455159981974,
      "grad_norm": 11.113824844360352,
      "learning_rate": 4.277978339350181e-05,
      "loss": 0.4205,
      "step": 400
    },
    {
      "epoch": 0.7390716538981523,
      "grad_norm": 17.835678100585938,
      "learning_rate": 4.259927797833935e-05,
      "loss": 0.4466,
      "step": 410
    },
    {
      "epoch": 0.7570977917981072,
      "grad_norm": 24.48379135131836,
      "learning_rate": 4.24187725631769e-05,
      "loss": 0.4084,
      "step": 420
    },
    {
      "epoch": 0.7751239296980622,
      "grad_norm": 29.183530807495117,
      "learning_rate": 4.223826714801444e-05,
      "loss": 0.3588,
      "step": 430
    },
    {
      "epoch": 0.7931500675980171,
      "grad_norm": 16.398386001586914,
      "learning_rate": 4.205776173285199e-05,
      "loss": 0.4594,
      "step": 440
    },
    {
      "epoch": 0.8111762054979721,
      "grad_norm": 24.11660385131836,
      "learning_rate": 4.187725631768953e-05,
      "loss": 0.376,
      "step": 450
    },
    {
      "epoch": 0.829202343397927,
      "grad_norm": 21.480924606323242,
      "learning_rate": 4.169675090252708e-05,
      "loss": 0.4035,
      "step": 460
    },
    {
      "epoch": 0.847228481297882,
      "grad_norm": 11.91422176361084,
      "learning_rate": 4.151624548736462e-05,
      "loss": 0.4074,
      "step": 470
    },
    {
      "epoch": 0.8652546191978369,
      "grad_norm": 17.17136001586914,
      "learning_rate": 4.1335740072202167e-05,
      "loss": 0.4259,
      "step": 480
    },
    {
      "epoch": 0.8832807570977917,
      "grad_norm": 15.024781227111816,
      "learning_rate": 4.115523465703972e-05,
      "loss": 0.413,
      "step": 490
    },
    {
      "epoch": 0.9013068949977467,
      "grad_norm": 15.877848625183105,
      "learning_rate": 4.0974729241877256e-05,
      "loss": 0.4013,
      "step": 500
    },
    {
      "epoch": 0.9193330328977016,
      "grad_norm": 20.711393356323242,
      "learning_rate": 4.079422382671481e-05,
      "loss": 0.3875,
      "step": 510
    },
    {
      "epoch": 0.9373591707976566,
      "grad_norm": 14.360701560974121,
      "learning_rate": 4.0613718411552346e-05,
      "loss": 0.4025,
      "step": 520
    },
    {
      "epoch": 0.9553853086976115,
      "grad_norm": 16.94271469116211,
      "learning_rate": 4.043321299638989e-05,
      "loss": 0.3971,
      "step": 530
    },
    {
      "epoch": 0.9734114465975665,
      "grad_norm": 23.218050003051758,
      "learning_rate": 4.0252707581227436e-05,
      "loss": 0.4305,
      "step": 540
    },
    {
      "epoch": 0.9914375844975214,
      "grad_norm": 25.439455032348633,
      "learning_rate": 4.007220216606498e-05,
      "loss": 0.3969,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8410362694300518,
      "eval_loss": 0.36654186248779297,
      "eval_precision": 0.7994483845547675,
      "eval_recall": 0.8871884564932225,
      "eval_runtime": 77.9824,
      "eval_samples_per_second": 59.924,
      "eval_steps_per_second": 7.502,
      "step": 555
    },
    {
      "epoch": 1.0090130689499774,
      "grad_norm": 23.230247497558594,
      "learning_rate": 3.989169675090253e-05,
      "loss": 0.3393,
      "step": 560
    },
    {
      "epoch": 1.0270392068499323,
      "grad_norm": 20.747711181640625,
      "learning_rate": 3.971119133574007e-05,
      "loss": 0.2951,
      "step": 570
    },
    {
      "epoch": 1.0450653447498874,
      "grad_norm": 16.3277645111084,
      "learning_rate": 3.953068592057762e-05,
      "loss": 0.3178,
      "step": 580
    },
    {
      "epoch": 1.0630914826498423,
      "grad_norm": 27.63875389099121,
      "learning_rate": 3.935018050541516e-05,
      "loss": 0.3043,
      "step": 590
    },
    {
      "epoch": 1.0811176205497972,
      "grad_norm": 24.972593307495117,
      "learning_rate": 3.916967509025271e-05,
      "loss": 0.3521,
      "step": 600
    },
    {
      "epoch": 1.099143758449752,
      "grad_norm": 22.260204315185547,
      "learning_rate": 3.898916967509025e-05,
      "loss": 0.2859,
      "step": 610
    },
    {
      "epoch": 1.117169896349707,
      "grad_norm": 18.97205924987793,
      "learning_rate": 3.88086642599278e-05,
      "loss": 0.3159,
      "step": 620
    },
    {
      "epoch": 1.135196034249662,
      "grad_norm": 25.5858097076416,
      "learning_rate": 3.862815884476535e-05,
      "loss": 0.2881,
      "step": 630
    },
    {
      "epoch": 1.153222172149617,
      "grad_norm": 16.255144119262695,
      "learning_rate": 3.844765342960289e-05,
      "loss": 0.356,
      "step": 640
    },
    {
      "epoch": 1.1712483100495719,
      "grad_norm": 16.83954429626465,
      "learning_rate": 3.826714801444044e-05,
      "loss": 0.2792,
      "step": 650
    },
    {
      "epoch": 1.1892744479495267,
      "grad_norm": 13.091219902038574,
      "learning_rate": 3.8086642599277976e-05,
      "loss": 0.3232,
      "step": 660
    },
    {
      "epoch": 1.2073005858494819,
      "grad_norm": 17.03887367248535,
      "learning_rate": 3.790613718411553e-05,
      "loss": 0.3081,
      "step": 670
    },
    {
      "epoch": 1.2253267237494367,
      "grad_norm": 24.6894588470459,
      "learning_rate": 3.7725631768953066e-05,
      "loss": 0.29,
      "step": 680
    },
    {
      "epoch": 1.2433528616493916,
      "grad_norm": 23.55462646484375,
      "learning_rate": 3.754512635379062e-05,
      "loss": 0.3232,
      "step": 690
    },
    {
      "epoch": 1.2613789995493465,
      "grad_norm": 44.3718147277832,
      "learning_rate": 3.7364620938628155e-05,
      "loss": 0.2878,
      "step": 700
    },
    {
      "epoch": 1.2794051374493014,
      "grad_norm": 21.339521408081055,
      "learning_rate": 3.718411552346571e-05,
      "loss": 0.2625,
      "step": 710
    },
    {
      "epoch": 1.2974312753492563,
      "grad_norm": 22.27944564819336,
      "learning_rate": 3.700361010830325e-05,
      "loss": 0.3261,
      "step": 720
    },
    {
      "epoch": 1.3154574132492114,
      "grad_norm": 20.857757568359375,
      "learning_rate": 3.68231046931408e-05,
      "loss": 0.3194,
      "step": 730
    },
    {
      "epoch": 1.3334835511491663,
      "grad_norm": 15.615569114685059,
      "learning_rate": 3.664259927797834e-05,
      "loss": 0.3099,
      "step": 740
    },
    {
      "epoch": 1.3515096890491212,
      "grad_norm": 13.210846900939941,
      "learning_rate": 3.646209386281589e-05,
      "loss": 0.2961,
      "step": 750
    },
    {
      "epoch": 1.3695358269490763,
      "grad_norm": 20.196102142333984,
      "learning_rate": 3.628158844765343e-05,
      "loss": 0.3099,
      "step": 760
    },
    {
      "epoch": 1.3875619648490312,
      "grad_norm": 14.043251991271973,
      "learning_rate": 3.610108303249098e-05,
      "loss": 0.3548,
      "step": 770
    },
    {
      "epoch": 1.405588102748986,
      "grad_norm": 31.82477569580078,
      "learning_rate": 3.592057761732852e-05,
      "loss": 0.3255,
      "step": 780
    },
    {
      "epoch": 1.423614240648941,
      "grad_norm": 19.933557510375977,
      "learning_rate": 3.574007220216607e-05,
      "loss": 0.3404,
      "step": 790
    },
    {
      "epoch": 1.4416403785488958,
      "grad_norm": 27.846755981445312,
      "learning_rate": 3.555956678700361e-05,
      "loss": 0.3054,
      "step": 800
    },
    {
      "epoch": 1.4596665164488507,
      "grad_norm": 18.607879638671875,
      "learning_rate": 3.537906137184116e-05,
      "loss": 0.2802,
      "step": 810
    },
    {
      "epoch": 1.4776926543488058,
      "grad_norm": 29.801652908325195,
      "learning_rate": 3.51985559566787e-05,
      "loss": 0.2975,
      "step": 820
    },
    {
      "epoch": 1.4957187922487607,
      "grad_norm": 21.80506134033203,
      "learning_rate": 3.5018050541516247e-05,
      "loss": 0.3251,
      "step": 830
    },
    {
      "epoch": 1.5137449301487156,
      "grad_norm": 12.826874732971191,
      "learning_rate": 3.483754512635379e-05,
      "loss": 0.2943,
      "step": 840
    },
    {
      "epoch": 1.5317710680486707,
      "grad_norm": 23.97177505493164,
      "learning_rate": 3.4657039711191336e-05,
      "loss": 0.3221,
      "step": 850
    },
    {
      "epoch": 1.5497972059486256,
      "grad_norm": 20.326284408569336,
      "learning_rate": 3.447653429602888e-05,
      "loss": 0.3276,
      "step": 860
    },
    {
      "epoch": 1.5678233438485805,
      "grad_norm": 19.27255630493164,
      "learning_rate": 3.4296028880866426e-05,
      "loss": 0.3338,
      "step": 870
    },
    {
      "epoch": 1.5858494817485354,
      "grad_norm": 17.299917221069336,
      "learning_rate": 3.411552346570397e-05,
      "loss": 0.3177,
      "step": 880
    },
    {
      "epoch": 1.6038756196484902,
      "grad_norm": 15.277181625366211,
      "learning_rate": 3.3935018050541516e-05,
      "loss": 0.2773,
      "step": 890
    },
    {
      "epoch": 1.6219017575484451,
      "grad_norm": 27.945833206176758,
      "learning_rate": 3.375451263537907e-05,
      "loss": 0.3417,
      "step": 900
    },
    {
      "epoch": 1.6399278954484002,
      "grad_norm": 25.78679847717285,
      "learning_rate": 3.3574007220216606e-05,
      "loss": 0.2959,
      "step": 910
    },
    {
      "epoch": 1.6579540333483551,
      "grad_norm": 17.608028411865234,
      "learning_rate": 3.339350180505416e-05,
      "loss": 0.273,
      "step": 920
    },
    {
      "epoch": 1.67598017124831,
      "grad_norm": 17.937299728393555,
      "learning_rate": 3.3212996389891696e-05,
      "loss": 0.3121,
      "step": 930
    },
    {
      "epoch": 1.694006309148265,
      "grad_norm": 22.207386016845703,
      "learning_rate": 3.303249097472924e-05,
      "loss": 0.3389,
      "step": 940
    },
    {
      "epoch": 1.71203244704822,
      "grad_norm": 40.351959228515625,
      "learning_rate": 3.2851985559566786e-05,
      "loss": 0.3138,
      "step": 950
    },
    {
      "epoch": 1.7300585849481749,
      "grad_norm": 23.845033645629883,
      "learning_rate": 3.267148014440433e-05,
      "loss": 0.2865,
      "step": 960
    },
    {
      "epoch": 1.7480847228481298,
      "grad_norm": 18.095985412597656,
      "learning_rate": 3.249097472924188e-05,
      "loss": 0.294,
      "step": 970
    },
    {
      "epoch": 1.7661108607480847,
      "grad_norm": 25.775617599487305,
      "learning_rate": 3.231046931407942e-05,
      "loss": 0.2785,
      "step": 980
    },
    {
      "epoch": 1.7841369986480395,
      "grad_norm": 20.0789737701416,
      "learning_rate": 3.212996389891697e-05,
      "loss": 0.3533,
      "step": 990
    },
    {
      "epoch": 1.8021631365479944,
      "grad_norm": 26.862163543701172,
      "learning_rate": 3.194945848375451e-05,
      "loss": 0.2957,
      "step": 1000
    },
    {
      "epoch": 1.8201892744479495,
      "grad_norm": 24.20423698425293,
      "learning_rate": 3.176895306859206e-05,
      "loss": 0.3195,
      "step": 1010
    },
    {
      "epoch": 1.8382154123479044,
      "grad_norm": 25.36762809753418,
      "learning_rate": 3.15884476534296e-05,
      "loss": 0.2823,
      "step": 1020
    },
    {
      "epoch": 1.8562415502478595,
      "grad_norm": 37.54848098754883,
      "learning_rate": 3.140794223826715e-05,
      "loss": 0.3557,
      "step": 1030
    },
    {
      "epoch": 1.8742676881478144,
      "grad_norm": 24.26515769958496,
      "learning_rate": 3.12274368231047e-05,
      "loss": 0.3194,
      "step": 1040
    },
    {
      "epoch": 1.8922938260477693,
      "grad_norm": 22.1879940032959,
      "learning_rate": 3.104693140794224e-05,
      "loss": 0.2912,
      "step": 1050
    },
    {
      "epoch": 1.9103199639477242,
      "grad_norm": 21.703449249267578,
      "learning_rate": 3.086642599277979e-05,
      "loss": 0.2635,
      "step": 1060
    },
    {
      "epoch": 1.928346101847679,
      "grad_norm": 33.39324951171875,
      "learning_rate": 3.0685920577617325e-05,
      "loss": 0.2943,
      "step": 1070
    },
    {
      "epoch": 1.946372239747634,
      "grad_norm": 21.398008346557617,
      "learning_rate": 3.0505415162454877e-05,
      "loss": 0.353,
      "step": 1080
    },
    {
      "epoch": 1.9643983776475888,
      "grad_norm": 17.463472366333008,
      "learning_rate": 3.032490974729242e-05,
      "loss": 0.2934,
      "step": 1090
    },
    {
      "epoch": 1.982424515547544,
      "grad_norm": 32.96004104614258,
      "learning_rate": 3.0144404332129967e-05,
      "loss": 0.3206,
      "step": 1100
    },
    {
      "epoch": 2.0,
      "grad_norm": 22.864667892456055,
      "learning_rate": 2.996389891696751e-05,
      "loss": 0.2765,
      "step": 1110
    },
    {
      "epoch": 2.0,
      "eval_f1": 0.8415584415584415,
      "eval_loss": 0.3972894847393036,
      "eval_precision": 0.7748344370860927,
      "eval_recall": 0.9208570179274158,
      "eval_runtime": 67.5759,
      "eval_samples_per_second": 69.152,
      "eval_steps_per_second": 8.657,
      "step": 1110
    },
    {
      "epoch": 2.018026137899955,
      "grad_norm": 25.821582794189453,
      "learning_rate": 2.9783393501805057e-05,
      "loss": 0.1921,
      "step": 1120
    },
    {
      "epoch": 2.0360522757999098,
      "grad_norm": 29.275466918945312,
      "learning_rate": 2.9602888086642598e-05,
      "loss": 0.1621,
      "step": 1130
    },
    {
      "epoch": 2.0540784136998647,
      "grad_norm": 26.00608253479004,
      "learning_rate": 2.9422382671480147e-05,
      "loss": 0.1792,
      "step": 1140
    },
    {
      "epoch": 2.0721045515998195,
      "grad_norm": 44.50676345825195,
      "learning_rate": 2.924187725631769e-05,
      "loss": 0.1525,
      "step": 1150
    },
    {
      "epoch": 2.090130689499775,
      "grad_norm": 35.6057014465332,
      "learning_rate": 2.906137184115524e-05,
      "loss": 0.1869,
      "step": 1160
    },
    {
      "epoch": 2.1081568273997298,
      "grad_norm": 15.054738998413086,
      "learning_rate": 2.888086642599278e-05,
      "loss": 0.1549,
      "step": 1170
    },
    {
      "epoch": 2.1261829652996846,
      "grad_norm": 31.124298095703125,
      "learning_rate": 2.870036101083033e-05,
      "loss": 0.1712,
      "step": 1180
    },
    {
      "epoch": 2.1442091031996395,
      "grad_norm": 31.94968032836914,
      "learning_rate": 2.851985559566787e-05,
      "loss": 0.1562,
      "step": 1190
    },
    {
      "epoch": 2.1622352410995944,
      "grad_norm": 27.812400817871094,
      "learning_rate": 2.8339350180505413e-05,
      "loss": 0.1615,
      "step": 1200
    },
    {
      "epoch": 2.1802613789995493,
      "grad_norm": 20.641502380371094,
      "learning_rate": 2.815884476534296e-05,
      "loss": 0.1744,
      "step": 1210
    },
    {
      "epoch": 2.198287516899504,
      "grad_norm": 22.02704429626465,
      "learning_rate": 2.7978339350180506e-05,
      "loss": 0.1794,
      "step": 1220
    },
    {
      "epoch": 2.216313654799459,
      "grad_norm": 23.43486213684082,
      "learning_rate": 2.779783393501805e-05,
      "loss": 0.1821,
      "step": 1230
    },
    {
      "epoch": 2.234339792699414,
      "grad_norm": 11.662972450256348,
      "learning_rate": 2.7617328519855596e-05,
      "loss": 0.1842,
      "step": 1240
    },
    {
      "epoch": 2.2523659305993693,
      "grad_norm": 55.82278823852539,
      "learning_rate": 2.7436823104693144e-05,
      "loss": 0.1587,
      "step": 1250
    },
    {
      "epoch": 2.270392068499324,
      "grad_norm": 52.459190368652344,
      "learning_rate": 2.7256317689530686e-05,
      "loss": 0.1755,
      "step": 1260
    },
    {
      "epoch": 2.288418206399279,
      "grad_norm": 11.363420486450195,
      "learning_rate": 2.7075812274368234e-05,
      "loss": 0.1507,
      "step": 1270
    },
    {
      "epoch": 2.306444344299234,
      "grad_norm": 19.20196533203125,
      "learning_rate": 2.6895306859205776e-05,
      "loss": 0.1538,
      "step": 1280
    },
    {
      "epoch": 2.324470482199189,
      "grad_norm": 24.035587310791016,
      "learning_rate": 2.6714801444043324e-05,
      "loss": 0.1604,
      "step": 1290
    },
    {
      "epoch": 2.3424966200991437,
      "grad_norm": 25.760459899902344,
      "learning_rate": 2.6534296028880866e-05,
      "loss": 0.179,
      "step": 1300
    },
    {
      "epoch": 2.3605227579990986,
      "grad_norm": 38.57444381713867,
      "learning_rate": 2.6353790613718414e-05,
      "loss": 0.1668,
      "step": 1310
    },
    {
      "epoch": 2.3785488958990535,
      "grad_norm": 26.489524841308594,
      "learning_rate": 2.617328519855596e-05,
      "loss": 0.1531,
      "step": 1320
    },
    {
      "epoch": 2.3965750337990084,
      "grad_norm": 30.741003036499023,
      "learning_rate": 2.59927797833935e-05,
      "loss": 0.201,
      "step": 1330
    },
    {
      "epoch": 2.4146011716989637,
      "grad_norm": 13.293583869934082,
      "learning_rate": 2.581227436823105e-05,
      "loss": 0.1413,
      "step": 1340
    },
    {
      "epoch": 2.4326273095989186,
      "grad_norm": 38.17484664916992,
      "learning_rate": 2.563176895306859e-05,
      "loss": 0.1479,
      "step": 1350
    },
    {
      "epoch": 2.4506534474988735,
      "grad_norm": 26.58913230895996,
      "learning_rate": 2.545126353790614e-05,
      "loss": 0.162,
      "step": 1360
    },
    {
      "epoch": 2.4686795853988284,
      "grad_norm": 18.360992431640625,
      "learning_rate": 2.527075812274368e-05,
      "loss": 0.1938,
      "step": 1370
    },
    {
      "epoch": 2.4867057232987833,
      "grad_norm": 31.35187339782715,
      "learning_rate": 2.509025270758123e-05,
      "loss": 0.1895,
      "step": 1380
    },
    {
      "epoch": 2.504731861198738,
      "grad_norm": 23.93364715576172,
      "learning_rate": 2.4909747292418774e-05,
      "loss": 0.1983,
      "step": 1390
    },
    {
      "epoch": 2.522757999098693,
      "grad_norm": 48.125240325927734,
      "learning_rate": 2.472924187725632e-05,
      "loss": 0.2107,
      "step": 1400
    },
    {
      "epoch": 2.540784136998648,
      "grad_norm": 16.627723693847656,
      "learning_rate": 2.4548736462093864e-05,
      "loss": 0.1798,
      "step": 1410
    },
    {
      "epoch": 2.558810274898603,
      "grad_norm": 33.984134674072266,
      "learning_rate": 2.436823104693141e-05,
      "loss": 0.1903,
      "step": 1420
    },
    {
      "epoch": 2.576836412798558,
      "grad_norm": 41.849796295166016,
      "learning_rate": 2.4187725631768953e-05,
      "loss": 0.1883,
      "step": 1430
    },
    {
      "epoch": 2.5948625506985126,
      "grad_norm": 35.38132095336914,
      "learning_rate": 2.40072202166065e-05,
      "loss": 0.218,
      "step": 1440
    },
    {
      "epoch": 2.612888688598468,
      "grad_norm": 45.718360900878906,
      "learning_rate": 2.3826714801444043e-05,
      "loss": 0.1857,
      "step": 1450
    },
    {
      "epoch": 2.630914826498423,
      "grad_norm": 21.124282836914062,
      "learning_rate": 2.3646209386281588e-05,
      "loss": 0.1966,
      "step": 1460
    },
    {
      "epoch": 2.6489409643983777,
      "grad_norm": 51.59513854980469,
      "learning_rate": 2.3465703971119137e-05,
      "loss": 0.2052,
      "step": 1470
    },
    {
      "epoch": 2.6669671022983326,
      "grad_norm": 27.74148941040039,
      "learning_rate": 2.328519855595668e-05,
      "loss": 0.1972,
      "step": 1480
    },
    {
      "epoch": 2.6849932401982874,
      "grad_norm": 23.677188873291016,
      "learning_rate": 2.3104693140794227e-05,
      "loss": 0.1564,
      "step": 1490
    },
    {
      "epoch": 2.7030193780982423,
      "grad_norm": 37.59199523925781,
      "learning_rate": 2.292418772563177e-05,
      "loss": 0.1556,
      "step": 1500
    },
    {
      "epoch": 2.721045515998197,
      "grad_norm": 39.62023162841797,
      "learning_rate": 2.2743682310469316e-05,
      "loss": 0.181,
      "step": 1510
    },
    {
      "epoch": 2.7390716538981525,
      "grad_norm": 41.49861526489258,
      "learning_rate": 2.2563176895306858e-05,
      "loss": 0.1858,
      "step": 1520
    },
    {
      "epoch": 2.757097791798107,
      "grad_norm": 24.64577293395996,
      "learning_rate": 2.2382671480144403e-05,
      "loss": 0.1665,
      "step": 1530
    },
    {
      "epoch": 2.7751239296980623,
      "grad_norm": 19.250690460205078,
      "learning_rate": 2.220216606498195e-05,
      "loss": 0.1582,
      "step": 1540
    },
    {
      "epoch": 2.793150067598017,
      "grad_norm": 32.70537185668945,
      "learning_rate": 2.2021660649819496e-05,
      "loss": 0.1774,
      "step": 1550
    },
    {
      "epoch": 2.811176205497972,
      "grad_norm": 27.5251522064209,
      "learning_rate": 2.184115523465704e-05,
      "loss": 0.1877,
      "step": 1560
    },
    {
      "epoch": 2.829202343397927,
      "grad_norm": 34.118892669677734,
      "learning_rate": 2.1660649819494586e-05,
      "loss": 0.1763,
      "step": 1570
    },
    {
      "epoch": 2.847228481297882,
      "grad_norm": 23.93291473388672,
      "learning_rate": 2.148014440433213e-05,
      "loss": 0.1448,
      "step": 1580
    },
    {
      "epoch": 2.8652546191978367,
      "grad_norm": 35.88968276977539,
      "learning_rate": 2.1299638989169676e-05,
      "loss": 0.2251,
      "step": 1590
    },
    {
      "epoch": 2.8832807570977916,
      "grad_norm": 28.2044620513916,
      "learning_rate": 2.111913357400722e-05,
      "loss": 0.1503,
      "step": 1600
    },
    {
      "epoch": 2.901306894997747,
      "grad_norm": 36.51847457885742,
      "learning_rate": 2.0938628158844766e-05,
      "loss": 0.1471,
      "step": 1610
    },
    {
      "epoch": 2.9193330328977014,
      "grad_norm": 35.991207122802734,
      "learning_rate": 2.075812274368231e-05,
      "loss": 0.1822,
      "step": 1620
    },
    {
      "epoch": 2.9373591707976567,
      "grad_norm": 47.02556610107422,
      "learning_rate": 2.057761732851986e-05,
      "loss": 0.1727,
      "step": 1630
    },
    {
      "epoch": 2.9553853086976116,
      "grad_norm": 22.891300201416016,
      "learning_rate": 2.0397111913357404e-05,
      "loss": 0.1627,
      "step": 1640
    },
    {
      "epoch": 2.9734114465975665,
      "grad_norm": 47.76613998413086,
      "learning_rate": 2.0216606498194946e-05,
      "loss": 0.171,
      "step": 1650
    },
    {
      "epoch": 2.9914375844975214,
      "grad_norm": 16.816410064697266,
      "learning_rate": 2.003610108303249e-05,
      "loss": 0.1314,
      "step": 1660
    },
    {
      "epoch": 3.0,
      "eval_f1": 0.8327495621716288,
      "eval_loss": 0.5104337930679321,
      "eval_precision": 0.8338448049101271,
      "eval_recall": 0.8316571928290337,
      "eval_runtime": 66.4346,
      "eval_samples_per_second": 70.34,
      "eval_steps_per_second": 8.806,
      "step": 1665
    },
    {
      "epoch": 3.0090130689499777,
      "grad_norm": 9.651093482971191,
      "learning_rate": 1.9855595667870036e-05,
      "loss": 0.1416,
      "step": 1670
    },
    {
      "epoch": 3.0270392068499326,
      "grad_norm": 31.92531967163086,
      "learning_rate": 1.967509025270758e-05,
      "loss": 0.0754,
      "step": 1680
    },
    {
      "epoch": 3.0450653447498874,
      "grad_norm": 73.67928314208984,
      "learning_rate": 1.9494584837545125e-05,
      "loss": 0.106,
      "step": 1690
    },
    {
      "epoch": 3.0630914826498423,
      "grad_norm": 42.071495056152344,
      "learning_rate": 1.9314079422382674e-05,
      "loss": 0.0619,
      "step": 1700
    },
    {
      "epoch": 3.081117620549797,
      "grad_norm": 26.15340805053711,
      "learning_rate": 1.913357400722022e-05,
      "loss": 0.0452,
      "step": 1710
    },
    {
      "epoch": 3.099143758449752,
      "grad_norm": 47.46036148071289,
      "learning_rate": 1.8953068592057764e-05,
      "loss": 0.0808,
      "step": 1720
    },
    {
      "epoch": 3.117169896349707,
      "grad_norm": 3.8104429244995117,
      "learning_rate": 1.877256317689531e-05,
      "loss": 0.0727,
      "step": 1730
    },
    {
      "epoch": 3.135196034249662,
      "grad_norm": 74.3051528930664,
      "learning_rate": 1.8592057761732854e-05,
      "loss": 0.0954,
      "step": 1740
    },
    {
      "epoch": 3.1532221721496168,
      "grad_norm": 13.70536994934082,
      "learning_rate": 1.84115523465704e-05,
      "loss": 0.0712,
      "step": 1750
    },
    {
      "epoch": 3.171248310049572,
      "grad_norm": 39.88608169555664,
      "learning_rate": 1.8231046931407943e-05,
      "loss": 0.0745,
      "step": 1760
    },
    {
      "epoch": 3.189274447949527,
      "grad_norm": 5.359338760375977,
      "learning_rate": 1.805054151624549e-05,
      "loss": 0.0819,
      "step": 1770
    },
    {
      "epoch": 3.207300585849482,
      "grad_norm": 32.22220993041992,
      "learning_rate": 1.7870036101083033e-05,
      "loss": 0.0875,
      "step": 1780
    },
    {
      "epoch": 3.2253267237494367,
      "grad_norm": 38.817562103271484,
      "learning_rate": 1.768953068592058e-05,
      "loss": 0.0862,
      "step": 1790
    },
    {
      "epoch": 3.2433528616493916,
      "grad_norm": 47.403221130371094,
      "learning_rate": 1.7509025270758123e-05,
      "loss": 0.0531,
      "step": 1800
    },
    {
      "epoch": 3.2613789995493465,
      "grad_norm": 32.57699966430664,
      "learning_rate": 1.7328519855595668e-05,
      "loss": 0.0668,
      "step": 1810
    },
    {
      "epoch": 3.2794051374493014,
      "grad_norm": 19.743186950683594,
      "learning_rate": 1.7148014440433213e-05,
      "loss": 0.098,
      "step": 1820
    },
    {
      "epoch": 3.2974312753492563,
      "grad_norm": 18.176307678222656,
      "learning_rate": 1.6967509025270758e-05,
      "loss": 0.1096,
      "step": 1830
    },
    {
      "epoch": 3.315457413249211,
      "grad_norm": 8.426152229309082,
      "learning_rate": 1.6787003610108303e-05,
      "loss": 0.0548,
      "step": 1840
    },
    {
      "epoch": 3.3334835511491665,
      "grad_norm": 24.95146942138672,
      "learning_rate": 1.6606498194945848e-05,
      "loss": 0.0863,
      "step": 1850
    },
    {
      "epoch": 3.3515096890491214,
      "grad_norm": 18.36710548400879,
      "learning_rate": 1.6425992779783393e-05,
      "loss": 0.0642,
      "step": 1860
    },
    {
      "epoch": 3.3695358269490763,
      "grad_norm": 65.7655029296875,
      "learning_rate": 1.624548736462094e-05,
      "loss": 0.0848,
      "step": 1870
    },
    {
      "epoch": 3.387561964849031,
      "grad_norm": 91.28118896484375,
      "learning_rate": 1.6064981949458486e-05,
      "loss": 0.0963,
      "step": 1880
    },
    {
      "epoch": 3.405588102748986,
      "grad_norm": 22.324848175048828,
      "learning_rate": 1.588447653429603e-05,
      "loss": 0.0902,
      "step": 1890
    },
    {
      "epoch": 3.423614240648941,
      "grad_norm": 78.93708801269531,
      "learning_rate": 1.5703971119133576e-05,
      "loss": 0.0903,
      "step": 1900
    },
    {
      "epoch": 3.441640378548896,
      "grad_norm": 8.107336044311523,
      "learning_rate": 1.552346570397112e-05,
      "loss": 0.0569,
      "step": 1910
    },
    {
      "epoch": 3.4596665164488507,
      "grad_norm": 484.3660888671875,
      "learning_rate": 1.5342960288808663e-05,
      "loss": 0.0786,
      "step": 1920
    },
    {
      "epoch": 3.4776926543488056,
      "grad_norm": 60.93991470336914,
      "learning_rate": 1.516245487364621e-05,
      "loss": 0.1171,
      "step": 1930
    },
    {
      "epoch": 3.495718792248761,
      "grad_norm": 24.582143783569336,
      "learning_rate": 1.4981949458483754e-05,
      "loss": 0.0771,
      "step": 1940
    },
    {
      "epoch": 3.5137449301487154,
      "grad_norm": 18.43077850341797,
      "learning_rate": 1.4801444043321299e-05,
      "loss": 0.0894,
      "step": 1950
    },
    {
      "epoch": 3.5317710680486707,
      "grad_norm": 38.594322204589844,
      "learning_rate": 1.4620938628158846e-05,
      "loss": 0.0583,
      "step": 1960
    },
    {
      "epoch": 3.5497972059486256,
      "grad_norm": 20.9086856842041,
      "learning_rate": 1.444043321299639e-05,
      "loss": 0.0981,
      "step": 1970
    },
    {
      "epoch": 3.5678233438485805,
      "grad_norm": 37.58344268798828,
      "learning_rate": 1.4259927797833936e-05,
      "loss": 0.0848,
      "step": 1980
    },
    {
      "epoch": 3.5858494817485354,
      "grad_norm": 23.61933135986328,
      "learning_rate": 1.407942238267148e-05,
      "loss": 0.0889,
      "step": 1990
    },
    {
      "epoch": 3.6038756196484902,
      "grad_norm": 29.957996368408203,
      "learning_rate": 1.3898916967509026e-05,
      "loss": 0.0874,
      "step": 2000
    },
    {
      "epoch": 3.621901757548445,
      "grad_norm": 33.87687683105469,
      "learning_rate": 1.3718411552346572e-05,
      "loss": 0.0745,
      "step": 2010
    },
    {
      "epoch": 3.6399278954484,
      "grad_norm": 49.98701095581055,
      "learning_rate": 1.3537906137184117e-05,
      "loss": 0.0769,
      "step": 2020
    },
    {
      "epoch": 3.6579540333483553,
      "grad_norm": 55.11968994140625,
      "learning_rate": 1.3357400722021662e-05,
      "loss": 0.1061,
      "step": 2030
    },
    {
      "epoch": 3.67598017124831,
      "grad_norm": 35.04582595825195,
      "learning_rate": 1.3176895306859207e-05,
      "loss": 0.0534,
      "step": 2040
    },
    {
      "epoch": 3.694006309148265,
      "grad_norm": 27.872615814208984,
      "learning_rate": 1.299638989169675e-05,
      "loss": 0.0788,
      "step": 2050
    },
    {
      "epoch": 3.71203244704822,
      "grad_norm": 24.054401397705078,
      "learning_rate": 1.2815884476534295e-05,
      "loss": 0.0867,
      "step": 2060
    },
    {
      "epoch": 3.730058584948175,
      "grad_norm": 21.985633850097656,
      "learning_rate": 1.263537906137184e-05,
      "loss": 0.067,
      "step": 2070
    },
    {
      "epoch": 3.7480847228481298,
      "grad_norm": 35.386085510253906,
      "learning_rate": 1.2454873646209387e-05,
      "loss": 0.0698,
      "step": 2080
    },
    {
      "epoch": 3.7661108607480847,
      "grad_norm": 21.687231063842773,
      "learning_rate": 1.2274368231046932e-05,
      "loss": 0.0904,
      "step": 2090
    },
    {
      "epoch": 3.7841369986480395,
      "grad_norm": 24.54305076599121,
      "learning_rate": 1.2093862815884477e-05,
      "loss": 0.0881,
      "step": 2100
    },
    {
      "epoch": 3.8021631365479944,
      "grad_norm": 52.710487365722656,
      "learning_rate": 1.1913357400722022e-05,
      "loss": 0.0931,
      "step": 2110
    },
    {
      "epoch": 3.8201892744479498,
      "grad_norm": 36.47780990600586,
      "learning_rate": 1.1732851985559568e-05,
      "loss": 0.084,
      "step": 2120
    },
    {
      "epoch": 3.838215412347904,
      "grad_norm": 53.25602722167969,
      "learning_rate": 1.1552346570397113e-05,
      "loss": 0.082,
      "step": 2130
    },
    {
      "epoch": 3.8562415502478595,
      "grad_norm": 31.34427261352539,
      "learning_rate": 1.1371841155234658e-05,
      "loss": 0.1156,
      "step": 2140
    },
    {
      "epoch": 3.8742676881478144,
      "grad_norm": 26.31972885131836,
      "learning_rate": 1.1191335740072201e-05,
      "loss": 0.1051,
      "step": 2150
    },
    {
      "epoch": 3.8922938260477693,
      "grad_norm": 17.960121154785156,
      "learning_rate": 1.1010830324909748e-05,
      "loss": 0.1016,
      "step": 2160
    },
    {
      "epoch": 3.910319963947724,
      "grad_norm": 10.466907501220703,
      "learning_rate": 1.0830324909747293e-05,
      "loss": 0.0518,
      "step": 2170
    },
    {
      "epoch": 3.928346101847679,
      "grad_norm": 38.48418045043945,
      "learning_rate": 1.0649819494584838e-05,
      "loss": 0.0406,
      "step": 2180
    },
    {
      "epoch": 3.946372239747634,
      "grad_norm": 36.69453811645508,
      "learning_rate": 1.0469314079422383e-05,
      "loss": 0.0654,
      "step": 2190
    },
    {
      "epoch": 3.964398377647589,
      "grad_norm": 3.7297933101654053,
      "learning_rate": 1.028880866425993e-05,
      "loss": 0.0963,
      "step": 2200
    },
    {
      "epoch": 3.982424515547544,
      "grad_norm": 69.28231048583984,
      "learning_rate": 1.0108303249097473e-05,
      "loss": 0.0509,
      "step": 2210
    },
    {
      "epoch": 4.0,
      "grad_norm": 48.080718994140625,
      "learning_rate": 9.927797833935018e-06,
      "loss": 0.0575,
      "step": 2220
    },
    {
      "epoch": 4.0,
      "eval_f1": 0.8324720068906115,
      "eval_loss": 0.784024178981781,
      "eval_precision": 0.8201103097157404,
      "eval_recall": 0.845212068211631,
      "eval_runtime": 52.7119,
      "eval_samples_per_second": 88.652,
      "eval_steps_per_second": 11.098,
      "step": 2220
    },
    {
      "epoch": 4.018026137899955,
      "grad_norm": 25.119190216064453,
      "learning_rate": 9.747292418772563e-06,
      "loss": 0.0346,
      "step": 2230
    },
    {
      "epoch": 4.03605227579991,
      "grad_norm": 10.888751029968262,
      "learning_rate": 9.56678700361011e-06,
      "loss": 0.0373,
      "step": 2240
    },
    {
      "epoch": 4.054078413699865,
      "grad_norm": 40.75419616699219,
      "learning_rate": 9.386281588447654e-06,
      "loss": 0.0257,
      "step": 2250
    },
    {
      "epoch": 4.0721045515998195,
      "grad_norm": 42.57258987426758,
      "learning_rate": 9.2057761732852e-06,
      "loss": 0.032,
      "step": 2260
    },
    {
      "epoch": 4.090130689499775,
      "grad_norm": 49.555885314941406,
      "learning_rate": 9.025270758122744e-06,
      "loss": 0.0452,
      "step": 2270
    },
    {
      "epoch": 4.108156827399729,
      "grad_norm": 13.99950885772705,
      "learning_rate": 8.84476534296029e-06,
      "loss": 0.0271,
      "step": 2280
    },
    {
      "epoch": 4.126182965299685,
      "grad_norm": 31.079801559448242,
      "learning_rate": 8.664259927797834e-06,
      "loss": 0.0342,
      "step": 2290
    },
    {
      "epoch": 4.144209103199639,
      "grad_norm": 1.8287452459335327,
      "learning_rate": 8.483754512635379e-06,
      "loss": 0.0284,
      "step": 2300
    },
    {
      "epoch": 4.162235241099594,
      "grad_norm": 1.6889796257019043,
      "learning_rate": 8.303249097472924e-06,
      "loss": 0.0563,
      "step": 2310
    },
    {
      "epoch": 4.18026137899955,
      "grad_norm": 10.131366729736328,
      "learning_rate": 8.12274368231047e-06,
      "loss": 0.0317,
      "step": 2320
    },
    {
      "epoch": 4.198287516899504,
      "grad_norm": 6.962566375732422,
      "learning_rate": 7.942238267148016e-06,
      "loss": 0.0558,
      "step": 2330
    },
    {
      "epoch": 4.2163136547994595,
      "grad_norm": 6.933462142944336,
      "learning_rate": 7.76173285198556e-06,
      "loss": 0.025,
      "step": 2340
    },
    {
      "epoch": 4.234339792699414,
      "grad_norm": 1.254787802696228,
      "learning_rate": 7.581227436823105e-06,
      "loss": 0.0601,
      "step": 2350
    },
    {
      "epoch": 4.252365930599369,
      "grad_norm": 38.4481201171875,
      "learning_rate": 7.4007220216606496e-06,
      "loss": 0.0185,
      "step": 2360
    },
    {
      "epoch": 4.270392068499324,
      "grad_norm": 27.38968849182129,
      "learning_rate": 7.220216606498195e-06,
      "loss": 0.0186,
      "step": 2370
    },
    {
      "epoch": 4.288418206399279,
      "grad_norm": 47.91832733154297,
      "learning_rate": 7.03971119133574e-06,
      "loss": 0.0532,
      "step": 2380
    },
    {
      "epoch": 4.3064443442992335,
      "grad_norm": 23.574234008789062,
      "learning_rate": 6.859205776173286e-06,
      "loss": 0.0391,
      "step": 2390
    },
    {
      "epoch": 4.324470482199189,
      "grad_norm": 33.0970573425293,
      "learning_rate": 6.678700361010831e-06,
      "loss": 0.0254,
      "step": 2400
    },
    {
      "epoch": 4.342496620099144,
      "grad_norm": 58.73637008666992,
      "learning_rate": 6.498194945848375e-06,
      "loss": 0.0283,
      "step": 2410
    },
    {
      "epoch": 4.360522757999099,
      "grad_norm": 14.183826446533203,
      "learning_rate": 6.31768953068592e-06,
      "loss": 0.0321,
      "step": 2420
    },
    {
      "epoch": 4.378548895899054,
      "grad_norm": 21.14553451538086,
      "learning_rate": 6.137184115523466e-06,
      "loss": 0.0341,
      "step": 2430
    },
    {
      "epoch": 4.396575033799008,
      "grad_norm": 15.327287673950195,
      "learning_rate": 5.956678700361011e-06,
      "loss": 0.0697,
      "step": 2440
    },
    {
      "epoch": 4.414601171698964,
      "grad_norm": 35.40299606323242,
      "learning_rate": 5.776173285198557e-06,
      "loss": 0.0437,
      "step": 2450
    },
    {
      "epoch": 4.432627309598918,
      "grad_norm": 2.252593517303467,
      "learning_rate": 5.595667870036101e-06,
      "loss": 0.0249,
      "step": 2460
    },
    {
      "epoch": 4.4506534474988735,
      "grad_norm": 25.71234893798828,
      "learning_rate": 5.4151624548736465e-06,
      "loss": 0.0451,
      "step": 2470
    },
    {
      "epoch": 4.468679585398828,
      "grad_norm": 73.09535217285156,
      "learning_rate": 5.2346570397111915e-06,
      "loss": 0.0493,
      "step": 2480
    },
    {
      "epoch": 4.486705723298783,
      "grad_norm": 2.6368682384490967,
      "learning_rate": 5.054151624548736e-06,
      "loss": 0.0461,
      "step": 2490
    },
    {
      "epoch": 4.504731861198739,
      "grad_norm": 8.797612190246582,
      "learning_rate": 4.873646209386281e-06,
      "loss": 0.0293,
      "step": 2500
    },
    {
      "epoch": 4.522757999098693,
      "grad_norm": 2.3368911743164062,
      "learning_rate": 4.693140794223827e-06,
      "loss": 0.0224,
      "step": 2510
    },
    {
      "epoch": 4.540784136998648,
      "grad_norm": 59.15225601196289,
      "learning_rate": 4.512635379061372e-06,
      "loss": 0.0385,
      "step": 2520
    },
    {
      "epoch": 4.558810274898603,
      "grad_norm": 84.5032958984375,
      "learning_rate": 4.332129963898917e-06,
      "loss": 0.0271,
      "step": 2530
    },
    {
      "epoch": 4.576836412798558,
      "grad_norm": 0.8333727121353149,
      "learning_rate": 4.151624548736462e-06,
      "loss": 0.0653,
      "step": 2540
    },
    {
      "epoch": 4.594862550698513,
      "grad_norm": 115.62662506103516,
      "learning_rate": 3.971119133574008e-06,
      "loss": 0.0489,
      "step": 2550
    },
    {
      "epoch": 4.612888688598468,
      "grad_norm": 87.90824890136719,
      "learning_rate": 3.7906137184115523e-06,
      "loss": 0.0317,
      "step": 2560
    },
    {
      "epoch": 4.630914826498422,
      "grad_norm": 77.7380599975586,
      "learning_rate": 3.6101083032490977e-06,
      "loss": 0.063,
      "step": 2570
    },
    {
      "epoch": 4.648940964398378,
      "grad_norm": 38.220497131347656,
      "learning_rate": 3.429602888086643e-06,
      "loss": 0.0282,
      "step": 2580
    },
    {
      "epoch": 4.666967102298333,
      "grad_norm": 82.26947021484375,
      "learning_rate": 3.2490974729241876e-06,
      "loss": 0.0239,
      "step": 2590
    },
    {
      "epoch": 4.6849932401982874,
      "grad_norm": 50.12388610839844,
      "learning_rate": 3.068592057761733e-06,
      "loss": 0.0263,
      "step": 2600
    },
    {
      "epoch": 4.703019378098243,
      "grad_norm": 44.167198181152344,
      "learning_rate": 2.8880866425992783e-06,
      "loss": 0.0563,
      "step": 2610
    },
    {
      "epoch": 4.721045515998197,
      "grad_norm": 27.569866180419922,
      "learning_rate": 2.7075812274368233e-06,
      "loss": 0.0272,
      "step": 2620
    },
    {
      "epoch": 4.7390716538981525,
      "grad_norm": 6.827781677246094,
      "learning_rate": 2.527075812274368e-06,
      "loss": 0.0378,
      "step": 2630
    },
    {
      "epoch": 4.757097791798107,
      "grad_norm": 17.286375045776367,
      "learning_rate": 2.3465703971119136e-06,
      "loss": 0.0213,
      "step": 2640
    },
    {
      "epoch": 4.775123929698062,
      "grad_norm": 25.621318817138672,
      "learning_rate": 2.1660649819494585e-06,
      "loss": 0.0311,
      "step": 2650
    },
    {
      "epoch": 4.793150067598017,
      "grad_norm": 65.85664367675781,
      "learning_rate": 1.985559566787004e-06,
      "loss": 0.0419,
      "step": 2660
    },
    {
      "epoch": 4.811176205497972,
      "grad_norm": 36.74288558959961,
      "learning_rate": 1.8050541516245488e-06,
      "loss": 0.0212,
      "step": 2670
    },
    {
      "epoch": 4.829202343397927,
      "grad_norm": 39.1642951965332,
      "learning_rate": 1.6245487364620938e-06,
      "loss": 0.0321,
      "step": 2680
    },
    {
      "epoch": 4.847228481297882,
      "grad_norm": 10.89847183227539,
      "learning_rate": 1.4440433212996392e-06,
      "loss": 0.0343,
      "step": 2690
    },
    {
      "epoch": 4.865254619197837,
      "grad_norm": 65.18307495117188,
      "learning_rate": 1.263537906137184e-06,
      "loss": 0.039,
      "step": 2700
    },
    {
      "epoch": 4.883280757097792,
      "grad_norm": 0.6980013251304626,
      "learning_rate": 1.0830324909747293e-06,
      "loss": 0.0279,
      "step": 2710
    },
    {
      "epoch": 4.901306894997747,
      "grad_norm": 49.98525619506836,
      "learning_rate": 9.025270758122744e-07,
      "loss": 0.0415,
      "step": 2720
    },
    {
      "epoch": 4.919333032897701,
      "grad_norm": 63.398868560791016,
      "learning_rate": 7.220216606498196e-07,
      "loss": 0.0732,
      "step": 2730
    },
    {
      "epoch": 4.937359170797657,
      "grad_norm": 50.19089889526367,
      "learning_rate": 5.415162454873646e-07,
      "loss": 0.041,
      "step": 2740
    },
    {
      "epoch": 4.955385308697611,
      "grad_norm": 4.164758682250977,
      "learning_rate": 3.610108303249098e-07,
      "loss": 0.0237,
      "step": 2750
    },
    {
      "epoch": 4.9734114465975665,
      "grad_norm": 55.19854736328125,
      "learning_rate": 1.805054151624549e-07,
      "loss": 0.0419,
      "step": 2760
    },
    {
      "epoch": 4.991437584497522,
      "grad_norm": 45.117130279541016,
      "learning_rate": 0.0,
      "loss": 0.0272,
      "step": 2770
    },
    {
      "epoch": 4.991437584497522,
      "eval_f1": 0.8358019466779517,
      "eval_loss": 1.0835028886795044,
      "eval_precision": 0.8097580975809758,
      "eval_recall": 0.8635767380848273,
      "eval_runtime": 63.3058,
      "eval_samples_per_second": 73.816,
      "eval_steps_per_second": 9.241,
      "step": 2770
    }
  ],
  "logging_steps": 10,
  "max_steps": 2770,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.629766539300045e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}