{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 555,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.018026137899954935,
      "grad_norm": 21.723926544189453,
      "learning_rate": 4.981949458483755e-05,
      "loss": 0.8104,
      "step": 10
    },
    {
      "epoch": 0.03605227579990987,
      "grad_norm": 55.4254035949707,
      "learning_rate": 4.963898916967509e-05,
      "loss": 0.5178,
      "step": 20
    },
    {
      "epoch": 0.054078413699864804,
      "grad_norm": 41.4888801574707,
      "learning_rate": 4.945848375451264e-05,
      "loss": 0.5092,
      "step": 30
    },
    {
      "epoch": 0.07210455159981974,
      "grad_norm": 25.77608299255371,
      "learning_rate": 4.927797833935018e-05,
      "loss": 0.5581,
      "step": 40
    },
    {
      "epoch": 0.09013068949977468,
      "grad_norm": 61.5268669128418,
      "learning_rate": 4.909747292418773e-05,
      "loss": 0.5132,
      "step": 50
    },
    {
      "epoch": 0.10815682739972961,
      "grad_norm": 18.218244552612305,
      "learning_rate": 4.891696750902527e-05,
      "loss": 0.4614,
      "step": 60
    },
    {
      "epoch": 0.12618296529968454,
      "grad_norm": 30.51982879638672,
      "learning_rate": 4.873646209386282e-05,
      "loss": 0.4648,
      "step": 70
    },
    {
      "epoch": 0.14420910319963948,
      "grad_norm": 20.707260131835938,
      "learning_rate": 4.855595667870036e-05,
      "loss": 0.4317,
      "step": 80
    },
    {
      "epoch": 0.16223524109959442,
      "grad_norm": 21.699357986450195,
      "learning_rate": 4.837545126353791e-05,
      "loss": 0.4226,
      "step": 90
    },
    {
      "epoch": 0.18026137899954936,
      "grad_norm": 51.80731201171875,
      "learning_rate": 4.819494584837546e-05,
      "loss": 0.5002,
      "step": 100
    },
    {
      "epoch": 0.19828751689950427,
      "grad_norm": 44.694942474365234,
      "learning_rate": 4.8014440433213e-05,
      "loss": 0.4933,
      "step": 110
    },
    {
      "epoch": 0.21631365479945922,
      "grad_norm": 22.704530715942383,
      "learning_rate": 4.783393501805055e-05,
      "loss": 0.4399,
      "step": 120
    },
    {
      "epoch": 0.23433979269941416,
      "grad_norm": 26.04544448852539,
      "learning_rate": 4.765342960288809e-05,
      "loss": 0.438,
      "step": 130
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 14.642335891723633,
      "learning_rate": 4.747292418772563e-05,
      "loss": 0.4591,
      "step": 140
    },
    {
      "epoch": 0.270392068499324,
      "grad_norm": 16.13884162902832,
      "learning_rate": 4.7292418772563177e-05,
      "loss": 0.4546,
      "step": 150
    },
    {
      "epoch": 0.28841820639927895,
      "grad_norm": 29.33772087097168,
      "learning_rate": 4.711191335740072e-05,
      "loss": 0.4665,
      "step": 160
    },
    {
      "epoch": 0.3064443442992339,
      "grad_norm": 15.900433540344238,
      "learning_rate": 4.693140794223827e-05,
      "loss": 0.4703,
      "step": 170
    },
    {
      "epoch": 0.32447048219918884,
      "grad_norm": 17.246356964111328,
      "learning_rate": 4.675090252707581e-05,
      "loss": 0.426,
      "step": 180
    },
    {
      "epoch": 0.3424966200991438,
      "grad_norm": 25.457477569580078,
      "learning_rate": 4.657039711191336e-05,
      "loss": 0.4711,
      "step": 190
    },
    {
      "epoch": 0.3605227579990987,
      "grad_norm": 25.43963050842285,
      "learning_rate": 4.63898916967509e-05,
      "loss": 0.4547,
      "step": 200
    },
    {
      "epoch": 0.3785488958990536,
      "grad_norm": 32.49702072143555,
      "learning_rate": 4.620938628158845e-05,
      "loss": 0.4603,
      "step": 210
    },
    {
      "epoch": 0.39657503379900855,
      "grad_norm": 52.78520965576172,
      "learning_rate": 4.602888086642599e-05,
      "loss": 0.4471,
      "step": 220
    },
    {
      "epoch": 0.4146011716989635,
      "grad_norm": 16.70362663269043,
      "learning_rate": 4.584837545126354e-05,
      "loss": 0.4393,
      "step": 230
    },
    {
      "epoch": 0.43262730959891843,
      "grad_norm": 23.55246353149414,
      "learning_rate": 4.566787003610109e-05,
      "loss": 0.4256,
      "step": 240
    },
    {
      "epoch": 0.45065344749887337,
      "grad_norm": 18.868804931640625,
      "learning_rate": 4.548736462093863e-05,
      "loss": 0.4324,
      "step": 250
    },
    {
      "epoch": 0.4686795853988283,
      "grad_norm": 19.074827194213867,
      "learning_rate": 4.530685920577618e-05,
      "loss": 0.4419,
      "step": 260
    },
    {
      "epoch": 0.48670572329878325,
      "grad_norm": 29.232803344726562,
      "learning_rate": 4.5126353790613716e-05,
      "loss": 0.4179,
      "step": 270
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 17.780059814453125,
      "learning_rate": 4.494584837545127e-05,
      "loss": 0.3917,
      "step": 280
    },
    {
      "epoch": 0.5227579990986931,
      "grad_norm": 13.99399471282959,
      "learning_rate": 4.4765342960288806e-05,
      "loss": 0.4321,
      "step": 290
    },
    {
      "epoch": 0.540784136998648,
      "grad_norm": 14.700942039489746,
      "learning_rate": 4.458483754512636e-05,
      "loss": 0.4163,
      "step": 300
    },
    {
      "epoch": 0.558810274898603,
      "grad_norm": 17.43704605102539,
      "learning_rate": 4.44043321299639e-05,
      "loss": 0.4268,
      "step": 310
    },
    {
      "epoch": 0.5768364127985579,
      "grad_norm": 28.500301361083984,
      "learning_rate": 4.422382671480145e-05,
      "loss": 0.4229,
      "step": 320
    },
    {
      "epoch": 0.5948625506985128,
      "grad_norm": 36.14088439941406,
      "learning_rate": 4.404332129963899e-05,
      "loss": 0.4559,
      "step": 330
    },
    {
      "epoch": 0.6128886885984678,
      "grad_norm": 15.279230117797852,
      "learning_rate": 4.386281588447654e-05,
      "loss": 0.4067,
      "step": 340
    },
    {
      "epoch": 0.6309148264984227,
      "grad_norm": 21.569740295410156,
      "learning_rate": 4.368231046931408e-05,
      "loss": 0.4298,
      "step": 350
    },
    {
      "epoch": 0.6489409643983777,
      "grad_norm": 20.630115509033203,
      "learning_rate": 4.350180505415163e-05,
      "loss": 0.4173,
      "step": 360
    },
    {
      "epoch": 0.6669671022983326,
      "grad_norm": 15.515020370483398,
      "learning_rate": 4.332129963898917e-05,
      "loss": 0.4063,
      "step": 370
    },
    {
      "epoch": 0.6849932401982876,
      "grad_norm": 16.33180046081543,
      "learning_rate": 4.314079422382672e-05,
      "loss": 0.3651,
      "step": 380
    },
    {
      "epoch": 0.7030193780982424,
      "grad_norm": 16.77664566040039,
      "learning_rate": 4.296028880866426e-05,
      "loss": 0.3879,
      "step": 390
    },
    {
      "epoch": 0.7210455159981974,
      "grad_norm": 11.113824844360352,
      "learning_rate": 4.277978339350181e-05,
      "loss": 0.4205,
      "step": 400
    },
    {
      "epoch": 0.7390716538981523,
      "grad_norm": 17.835678100585938,
      "learning_rate": 4.259927797833935e-05,
      "loss": 0.4466,
      "step": 410
    },
    {
      "epoch": 0.7570977917981072,
      "grad_norm": 24.48379135131836,
      "learning_rate": 4.24187725631769e-05,
      "loss": 0.4084,
      "step": 420
    },
    {
      "epoch": 0.7751239296980622,
      "grad_norm": 29.183530807495117,
      "learning_rate": 4.223826714801444e-05,
      "loss": 0.3588,
      "step": 430
    },
    {
      "epoch": 0.7931500675980171,
      "grad_norm": 16.398386001586914,
      "learning_rate": 4.205776173285199e-05,
      "loss": 0.4594,
      "step": 440
    },
    {
      "epoch": 0.8111762054979721,
      "grad_norm": 24.11660385131836,
      "learning_rate": 4.187725631768953e-05,
      "loss": 0.376,
      "step": 450
    },
    {
      "epoch": 0.829202343397927,
      "grad_norm": 21.480924606323242,
      "learning_rate": 4.169675090252708e-05,
      "loss": 0.4035,
      "step": 460
    },
    {
      "epoch": 0.847228481297882,
      "grad_norm": 11.91422176361084,
      "learning_rate": 4.151624548736462e-05,
      "loss": 0.4074,
      "step": 470
    },
    {
      "epoch": 0.8652546191978369,
      "grad_norm": 17.17136001586914,
      "learning_rate": 4.1335740072202167e-05,
      "loss": 0.4259,
      "step": 480
    },
    {
      "epoch": 0.8832807570977917,
      "grad_norm": 15.024781227111816,
      "learning_rate": 4.115523465703972e-05,
      "loss": 0.413,
      "step": 490
    },
    {
      "epoch": 0.9013068949977467,
      "grad_norm": 15.877848625183105,
      "learning_rate": 4.0974729241877256e-05,
      "loss": 0.4013,
      "step": 500
    },
    {
      "epoch": 0.9193330328977016,
      "grad_norm": 20.711393356323242,
      "learning_rate": 4.079422382671481e-05,
      "loss": 0.3875,
      "step": 510
    },
    {
      "epoch": 0.9373591707976566,
      "grad_norm": 14.360701560974121,
      "learning_rate": 4.0613718411552346e-05,
      "loss": 0.4025,
      "step": 520
    },
    {
      "epoch": 0.9553853086976115,
      "grad_norm": 16.94271469116211,
      "learning_rate": 4.043321299638989e-05,
      "loss": 0.3971,
      "step": 530
    },
    {
      "epoch": 0.9734114465975665,
      "grad_norm": 23.218050003051758,
      "learning_rate": 4.0252707581227436e-05,
      "loss": 0.4305,
      "step": 540
    },
    {
      "epoch": 0.9914375844975214,
      "grad_norm": 25.439455032348633,
      "learning_rate": 4.007220216606498e-05,
      "loss": 0.3969,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.8410362694300518,
      "eval_loss": 0.36654186248779297,
      "eval_precision": 0.7994483845547675,
      "eval_recall": 0.8871884564932225,
      "eval_runtime": 77.9824,
      "eval_samples_per_second": 59.924,
      "eval_steps_per_second": 7.502,
      "step": 555
    }
  ],
  "logging_steps": 10,
  "max_steps": 2770,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.929257128088371e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}