{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.34371643394199786,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0034371643394199786, "grad_norm": 0.5334700345993042, "learning_rate": 1e-05, "loss": 2.5683, "step": 1 },
    { "epoch": 0.0034371643394199786, "eval_loss": 2.636681318283081, "eval_runtime": 16.6995, "eval_samples_per_second": 29.342, "eval_steps_per_second": 29.342, "step": 1 },
    { "epoch": 0.006874328678839957, "grad_norm": 0.5012644529342651, "learning_rate": 2e-05, "loss": 2.5063, "step": 2 },
    { "epoch": 0.010311493018259935, "grad_norm": 0.5937795639038086, "learning_rate": 3e-05, "loss": 2.6041, "step": 3 },
    { "epoch": 0.013748657357679914, "grad_norm": 0.44918254017829895, "learning_rate": 4e-05, "loss": 2.5385, "step": 4 },
    { "epoch": 0.017185821697099892, "grad_norm": 0.4823933243751526, "learning_rate": 5e-05, "loss": 2.5282, "step": 5 },
    { "epoch": 0.02062298603651987, "grad_norm": 0.4484499990940094, "learning_rate": 6e-05, "loss": 2.6244, "step": 6 },
    { "epoch": 0.02406015037593985, "grad_norm": 0.7811288833618164, "learning_rate": 7e-05, "loss": 2.669, "step": 7 },
    { "epoch": 0.02749731471535983, "grad_norm": 0.6050478219985962, "learning_rate": 8e-05, "loss": 2.5652, "step": 8 },
    { "epoch": 0.030934479054779807, "grad_norm": 0.3714107275009155, "learning_rate": 9e-05, "loss": 2.5476, "step": 9 },
    { "epoch": 0.034371643394199784, "grad_norm": 0.48898109793663025, "learning_rate": 0.0001, "loss": 2.4549, "step": 10 },
    { "epoch": 0.03780880773361976, "grad_norm": 0.5035548210144043, "learning_rate": 9.99695413509548e-05, "loss": 2.5455, "step": 11 },
    { "epoch": 0.04124597207303974, "grad_norm": 0.3796736001968384, "learning_rate": 9.987820251299122e-05, "loss": 2.5758, "step": 12 },
    { "epoch": 0.04468313641245972, "grad_norm": 0.7549750208854675, "learning_rate": 9.972609476841367e-05, "loss": 2.4608, "step": 13 },
    { "epoch": 0.0481203007518797, "grad_norm": 0.4282947778701782, "learning_rate": 9.951340343707852e-05, "loss": 2.6564, "step": 14 },
    { "epoch": 0.05155746509129968, "grad_norm": 0.33888301253318787, "learning_rate": 9.924038765061042e-05, "loss": 2.4809, "step": 15 },
    { "epoch": 0.05499462943071966, "grad_norm": 0.30642518401145935, "learning_rate": 9.890738003669029e-05, "loss": 2.3737, "step": 16 },
    { "epoch": 0.058431793770139635, "grad_norm": 0.37518784403800964, "learning_rate": 9.851478631379982e-05, "loss": 2.4509, "step": 17 },
    { "epoch": 0.06186895810955961, "grad_norm": 0.32034221291542053, "learning_rate": 9.806308479691595e-05, "loss": 2.4367, "step": 18 },
    { "epoch": 0.0653061224489796, "grad_norm": 0.5690639019012451, "learning_rate": 9.755282581475769e-05, "loss": 2.3744, "step": 19 },
    { "epoch": 0.06874328678839957, "grad_norm": 0.35108500719070435, "learning_rate": 9.698463103929542e-05, "loss": 2.3465, "step": 20 },
    { "epoch": 0.07218045112781955, "grad_norm": 0.4233250021934509, "learning_rate": 9.635919272833938e-05, "loss": 2.4106, "step": 21 },
    { "epoch": 0.07561761546723952, "grad_norm": 0.2957749664783478, "learning_rate": 9.567727288213005e-05, "loss": 2.387, "step": 22 },
    { "epoch": 0.07905477980665951, "grad_norm": 0.4620259404182434, "learning_rate": 9.493970231495835e-05, "loss": 2.5992, "step": 23 },
    { "epoch": 0.08249194414607948, "grad_norm": 0.33380621671676636, "learning_rate": 9.414737964294636e-05, "loss": 2.3997, "step": 24 },
    { "epoch": 0.08592910848549946, "grad_norm": 0.47963470220565796, "learning_rate": 9.330127018922194e-05, "loss": 2.4815, "step": 25 },
    { "epoch": 0.08936627282491943, "grad_norm": 0.7058689594268799, "learning_rate": 9.24024048078213e-05, "loss": 2.2882, "step": 26 },
    { "epoch": 0.09280343716433942, "grad_norm": 0.31185293197631836, "learning_rate": 9.145187862775209e-05, "loss": 2.3354, "step": 27 },
    { "epoch": 0.0962406015037594, "grad_norm": 0.7282019257545471, "learning_rate": 9.045084971874738e-05, "loss": 2.4109, "step": 28 },
    { "epoch": 0.09967776584317938, "grad_norm": 0.41433587670326233, "learning_rate": 8.940053768033609e-05, "loss": 2.4216, "step": 29 },
    { "epoch": 0.10311493018259936, "grad_norm": 0.3362176716327667, "learning_rate": 8.83022221559489e-05, "loss": 2.4594, "step": 30 },
    { "epoch": 0.10655209452201933, "grad_norm": 0.3251080811023712, "learning_rate": 8.715724127386972e-05, "loss": 2.3187, "step": 31 },
    { "epoch": 0.10998925886143932, "grad_norm": 0.2905067503452301, "learning_rate": 8.596699001693255e-05, "loss": 2.2976, "step": 32 },
    { "epoch": 0.11342642320085929, "grad_norm": 0.6331003308296204, "learning_rate": 8.473291852294987e-05, "loss": 2.3578, "step": 33 },
    { "epoch": 0.11686358754027927, "grad_norm": 0.4537586271762848, "learning_rate": 8.345653031794292e-05, "loss": 2.2121, "step": 34 },
    { "epoch": 0.12030075187969924, "grad_norm": 0.43433278799057007, "learning_rate": 8.213938048432697e-05, "loss": 2.3014, "step": 35 },
    { "epoch": 0.12373791621911923, "grad_norm": 0.3546953797340393, "learning_rate": 8.07830737662829e-05, "loss": 2.2345, "step": 36 },
    { "epoch": 0.1271750805585392, "grad_norm": 0.40316927433013916, "learning_rate": 7.938926261462366e-05, "loss": 2.3044, "step": 37 },
    { "epoch": 0.1306122448979592, "grad_norm": 0.3827572762966156, "learning_rate": 7.795964517353735e-05, "loss": 2.2661, "step": 38 },
    { "epoch": 0.13404940923737915, "grad_norm": 0.3343763053417206, "learning_rate": 7.649596321166024e-05, "loss": 2.2766, "step": 39 },
    { "epoch": 0.13748657357679914, "grad_norm": 0.2966914772987366, "learning_rate": 7.500000000000001e-05, "loss": 2.3064, "step": 40 },
    { "epoch": 0.14092373791621912, "grad_norm": 0.30342233180999756, "learning_rate": 7.347357813929454e-05, "loss": 2.3846, "step": 41 },
    { "epoch": 0.1443609022556391, "grad_norm": 0.2643699049949646, "learning_rate": 7.191855733945387e-05, "loss": 2.3035, "step": 42 },
    { "epoch": 0.14779806659505906, "grad_norm": 0.35844695568084717, "learning_rate": 7.033683215379002e-05, "loss": 2.2482, "step": 43 },
    { "epoch": 0.15123523093447905, "grad_norm": 0.2792685627937317, "learning_rate": 6.873032967079561e-05, "loss": 2.2494, "step": 44 },
    { "epoch": 0.15467239527389903, "grad_norm": 0.29278451204299927, "learning_rate": 6.710100716628344e-05, "loss": 2.3016, "step": 45 },
    { "epoch": 0.15810955961331902, "grad_norm": 0.3153465688228607, "learning_rate": 6.545084971874738e-05, "loss": 2.2621, "step": 46 },
    { "epoch": 0.161546723952739, "grad_norm": 0.39804384112358093, "learning_rate": 6.378186779084995e-05, "loss": 2.2838, "step": 47 },
    { "epoch": 0.16498388829215896, "grad_norm": 0.29693296551704407, "learning_rate": 6.209609477998338e-05, "loss": 2.2569, "step": 48 },
    { "epoch": 0.16842105263157894, "grad_norm": 0.4537481367588043, "learning_rate": 6.0395584540887963e-05, "loss": 2.2107, "step": 49 },
    { "epoch": 0.17185821697099893, "grad_norm": 0.2757551670074463, "learning_rate": 5.868240888334653e-05, "loss": 2.2964, "step": 50 },
    { "epoch": 0.17185821697099893, "eval_loss": 2.3215746879577637, "eval_runtime": 16.7462, "eval_samples_per_second": 29.26, "eval_steps_per_second": 29.26, "step": 50 },
    { "epoch": 0.1752953813104189, "grad_norm": 0.3440859317779541, "learning_rate": 5.695865504800327e-05, "loss": 2.2643, "step": 51 },
    { "epoch": 0.17873254564983887, "grad_norm": 0.3143952488899231, "learning_rate": 5.522642316338268e-05, "loss": 2.3741, "step": 52 },
    { "epoch": 0.18216970998925885, "grad_norm": 0.5445249676704407, "learning_rate": 5.348782368720626e-05, "loss": 2.3737, "step": 53 },
    { "epoch": 0.18560687432867884, "grad_norm": 0.29417288303375244, "learning_rate": 5.174497483512506e-05, "loss": 2.2626, "step": 54 },
    { "epoch": 0.18904403866809882, "grad_norm": 0.2821301519870758, "learning_rate": 5e-05, "loss": 2.2226, "step": 55 },
    { "epoch": 0.1924812030075188, "grad_norm": 0.5049765706062317, "learning_rate": 4.825502516487497e-05, "loss": 2.2056, "step": 56 },
    { "epoch": 0.19591836734693877, "grad_norm": 0.2741824984550476, "learning_rate": 4.6512176312793736e-05, "loss": 2.0916, "step": 57 },
    { "epoch": 0.19935553168635875, "grad_norm": 0.2932296097278595, "learning_rate": 4.477357683661734e-05, "loss": 2.1948, "step": 58 },
    { "epoch": 0.20279269602577873, "grad_norm": 0.2703547477722168, "learning_rate": 4.3041344951996746e-05, "loss": 2.2377, "step": 59 },
    { "epoch": 0.20622986036519872, "grad_norm": 0.5145007371902466, "learning_rate": 4.131759111665349e-05, "loss": 2.2336, "step": 60 },
    { "epoch": 0.20966702470461868, "grad_norm": 0.3122769594192505, "learning_rate": 3.960441545911204e-05, "loss": 2.1879, "step": 61 },
    { "epoch": 0.21310418904403866, "grad_norm": 0.2763988673686981, "learning_rate": 3.790390522001662e-05, "loss": 2.2149, "step": 62 },
    { "epoch": 0.21654135338345865, "grad_norm": 0.31647786498069763, "learning_rate": 3.6218132209150045e-05, "loss": 2.1863, "step": 63 },
    { "epoch": 0.21997851772287863, "grad_norm": 0.5407307744026184, "learning_rate": 3.4549150281252636e-05, "loss": 2.2516, "step": 64 },
    { "epoch": 0.22341568206229862, "grad_norm": 0.31190451979637146, "learning_rate": 3.289899283371657e-05, "loss": 2.2059, "step": 65 },
    { "epoch": 0.22685284640171857, "grad_norm": 0.3045297861099243, "learning_rate": 3.12696703292044e-05, "loss": 2.2161, "step": 66 },
    { "epoch": 0.23029001074113856, "grad_norm": 0.3111981153488159, "learning_rate": 2.9663167846209998e-05, "loss": 2.2722, "step": 67 },
    { "epoch": 0.23372717508055854, "grad_norm": 0.5072479844093323, "learning_rate": 2.8081442660546125e-05, "loss": 2.1672, "step": 68 },
    { "epoch": 0.23716433941997853, "grad_norm": 0.4372091293334961, "learning_rate": 2.6526421860705473e-05, "loss": 2.3158, "step": 69 },
    { "epoch": 0.24060150375939848, "grad_norm": 0.28215134143829346, "learning_rate": 2.500000000000001e-05, "loss": 2.2365, "step": 70 },
    { "epoch": 0.24403866809881847, "grad_norm": 0.3402535915374756, "learning_rate": 2.350403678833976e-05, "loss": 2.2135, "step": 71 },
    { "epoch": 0.24747583243823845, "grad_norm": 0.46998509764671326, "learning_rate": 2.2040354826462668e-05, "loss": 2.3625, "step": 72 },
    { "epoch": 0.25091299677765844, "grad_norm": 0.2795167565345764, "learning_rate": 2.061073738537635e-05, "loss": 2.1809, "step": 73 },
    { "epoch": 0.2543501611170784, "grad_norm": 0.37644481658935547, "learning_rate": 1.9216926233717085e-05, "loss": 2.2632, "step": 74 },
    { "epoch": 0.2577873254564984, "grad_norm": 0.32024654746055603, "learning_rate": 1.7860619515673033e-05, "loss": 2.2528, "step": 75 },
    { "epoch": 0.2612244897959184, "grad_norm": 0.31380581855773926, "learning_rate": 1.6543469682057106e-05, "loss": 2.2103, "step": 76 },
    { "epoch": 0.2646616541353383, "grad_norm": 0.35320228338241577, "learning_rate": 1.526708147705013e-05, "loss": 2.2328, "step": 77 },
    { "epoch": 0.2680988184747583, "grad_norm": 0.3124861419200897, "learning_rate": 1.4033009983067452e-05, "loss": 2.2054, "step": 78 },
    { "epoch": 0.2715359828141783, "grad_norm": 0.4113110899925232, "learning_rate": 1.2842758726130283e-05, "loss": 2.0905, "step": 79 },
    { "epoch": 0.2749731471535983, "grad_norm": 0.47795024514198303, "learning_rate": 1.1697777844051105e-05, "loss": 2.3201, "step": 80 },
    { "epoch": 0.27841031149301826, "grad_norm": 0.29258468747138977, "learning_rate": 1.0599462319663905e-05, "loss": 2.2022, "step": 81 },
    { "epoch": 0.28184747583243824, "grad_norm": 0.28537437319755554, "learning_rate": 9.549150281252633e-06, "loss": 2.2611, "step": 82 },
    { "epoch": 0.28528464017185823, "grad_norm": 0.3414091467857361, "learning_rate": 8.548121372247918e-06, "loss": 2.2069, "step": 83 },
    { "epoch": 0.2887218045112782, "grad_norm": 0.39266514778137207, "learning_rate": 7.597595192178702e-06, "loss": 2.2485, "step": 84 },
    { "epoch": 0.2921589688506982, "grad_norm": 0.5153230428695679, "learning_rate": 6.698729810778065e-06, "loss": 2.1257, "step": 85 },
    { "epoch": 0.2955961331901181, "grad_norm": 0.31598207354545593, "learning_rate": 5.852620357053651e-06, "loss": 2.1899, "step": 86 },
    { "epoch": 0.2990332975295381, "grad_norm": 0.3173138201236725, "learning_rate": 5.060297685041659e-06, "loss": 2.209, "step": 87 },
    { "epoch": 0.3024704618689581, "grad_norm": 0.28669506311416626, "learning_rate": 4.322727117869951e-06, "loss": 2.1869, "step": 88 },
    { "epoch": 0.3059076262083781, "grad_norm": 0.31095296144485474, "learning_rate": 3.6408072716606346e-06, "loss": 2.164, "step": 89 },
    { "epoch": 0.30934479054779807, "grad_norm": 0.27381399273872375, "learning_rate": 3.0153689607045845e-06, "loss": 2.202, "step": 90 },
    { "epoch": 0.31278195488721805, "grad_norm": 0.4227629601955414, "learning_rate": 2.4471741852423237e-06, "loss": 2.3388, "step": 91 },
    { "epoch": 0.31621911922663803, "grad_norm": 0.4945060908794403, "learning_rate": 1.9369152030840556e-06, "loss": 2.1518, "step": 92 },
    { "epoch": 0.319656283566058, "grad_norm": 0.3266584873199463, "learning_rate": 1.4852136862001764e-06, "loss": 2.3398, "step": 93 },
    { "epoch": 0.323093447905478, "grad_norm": 0.2841348648071289, "learning_rate": 1.0926199633097157e-06, "loss": 2.1788, "step": 94 },
    { "epoch": 0.32653061224489793, "grad_norm": 0.2800915539264679, "learning_rate": 7.596123493895991e-07, "loss": 2.121, "step": 95 },
    { "epoch": 0.3299677765843179, "grad_norm": 0.3364481031894684, "learning_rate": 4.865965629214819e-07, "loss": 2.2739, "step": 96 },
    { "epoch": 0.3334049409237379, "grad_norm": 0.5338711738586426, "learning_rate": 2.7390523158633554e-07, "loss": 2.2574, "step": 97 },
    { "epoch": 0.3368421052631579, "grad_norm": 0.29511675238609314, "learning_rate": 1.2179748700879012e-07, "loss": 2.1717, "step": 98 },
    { "epoch": 0.34027926960257787, "grad_norm": 0.31296125054359436, "learning_rate": 3.04586490452119e-08, "loss": 2.1663, "step": 99 },
    { "epoch": 0.34371643394199786, "grad_norm": 0.2975694537162781, "learning_rate": 0.0, "loss": 2.1862, "step": 100 },
    { "epoch": 0.34371643394199786, "eval_loss": 2.2586705684661865, "eval_runtime": 16.6976, "eval_samples_per_second": 29.345, "eval_steps_per_second": 29.345, "step": 100 }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 25,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2162580270612480.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}