{
  "best_metric": 3.0250446796417236,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.05387931034482758,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005387931034482759,
      "grad_norm": 0.7922934889793396,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 3.2943,
      "step": 1
    },
    {
      "epoch": 0.0005387931034482759,
      "eval_loss": 3.5711169242858887,
      "eval_runtime": 411.9397,
      "eval_samples_per_second": 7.588,
      "eval_steps_per_second": 3.794,
      "step": 1
    },
    {
      "epoch": 0.0010775862068965517,
      "grad_norm": 0.8404221534729004,
      "learning_rate": 6.666666666666667e-06,
      "loss": 3.3738,
      "step": 2
    },
    {
      "epoch": 0.0016163793103448276,
      "grad_norm": 0.8657654523849487,
      "learning_rate": 1e-05,
      "loss": 3.3441,
      "step": 3
    },
    {
      "epoch": 0.0021551724137931034,
      "grad_norm": 0.8414548635482788,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 3.4101,
      "step": 4
    },
    {
      "epoch": 0.0026939655172413795,
      "grad_norm": 0.8666375875473022,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 3.4358,
      "step": 5
    },
    {
      "epoch": 0.003232758620689655,
      "grad_norm": 0.8541607856750488,
      "learning_rate": 2e-05,
      "loss": 3.3805,
      "step": 6
    },
    {
      "epoch": 0.003771551724137931,
      "grad_norm": 0.8899615406990051,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 3.4211,
      "step": 7
    },
    {
      "epoch": 0.004310344827586207,
      "grad_norm": 0.801926851272583,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 3.3736,
      "step": 8
    },
    {
      "epoch": 0.004849137931034483,
      "grad_norm": 0.8513683676719666,
      "learning_rate": 3e-05,
      "loss": 3.41,
      "step": 9
    },
    {
      "epoch": 0.005387931034482759,
      "grad_norm": 0.80379718542099,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 3.2838,
      "step": 10
    },
    {
      "epoch": 0.005926724137931034,
      "grad_norm": 0.5176191926002502,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 3.2512,
      "step": 11
    },
    {
      "epoch": 0.00646551724137931,
      "grad_norm": 1.1420265436172485,
      "learning_rate": 4e-05,
      "loss": 3.2356,
      "step": 12
    },
    {
      "epoch": 0.007004310344827586,
      "grad_norm": 0.5149586200714111,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 3.277,
      "step": 13
    },
    {
      "epoch": 0.007543103448275862,
      "grad_norm": 0.7136310338973999,
      "learning_rate": 4.666666666666667e-05,
      "loss": 3.2422,
      "step": 14
    },
    {
      "epoch": 0.008081896551724138,
      "grad_norm": 0.6616995930671692,
      "learning_rate": 5e-05,
      "loss": 3.2174,
      "step": 15
    },
    {
      "epoch": 0.008620689655172414,
      "grad_norm": 1.0672518014907837,
      "learning_rate": 5.333333333333333e-05,
      "loss": 3.193,
      "step": 16
    },
    {
      "epoch": 0.009159482758620689,
      "grad_norm": 0.29977813363075256,
      "learning_rate": 5.666666666666667e-05,
      "loss": 3.1336,
      "step": 17
    },
    {
      "epoch": 0.009698275862068966,
      "grad_norm": 0.8956311345100403,
      "learning_rate": 6e-05,
      "loss": 3.1273,
      "step": 18
    },
    {
      "epoch": 0.010237068965517241,
      "grad_norm": 0.3414318561553955,
      "learning_rate": 6.333333333333333e-05,
      "loss": 3.1157,
      "step": 19
    },
    {
      "epoch": 0.010775862068965518,
      "grad_norm": 0.47326546907424927,
      "learning_rate": 6.666666666666667e-05,
      "loss": 3.1407,
      "step": 20
    },
    {
      "epoch": 0.011314655172413793,
      "grad_norm": 0.29255905747413635,
      "learning_rate": 7e-05,
      "loss": 3.1171,
      "step": 21
    },
    {
      "epoch": 0.011853448275862068,
      "grad_norm": 0.25283244252204895,
      "learning_rate": 7.333333333333333e-05,
      "loss": 3.0987,
      "step": 22
    },
    {
      "epoch": 0.012392241379310345,
      "grad_norm": 0.6788113713264465,
      "learning_rate": 7.666666666666667e-05,
      "loss": 3.0829,
      "step": 23
    },
    {
      "epoch": 0.01293103448275862,
      "grad_norm": 2.8709752559661865,
      "learning_rate": 8e-05,
      "loss": 3.0979,
      "step": 24
    },
    {
      "epoch": 0.013469827586206896,
      "grad_norm": 0.3539103865623474,
      "learning_rate": 8.333333333333334e-05,
      "loss": 3.0696,
      "step": 25
    },
    {
      "epoch": 0.014008620689655173,
      "grad_norm": 1.9766085147857666,
      "learning_rate": 8.666666666666667e-05,
      "loss": 3.1419,
      "step": 26
    },
    {
      "epoch": 0.014547413793103448,
      "grad_norm": 1.2451584339141846,
      "learning_rate": 9e-05,
      "loss": 3.1152,
      "step": 27
    },
    {
      "epoch": 0.015086206896551725,
      "grad_norm": 3.3829805850982666,
      "learning_rate": 9.333333333333334e-05,
      "loss": 3.1891,
      "step": 28
    },
    {
      "epoch": 0.015625,
      "grad_norm": 2.928429365158081,
      "learning_rate": 9.666666666666667e-05,
      "loss": 3.1886,
      "step": 29
    },
    {
      "epoch": 0.016163793103448277,
      "grad_norm": 1.225688099861145,
      "learning_rate": 0.0001,
      "loss": 3.0961,
      "step": 30
    },
    {
      "epoch": 0.01670258620689655,
      "grad_norm": 1.955092430114746,
      "learning_rate": 9.994965332706573e-05,
      "loss": 3.1565,
      "step": 31
    },
    {
      "epoch": 0.017241379310344827,
      "grad_norm": 1.2886714935302734,
      "learning_rate": 9.979871469976196e-05,
      "loss": 3.2317,
      "step": 32
    },
    {
      "epoch": 0.017780172413793104,
      "grad_norm": 1.1216020584106445,
      "learning_rate": 9.954748808839674e-05,
      "loss": 3.1817,
      "step": 33
    },
    {
      "epoch": 0.018318965517241378,
      "grad_norm": 0.5160207152366638,
      "learning_rate": 9.919647942993148e-05,
      "loss": 3.1562,
      "step": 34
    },
    {
      "epoch": 0.018857758620689655,
      "grad_norm": 0.7734323740005493,
      "learning_rate": 9.874639560909117e-05,
      "loss": 3.1657,
      "step": 35
    },
    {
      "epoch": 0.01939655172413793,
      "grad_norm": 0.583284854888916,
      "learning_rate": 9.819814303479267e-05,
      "loss": 3.2008,
      "step": 36
    },
    {
      "epoch": 0.01993534482758621,
      "grad_norm": 0.2643071115016937,
      "learning_rate": 9.755282581475769e-05,
      "loss": 3.1859,
      "step": 37
    },
    {
      "epoch": 0.020474137931034482,
      "grad_norm": 0.694346010684967,
      "learning_rate": 9.681174353198687e-05,
      "loss": 3.2295,
      "step": 38
    },
    {
      "epoch": 0.02101293103448276,
      "grad_norm": 0.5720380544662476,
      "learning_rate": 9.597638862757255e-05,
      "loss": 3.2481,
      "step": 39
    },
    {
      "epoch": 0.021551724137931036,
      "grad_norm": 0.3155198395252228,
      "learning_rate": 9.504844339512095e-05,
      "loss": 3.2689,
      "step": 40
    },
    {
      "epoch": 0.02209051724137931,
      "grad_norm": 0.9720726013183594,
      "learning_rate": 9.40297765928369e-05,
      "loss": 3.225,
      "step": 41
    },
    {
      "epoch": 0.022629310344827586,
      "grad_norm": 1.3867518901824951,
      "learning_rate": 9.292243968009331e-05,
      "loss": 3.0532,
      "step": 42
    },
    {
      "epoch": 0.023168103448275863,
      "grad_norm": 1.3067538738250732,
      "learning_rate": 9.172866268606513e-05,
      "loss": 3.0021,
      "step": 43
    },
    {
      "epoch": 0.023706896551724137,
      "grad_norm": 1.5030200481414795,
      "learning_rate": 9.045084971874738e-05,
      "loss": 3.0634,
      "step": 44
    },
    {
      "epoch": 0.024245689655172414,
      "grad_norm": 0.237008199095726,
      "learning_rate": 8.90915741234015e-05,
      "loss": 2.9759,
      "step": 45
    },
    {
      "epoch": 0.02478448275862069,
      "grad_norm": 0.5865659713745117,
      "learning_rate": 8.765357330018056e-05,
      "loss": 3.0223,
      "step": 46
    },
    {
      "epoch": 0.025323275862068964,
      "grad_norm": 0.6696709394454956,
      "learning_rate": 8.613974319136958e-05,
      "loss": 2.9951,
      "step": 47
    },
    {
      "epoch": 0.02586206896551724,
      "grad_norm": 0.1882406771183014,
      "learning_rate": 8.455313244934324e-05,
      "loss": 3.0037,
      "step": 48
    },
    {
      "epoch": 0.026400862068965518,
      "grad_norm": 0.38517552614212036,
      "learning_rate": 8.289693629698564e-05,
      "loss": 2.9575,
      "step": 49
    },
    {
      "epoch": 0.02693965517241379,
      "grad_norm": 0.9038740992546082,
      "learning_rate": 8.117449009293668e-05,
      "loss": 3.0508,
      "step": 50
    },
    {
      "epoch": 0.02693965517241379,
      "eval_loss": 3.033242702484131,
      "eval_runtime": 414.8921,
      "eval_samples_per_second": 7.534,
      "eval_steps_per_second": 3.767,
      "step": 50
    },
    {
      "epoch": 0.02747844827586207,
      "grad_norm": 1.095533013343811,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.7835,
      "step": 51
    },
    {
      "epoch": 0.028017241379310345,
      "grad_norm": 0.3085789084434509,
      "learning_rate": 7.754484907260513e-05,
      "loss": 2.8091,
      "step": 52
    },
    {
      "epoch": 0.028556034482758622,
      "grad_norm": 0.4151741564273834,
      "learning_rate": 7.564496387029532e-05,
      "loss": 2.8411,
      "step": 53
    },
    {
      "epoch": 0.029094827586206896,
      "grad_norm": 0.284201979637146,
      "learning_rate": 7.369343312364993e-05,
      "loss": 2.8557,
      "step": 54
    },
    {
      "epoch": 0.029633620689655173,
      "grad_norm": 0.30802321434020996,
      "learning_rate": 7.169418695587791e-05,
      "loss": 2.8606,
      "step": 55
    },
    {
      "epoch": 0.03017241379310345,
      "grad_norm": 0.16543473303318024,
      "learning_rate": 6.965125158269619e-05,
      "loss": 2.8668,
      "step": 56
    },
    {
      "epoch": 0.030711206896551723,
      "grad_norm": 0.4956265985965729,
      "learning_rate": 6.756874120406714e-05,
      "loss": 2.9207,
      "step": 57
    },
    {
      "epoch": 0.03125,
      "grad_norm": 0.6670874357223511,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.9217,
      "step": 58
    },
    {
      "epoch": 0.03178879310344827,
      "grad_norm": 0.22392654418945312,
      "learning_rate": 6.330184227833376e-05,
      "loss": 2.9207,
      "step": 59
    },
    {
      "epoch": 0.032327586206896554,
      "grad_norm": 0.7628993988037109,
      "learning_rate": 6.112604669781572e-05,
      "loss": 2.9229,
      "step": 60
    },
    {
      "epoch": 0.03286637931034483,
      "grad_norm": 0.5758177042007446,
      "learning_rate": 5.8927844739931834e-05,
      "loss": 2.9308,
      "step": 61
    },
    {
      "epoch": 0.0334051724137931,
      "grad_norm": 0.2536393702030182,
      "learning_rate": 5.6711663290882776e-05,
      "loss": 2.9689,
      "step": 62
    },
    {
      "epoch": 0.03394396551724138,
      "grad_norm": 0.9088274240493774,
      "learning_rate": 5.448196544517168e-05,
      "loss": 2.9812,
      "step": 63
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 0.3271176517009735,
      "learning_rate": 5.2243241517525754e-05,
      "loss": 2.9661,
      "step": 64
    },
    {
      "epoch": 0.03502155172413793,
      "grad_norm": 0.12883682548999786,
      "learning_rate": 5e-05,
      "loss": 2.9647,
      "step": 65
    },
    {
      "epoch": 0.03556034482758621,
      "grad_norm": 0.3966907858848572,
      "learning_rate": 4.775675848247427e-05,
      "loss": 2.9658,
      "step": 66
    },
    {
      "epoch": 0.03609913793103448,
      "grad_norm": 0.37074947357177734,
      "learning_rate": 4.551803455482833e-05,
      "loss": 2.9655,
      "step": 67
    },
    {
      "epoch": 0.036637931034482756,
      "grad_norm": 0.16934780776500702,
      "learning_rate": 4.328833670911724e-05,
      "loss": 3.0221,
      "step": 68
    },
    {
      "epoch": 0.037176724137931036,
      "grad_norm": 0.15378868579864502,
      "learning_rate": 4.107215526006817e-05,
      "loss": 3.0215,
      "step": 69
    },
    {
      "epoch": 0.03771551724137931,
      "grad_norm": 0.23121915757656097,
      "learning_rate": 3.887395330218429e-05,
      "loss": 3.0209,
      "step": 70
    },
    {
      "epoch": 0.03825431034482758,
      "grad_norm": 0.2312898337841034,
      "learning_rate": 3.6698157721666246e-05,
      "loss": 3.0241,
      "step": 71
    },
    {
      "epoch": 0.03879310344827586,
      "grad_norm": 0.13653403520584106,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 3.0201,
      "step": 72
    },
    {
      "epoch": 0.03933189655172414,
      "grad_norm": 0.18783865869045258,
      "learning_rate": 3.243125879593286e-05,
      "loss": 3.0155,
      "step": 73
    },
    {
      "epoch": 0.03987068965517242,
      "grad_norm": 0.1803734302520752,
      "learning_rate": 3.0348748417303823e-05,
      "loss": 3.0518,
      "step": 74
    },
    {
      "epoch": 0.04040948275862069,
      "grad_norm": 0.171559140086174,
      "learning_rate": 2.8305813044122097e-05,
      "loss": 3.0685,
      "step": 75
    },
    {
      "epoch": 0.040948275862068964,
      "grad_norm": 0.1469101756811142,
      "learning_rate": 2.630656687635007e-05,
      "loss": 3.0742,
      "step": 76
    },
    {
      "epoch": 0.041487068965517244,
      "grad_norm": 0.17329084873199463,
      "learning_rate": 2.43550361297047e-05,
      "loss": 3.0691,
      "step": 77
    },
    {
      "epoch": 0.04202586206896552,
      "grad_norm": 0.26190730929374695,
      "learning_rate": 2.245515092739488e-05,
      "loss": 3.0717,
      "step": 78
    },
    {
      "epoch": 0.04256465517241379,
      "grad_norm": 0.20236724615097046,
      "learning_rate": 2.061073738537635e-05,
      "loss": 3.0735,
      "step": 79
    },
    {
      "epoch": 0.04310344827586207,
      "grad_norm": 0.4072014093399048,
      "learning_rate": 1.8825509907063327e-05,
      "loss": 3.0772,
      "step": 80
    },
    {
      "epoch": 0.043642241379310345,
      "grad_norm": 0.22613751888275146,
      "learning_rate": 1.7103063703014372e-05,
      "loss": 3.1248,
      "step": 81
    },
    {
      "epoch": 0.04418103448275862,
      "grad_norm": 0.20124013721942902,
      "learning_rate": 1.544686755065677e-05,
      "loss": 3.1211,
      "step": 82
    },
    {
      "epoch": 0.0447198275862069,
      "grad_norm": 0.3580072820186615,
      "learning_rate": 1.3860256808630428e-05,
      "loss": 3.1244,
      "step": 83
    },
    {
      "epoch": 0.04525862068965517,
      "grad_norm": 0.3929254710674286,
      "learning_rate": 1.2346426699819458e-05,
      "loss": 3.1228,
      "step": 84
    },
    {
      "epoch": 0.045797413793103446,
      "grad_norm": 0.2717115581035614,
      "learning_rate": 1.090842587659851e-05,
      "loss": 3.1604,
      "step": 85
    },
    {
      "epoch": 0.04633620689655173,
      "grad_norm": 0.16544808447360992,
      "learning_rate": 9.549150281252633e-06,
      "loss": 3.1741,
      "step": 86
    },
    {
      "epoch": 0.046875,
      "grad_norm": 0.23587080836296082,
      "learning_rate": 8.271337313934869e-06,
      "loss": 3.171,
      "step": 87
    },
    {
      "epoch": 0.04741379310344827,
      "grad_norm": 0.281700074672699,
      "learning_rate": 7.077560319906695e-06,
      "loss": 3.2154,
      "step": 88
    },
    {
      "epoch": 0.047952586206896554,
      "grad_norm": 0.46962276101112366,
      "learning_rate": 5.9702234071631e-06,
      "loss": 3.2238,
      "step": 89
    },
    {
      "epoch": 0.04849137931034483,
      "grad_norm": 0.6593974828720093,
      "learning_rate": 4.951556604879048e-06,
      "loss": 3.2778,
      "step": 90
    },
    {
      "epoch": 0.0490301724137931,
      "grad_norm": 0.2813006341457367,
      "learning_rate": 4.023611372427471e-06,
      "loss": 3.1348,
      "step": 91
    },
    {
      "epoch": 0.04956896551724138,
      "grad_norm": 0.2134297639131546,
      "learning_rate": 3.18825646801314e-06,
      "loss": 3.0484,
      "step": 92
    },
    {
      "epoch": 0.050107758620689655,
      "grad_norm": 0.26354482769966125,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 3.0046,
      "step": 93
    },
    {
      "epoch": 0.05064655172413793,
      "grad_norm": 0.19792447984218597,
      "learning_rate": 1.8018569652073381e-06,
      "loss": 3.0429,
      "step": 94
    },
    {
      "epoch": 0.05118534482758621,
      "grad_norm": 0.21508128941059113,
      "learning_rate": 1.2536043909088191e-06,
      "loss": 3.0099,
      "step": 95
    },
    {
      "epoch": 0.05172413793103448,
      "grad_norm": 0.23467521369457245,
      "learning_rate": 8.035205700685167e-07,
      "loss": 3.0566,
      "step": 96
    },
    {
      "epoch": 0.052262931034482756,
      "grad_norm": 0.3338031768798828,
      "learning_rate": 4.52511911603265e-07,
      "loss": 3.0219,
      "step": 97
    },
    {
      "epoch": 0.052801724137931036,
      "grad_norm": 0.24319298565387726,
      "learning_rate": 2.012853002380466e-07,
      "loss": 3.0833,
      "step": 98
    },
    {
      "epoch": 0.05334051724137931,
      "grad_norm": 0.28416627645492554,
      "learning_rate": 5.0346672934270534e-08,
      "loss": 3.0058,
      "step": 99
    },
    {
      "epoch": 0.05387931034482758,
      "grad_norm": 0.3014352321624756,
      "learning_rate": 0.0,
      "loss": 3.0246,
      "step": 100
    },
    {
      "epoch": 0.05387931034482758,
      "eval_loss": 3.0250446796417236,
      "eval_runtime": 415.4589,
      "eval_samples_per_second": 7.524,
      "eval_steps_per_second": 3.762,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.4281547880267776e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}