|
{ |
|
"best_metric": 0.8466009497642517, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.056657223796033995, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00028328611898016995, |
|
"grad_norm": 2.4459738731384277, |
|
"learning_rate": 5e-06, |
|
"loss": 1.0328, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00028328611898016995, |
|
"eval_loss": 1.364790678024292, |
|
"eval_runtime": 485.4956, |
|
"eval_samples_per_second": 12.247, |
|
"eval_steps_per_second": 6.124, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0005665722379603399, |
|
"grad_norm": 0.7349909543991089, |
|
"learning_rate": 1e-05, |
|
"loss": 1.044, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0008498583569405099, |
|
"grad_norm": 0.5141507387161255, |
|
"learning_rate": 1.5e-05, |
|
"loss": 1.0227, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0011331444759206798, |
|
"grad_norm": 1.156713604927063, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0904, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.00141643059490085, |
|
"grad_norm": 0.7857816219329834, |
|
"learning_rate": 2.5e-05, |
|
"loss": 1.1226, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0016997167138810198, |
|
"grad_norm": 0.5737718939781189, |
|
"learning_rate": 3e-05, |
|
"loss": 1.0554, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.00198300283286119, |
|
"grad_norm": 0.7259950637817383, |
|
"learning_rate": 3.5e-05, |
|
"loss": 1.0777, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0022662889518413596, |
|
"grad_norm": 0.5065688490867615, |
|
"learning_rate": 4e-05, |
|
"loss": 1.0204, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0025495750708215297, |
|
"grad_norm": 0.41538459062576294, |
|
"learning_rate": 4.5e-05, |
|
"loss": 1.04, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0028328611898017, |
|
"grad_norm": 0.3766935467720032, |
|
"learning_rate": 5e-05, |
|
"loss": 0.9753, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0031161473087818695, |
|
"grad_norm": 0.40806570649147034, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 0.9953, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0033994334277620396, |
|
"grad_norm": 0.3899538218975067, |
|
"learning_rate": 6e-05, |
|
"loss": 1.0186, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0036827195467422098, |
|
"grad_norm": 0.43268951773643494, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 0.9802, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.00396600566572238, |
|
"grad_norm": 0.42716556787490845, |
|
"learning_rate": 7e-05, |
|
"loss": 0.9618, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.00424929178470255, |
|
"grad_norm": 0.4129604697227478, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.9033, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.004532577903682719, |
|
"grad_norm": 0.40332239866256714, |
|
"learning_rate": 8e-05, |
|
"loss": 0.8851, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.004815864022662889, |
|
"grad_norm": 0.42246365547180176, |
|
"learning_rate": 8.5e-05, |
|
"loss": 0.995, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0050991501416430595, |
|
"grad_norm": 0.40158501267433167, |
|
"learning_rate": 9e-05, |
|
"loss": 0.9784, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.00538243626062323, |
|
"grad_norm": 0.3314235210418701, |
|
"learning_rate": 9.5e-05, |
|
"loss": 0.8783, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0056657223796034, |
|
"grad_norm": 0.30537742376327515, |
|
"learning_rate": 0.0001, |
|
"loss": 0.8774, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.00594900849858357, |
|
"grad_norm": 0.3494546711444855, |
|
"learning_rate": 9.999238475781957e-05, |
|
"loss": 0.8816, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.006232294617563739, |
|
"grad_norm": 0.3360932767391205, |
|
"learning_rate": 9.99695413509548e-05, |
|
"loss": 0.8611, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.006515580736543909, |
|
"grad_norm": 0.3689824044704437, |
|
"learning_rate": 9.99314767377287e-05, |
|
"loss": 0.8842, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.006798866855524079, |
|
"grad_norm": 0.37334781885147095, |
|
"learning_rate": 9.987820251299122e-05, |
|
"loss": 0.9124, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.007082152974504249, |
|
"grad_norm": 0.38459479808807373, |
|
"learning_rate": 9.980973490458728e-05, |
|
"loss": 0.8801, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0073654390934844195, |
|
"grad_norm": 0.3251090943813324, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 0.8642, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.00764872521246459, |
|
"grad_norm": 0.3844033479690552, |
|
"learning_rate": 9.962730758206611e-05, |
|
"loss": 0.9023, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.00793201133144476, |
|
"grad_norm": 0.40686604380607605, |
|
"learning_rate": 9.951340343707852e-05, |
|
"loss": 0.9195, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.00821529745042493, |
|
"grad_norm": 0.30939722061157227, |
|
"learning_rate": 9.938441702975689e-05, |
|
"loss": 0.8657, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.0084985835694051, |
|
"grad_norm": 0.4041453003883362, |
|
"learning_rate": 9.924038765061042e-05, |
|
"loss": 0.9044, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.008781869688385268, |
|
"grad_norm": 0.4817129373550415, |
|
"learning_rate": 9.908135917238321e-05, |
|
"loss": 0.9488, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.009065155807365438, |
|
"grad_norm": 0.4093549847602844, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 0.9094, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.009348441926345609, |
|
"grad_norm": 0.43957093358039856, |
|
"learning_rate": 9.871850323926177e-05, |
|
"loss": 0.9521, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.009631728045325779, |
|
"grad_norm": 0.517327070236206, |
|
"learning_rate": 9.851478631379982e-05, |
|
"loss": 0.8606, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.009915014164305949, |
|
"grad_norm": 0.6792945861816406, |
|
"learning_rate": 9.829629131445342e-05, |
|
"loss": 1.0191, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.010198300283286119, |
|
"grad_norm": 0.5788185000419617, |
|
"learning_rate": 9.806308479691595e-05, |
|
"loss": 0.9683, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.010481586402266289, |
|
"grad_norm": 0.63746178150177, |
|
"learning_rate": 9.781523779815179e-05, |
|
"loss": 1.0815, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.01076487252124646, |
|
"grad_norm": 0.5034915208816528, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.9157, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.01104815864022663, |
|
"grad_norm": 0.568718433380127, |
|
"learning_rate": 9.727592877996585e-05, |
|
"loss": 1.0136, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0113314447592068, |
|
"grad_norm": 0.7134827971458435, |
|
"learning_rate": 9.698463103929542e-05, |
|
"loss": 0.9963, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01161473087818697, |
|
"grad_norm": 0.6913411617279053, |
|
"learning_rate": 9.667902132486009e-05, |
|
"loss": 1.0246, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.01189801699716714, |
|
"grad_norm": 0.7879395484924316, |
|
"learning_rate": 9.635919272833938e-05, |
|
"loss": 1.1409, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.012181303116147308, |
|
"grad_norm": 0.6506746411323547, |
|
"learning_rate": 9.602524267262203e-05, |
|
"loss": 1.0367, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.012464589235127478, |
|
"grad_norm": 0.7825458645820618, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 1.0962, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.012747875354107648, |
|
"grad_norm": 0.8112017512321472, |
|
"learning_rate": 9.53153893518325e-05, |
|
"loss": 0.9379, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.013031161473087818, |
|
"grad_norm": 0.9518360495567322, |
|
"learning_rate": 9.493970231495835e-05, |
|
"loss": 1.2331, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.013314447592067988, |
|
"grad_norm": 1.0096303224563599, |
|
"learning_rate": 9.45503262094184e-05, |
|
"loss": 1.0212, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.013597733711048159, |
|
"grad_norm": 1.4635624885559082, |
|
"learning_rate": 9.414737964294636e-05, |
|
"loss": 1.18, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.013881019830028329, |
|
"grad_norm": 1.5704519748687744, |
|
"learning_rate": 9.373098535696979e-05, |
|
"loss": 1.0703, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.014164305949008499, |
|
"grad_norm": 2.2931337356567383, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 1.0591, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.014164305949008499, |
|
"eval_loss": 0.9707683324813843, |
|
"eval_runtime": 488.0589, |
|
"eval_samples_per_second": 12.183, |
|
"eval_steps_per_second": 6.091, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.014447592067988669, |
|
"grad_norm": 0.31635814905166626, |
|
"learning_rate": 9.285836503510562e-05, |
|
"loss": 0.7657, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.014730878186968839, |
|
"grad_norm": 0.3915429711341858, |
|
"learning_rate": 9.24024048078213e-05, |
|
"loss": 0.8888, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.01501416430594901, |
|
"grad_norm": 0.37524592876434326, |
|
"learning_rate": 9.193352839727121e-05, |
|
"loss": 0.8045, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.01529745042492918, |
|
"grad_norm": 0.32623764872550964, |
|
"learning_rate": 9.145187862775209e-05, |
|
"loss": 0.8345, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.015580736543909348, |
|
"grad_norm": 0.2912946939468384, |
|
"learning_rate": 9.09576022144496e-05, |
|
"loss": 0.8638, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.01586402266288952, |
|
"grad_norm": 0.28338658809661865, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.8685, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.01614730878186969, |
|
"grad_norm": 0.2784612476825714, |
|
"learning_rate": 8.993177550236464e-05, |
|
"loss": 0.8542, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.01643059490084986, |
|
"grad_norm": 0.273578405380249, |
|
"learning_rate": 8.940053768033609e-05, |
|
"loss": 0.8186, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.01671388101983003, |
|
"grad_norm": 0.27225029468536377, |
|
"learning_rate": 8.885729807284856e-05, |
|
"loss": 0.8194, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.0169971671388102, |
|
"grad_norm": 0.2823231816291809, |
|
"learning_rate": 8.83022221559489e-05, |
|
"loss": 0.8064, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.017280453257790367, |
|
"grad_norm": 0.27315282821655273, |
|
"learning_rate": 8.773547901113862e-05, |
|
"loss": 0.7767, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.017563739376770537, |
|
"grad_norm": 0.3057181239128113, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 0.8559, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.017847025495750707, |
|
"grad_norm": 0.25322389602661133, |
|
"learning_rate": 8.656768508095853e-05, |
|
"loss": 0.7594, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.018130311614730877, |
|
"grad_norm": 0.2614225149154663, |
|
"learning_rate": 8.596699001693255e-05, |
|
"loss": 0.8225, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.018413597733711047, |
|
"grad_norm": 0.23576407134532928, |
|
"learning_rate": 8.535533905932738e-05, |
|
"loss": 0.8388, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.018696883852691217, |
|
"grad_norm": 0.24987991154193878, |
|
"learning_rate": 8.473291852294987e-05, |
|
"loss": 0.782, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.018980169971671387, |
|
"grad_norm": 0.2600783705711365, |
|
"learning_rate": 8.409991800312493e-05, |
|
"loss": 0.8176, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.019263456090651557, |
|
"grad_norm": 0.2444659173488617, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 0.8305, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.019546742209631728, |
|
"grad_norm": 0.2555049657821655, |
|
"learning_rate": 8.280295144952536e-05, |
|
"loss": 0.8203, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.019830028328611898, |
|
"grad_norm": 0.2540525496006012, |
|
"learning_rate": 8.213938048432697e-05, |
|
"loss": 0.7953, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.020113314447592068, |
|
"grad_norm": 0.2432480752468109, |
|
"learning_rate": 8.146601955249188e-05, |
|
"loss": 0.8103, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.020396600566572238, |
|
"grad_norm": 0.28842082619667053, |
|
"learning_rate": 8.07830737662829e-05, |
|
"loss": 0.7827, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.020679886685552408, |
|
"grad_norm": 0.258884996175766, |
|
"learning_rate": 8.009075115760243e-05, |
|
"loss": 0.7813, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.020963172804532578, |
|
"grad_norm": 0.28011828660964966, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.8511, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.021246458923512748, |
|
"grad_norm": 0.3141220211982727, |
|
"learning_rate": 7.86788218175523e-05, |
|
"loss": 0.8916, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02152974504249292, |
|
"grad_norm": 0.31731855869293213, |
|
"learning_rate": 7.795964517353735e-05, |
|
"loss": 0.7985, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.02181303116147309, |
|
"grad_norm": 0.3145108222961426, |
|
"learning_rate": 7.723195175075136e-05, |
|
"loss": 0.8648, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.02209631728045326, |
|
"grad_norm": 0.33710357546806335, |
|
"learning_rate": 7.649596321166024e-05, |
|
"loss": 0.8102, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.02237960339943343, |
|
"grad_norm": 0.3097231090068817, |
|
"learning_rate": 7.575190374550272e-05, |
|
"loss": 0.8856, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.0226628895184136, |
|
"grad_norm": 0.33917585015296936, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.8405, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02294617563739377, |
|
"grad_norm": 0.3195075988769531, |
|
"learning_rate": 7.424048101231686e-05, |
|
"loss": 0.8151, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.02322946175637394, |
|
"grad_norm": 0.4224698543548584, |
|
"learning_rate": 7.347357813929454e-05, |
|
"loss": 0.9041, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.02351274787535411, |
|
"grad_norm": 0.4021918475627899, |
|
"learning_rate": 7.269952498697734e-05, |
|
"loss": 0.9231, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.02379603399433428, |
|
"grad_norm": 0.4472004771232605, |
|
"learning_rate": 7.191855733945387e-05, |
|
"loss": 0.943, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.024079320113314446, |
|
"grad_norm": 0.5003901124000549, |
|
"learning_rate": 7.113091308703498e-05, |
|
"loss": 0.8979, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.024362606232294616, |
|
"grad_norm": 0.5862677097320557, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 1.0636, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.024645892351274786, |
|
"grad_norm": 0.4611576199531555, |
|
"learning_rate": 6.953655642446368e-05, |
|
"loss": 0.8857, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.024929178470254956, |
|
"grad_norm": 0.6086900234222412, |
|
"learning_rate": 6.873032967079561e-05, |
|
"loss": 0.9394, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.025212464589235126, |
|
"grad_norm": 0.5702813863754272, |
|
"learning_rate": 6.7918397477265e-05, |
|
"loss": 0.8928, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.025495750708215296, |
|
"grad_norm": 0.5099334716796875, |
|
"learning_rate": 6.710100716628344e-05, |
|
"loss": 0.9919, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.025779036827195467, |
|
"grad_norm": 0.6126192212104797, |
|
"learning_rate": 6.627840772285784e-05, |
|
"loss": 0.9299, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.026062322946175637, |
|
"grad_norm": 0.6228667497634888, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.9997, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.026345609065155807, |
|
"grad_norm": 0.6684215068817139, |
|
"learning_rate": 6.461858523613684e-05, |
|
"loss": 0.9355, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.026628895184135977, |
|
"grad_norm": 0.7169914841651917, |
|
"learning_rate": 6.378186779084995e-05, |
|
"loss": 1.0057, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.026912181303116147, |
|
"grad_norm": 0.7495495080947876, |
|
"learning_rate": 6.294095225512603e-05, |
|
"loss": 0.843, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.027195467422096317, |
|
"grad_norm": 1.0023761987686157, |
|
"learning_rate": 6.209609477998338e-05, |
|
"loss": 0.9734, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.027478753541076487, |
|
"grad_norm": 0.9798507690429688, |
|
"learning_rate": 6.124755271719325e-05, |
|
"loss": 1.0046, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.027762039660056657, |
|
"grad_norm": 0.9909435510635376, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 0.9592, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.028045325779036828, |
|
"grad_norm": 1.6045634746551514, |
|
"learning_rate": 5.9540449768827246e-05, |
|
"loss": 1.0293, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.028328611898016998, |
|
"grad_norm": 3.4821736812591553, |
|
"learning_rate": 5.868240888334653e-05, |
|
"loss": 0.9679, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.028328611898016998, |
|
"eval_loss": 0.8943002223968506, |
|
"eval_runtime": 487.973, |
|
"eval_samples_per_second": 12.185, |
|
"eval_steps_per_second": 6.093, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.028611898016997168, |
|
"grad_norm": 0.26681363582611084, |
|
"learning_rate": 5.782172325201155e-05, |
|
"loss": 0.7179, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.028895184135977338, |
|
"grad_norm": 0.27887067198753357, |
|
"learning_rate": 5.695865504800327e-05, |
|
"loss": 0.7854, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.029178470254957508, |
|
"grad_norm": 0.2766787111759186, |
|
"learning_rate": 5.6093467170257374e-05, |
|
"loss": 0.7794, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.029461756373937678, |
|
"grad_norm": 0.2650505304336548, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 0.7821, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.029745042492917848, |
|
"grad_norm": 0.25711944699287415, |
|
"learning_rate": 5.435778713738292e-05, |
|
"loss": 0.7428, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.03002832861189802, |
|
"grad_norm": 0.23184049129486084, |
|
"learning_rate": 5.348782368720626e-05, |
|
"loss": 0.7789, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.03031161473087819, |
|
"grad_norm": 0.23433373868465424, |
|
"learning_rate": 5.26167978121472e-05, |
|
"loss": 0.8222, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.03059490084985836, |
|
"grad_norm": 0.23752881586551666, |
|
"learning_rate": 5.174497483512506e-05, |
|
"loss": 0.7548, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.030878186968838525, |
|
"grad_norm": 0.23099955916404724, |
|
"learning_rate": 5.0872620321864185e-05, |
|
"loss": 0.8607, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.031161473087818695, |
|
"grad_norm": 0.2357034683227539, |
|
"learning_rate": 5e-05, |
|
"loss": 0.8765, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.031444759206798865, |
|
"grad_norm": 0.2469700574874878, |
|
"learning_rate": 4.912737967813583e-05, |
|
"loss": 0.825, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.03172804532577904, |
|
"grad_norm": 0.24967269599437714, |
|
"learning_rate": 4.825502516487497e-05, |
|
"loss": 0.7931, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.032011331444759206, |
|
"grad_norm": 0.25203588604927063, |
|
"learning_rate": 4.738320218785281e-05, |
|
"loss": 0.7875, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.03229461756373938, |
|
"grad_norm": 0.25249430537223816, |
|
"learning_rate": 4.6512176312793736e-05, |
|
"loss": 0.8702, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.032577903682719546, |
|
"grad_norm": 0.2569659948348999, |
|
"learning_rate": 4.564221286261709e-05, |
|
"loss": 0.8997, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.03286118980169972, |
|
"grad_norm": 0.2416309267282486, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 0.7305, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.033144475920679886, |
|
"grad_norm": 0.24729342758655548, |
|
"learning_rate": 4.390653282974264e-05, |
|
"loss": 0.8049, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.03342776203966006, |
|
"grad_norm": 0.27604395151138306, |
|
"learning_rate": 4.3041344951996746e-05, |
|
"loss": 0.8112, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.033711048158640226, |
|
"grad_norm": 0.27382591366767883, |
|
"learning_rate": 4.2178276747988446e-05, |
|
"loss": 0.8149, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.0339943342776204, |
|
"grad_norm": 0.25334885716438293, |
|
"learning_rate": 4.131759111665349e-05, |
|
"loss": 0.8281, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03427762039660057, |
|
"grad_norm": 0.2882319390773773, |
|
"learning_rate": 4.045955023117276e-05, |
|
"loss": 0.8037, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.03456090651558073, |
|
"grad_norm": 0.28409236669540405, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 0.8182, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.03484419263456091, |
|
"grad_norm": 0.2688352167606354, |
|
"learning_rate": 3.875244728280676e-05, |
|
"loss": 0.7252, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.03512747875354107, |
|
"grad_norm": 0.3232026994228363, |
|
"learning_rate": 3.790390522001662e-05, |
|
"loss": 0.8757, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.03541076487252125, |
|
"grad_norm": 0.3008348345756531, |
|
"learning_rate": 3.705904774487396e-05, |
|
"loss": 0.8151, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.035694050991501414, |
|
"grad_norm": 0.2845551073551178, |
|
"learning_rate": 3.6218132209150045e-05, |
|
"loss": 0.8039, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.03597733711048159, |
|
"grad_norm": 0.30033618211746216, |
|
"learning_rate": 3.5381414763863166e-05, |
|
"loss": 0.8236, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.036260623229461754, |
|
"grad_norm": 0.2865908145904541, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.8584, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.03654390934844193, |
|
"grad_norm": 0.3274208903312683, |
|
"learning_rate": 3.372159227714218e-05, |
|
"loss": 0.8048, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.036827195467422094, |
|
"grad_norm": 0.32286253571510315, |
|
"learning_rate": 3.289899283371657e-05, |
|
"loss": 0.8669, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03711048158640227, |
|
"grad_norm": 0.32492363452911377, |
|
"learning_rate": 3.2081602522734986e-05, |
|
"loss": 0.8294, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.037393767705382434, |
|
"grad_norm": 0.3788565993309021, |
|
"learning_rate": 3.12696703292044e-05, |
|
"loss": 0.9411, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.03767705382436261, |
|
"grad_norm": 0.3922273516654968, |
|
"learning_rate": 3.046344357553632e-05, |
|
"loss": 0.896, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.037960339943342775, |
|
"grad_norm": 0.4324563443660736, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 0.8272, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.03824362606232295, |
|
"grad_norm": 0.4802010655403137, |
|
"learning_rate": 2.886908691296504e-05, |
|
"loss": 0.7869, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.038526912181303115, |
|
"grad_norm": 0.5401066541671753, |
|
"learning_rate": 2.8081442660546125e-05, |
|
"loss": 0.8878, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.03881019830028329, |
|
"grad_norm": 0.49897003173828125, |
|
"learning_rate": 2.7300475013022663e-05, |
|
"loss": 0.9156, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.039093484419263455, |
|
"grad_norm": 0.5094231963157654, |
|
"learning_rate": 2.6526421860705473e-05, |
|
"loss": 0.8411, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.03937677053824363, |
|
"grad_norm": 0.5654172897338867, |
|
"learning_rate": 2.575951898768315e-05, |
|
"loss": 0.9716, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.039660056657223795, |
|
"grad_norm": 0.5526919960975647, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.9828, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.03994334277620397, |
|
"grad_norm": 0.5752049088478088, |
|
"learning_rate": 2.4248096254497288e-05, |
|
"loss": 0.9976, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.040226628895184136, |
|
"grad_norm": 0.6576734781265259, |
|
"learning_rate": 2.350403678833976e-05, |
|
"loss": 0.9673, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.04050991501416431, |
|
"grad_norm": 0.7551366090774536, |
|
"learning_rate": 2.2768048249248648e-05, |
|
"loss": 0.9781, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.040793201133144476, |
|
"grad_norm": 0.7085397839546204, |
|
"learning_rate": 2.2040354826462668e-05, |
|
"loss": 1.0065, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.04107648725212465, |
|
"grad_norm": 0.7727219462394714, |
|
"learning_rate": 2.132117818244771e-05, |
|
"loss": 0.8517, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.041359773371104816, |
|
"grad_norm": 0.8478469848632812, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.9225, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.04164305949008498, |
|
"grad_norm": 0.9258629083633423, |
|
"learning_rate": 1.9909248842397584e-05, |
|
"loss": 0.9349, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.041926345609065156, |
|
"grad_norm": 1.0141416788101196, |
|
"learning_rate": 1.9216926233717085e-05, |
|
"loss": 0.8708, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.04220963172804532, |
|
"grad_norm": 1.2329179048538208, |
|
"learning_rate": 1.8533980447508137e-05, |
|
"loss": 0.855, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.042492917847025496, |
|
"grad_norm": 2.4406769275665283, |
|
"learning_rate": 1.7860619515673033e-05, |
|
"loss": 0.868, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.042492917847025496, |
|
"eval_loss": 0.8521701693534851, |
|
"eval_runtime": 487.8912, |
|
"eval_samples_per_second": 12.187, |
|
"eval_steps_per_second": 6.094, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.04277620396600566, |
|
"grad_norm": 0.16932883858680725, |
|
"learning_rate": 1.7197048550474643e-05, |
|
"loss": 0.6469, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.04305949008498584, |
|
"grad_norm": 0.18113374710083008, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 0.8065, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.043342776203966, |
|
"grad_norm": 0.19942761957645416, |
|
"learning_rate": 1.5900081996875083e-05, |
|
"loss": 0.8106, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.04362606232294618, |
|
"grad_norm": 0.2146538347005844, |
|
"learning_rate": 1.526708147705013e-05, |
|
"loss": 0.7978, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.043909348441926344, |
|
"grad_norm": 0.22361645102500916, |
|
"learning_rate": 1.4644660940672627e-05, |
|
"loss": 0.7684, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.04419263456090652, |
|
"grad_norm": 0.24466411769390106, |
|
"learning_rate": 1.4033009983067452e-05, |
|
"loss": 0.7754, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.044475920679886684, |
|
"grad_norm": 0.24689705669879913, |
|
"learning_rate": 1.3432314919041478e-05, |
|
"loss": 0.8686, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.04475920679886686, |
|
"grad_norm": 0.21561266481876373, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 0.7307, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.045042492917847024, |
|
"grad_norm": 0.2240486890077591, |
|
"learning_rate": 1.22645209888614e-05, |
|
"loss": 0.8214, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.0453257790368272, |
|
"grad_norm": 0.24344871938228607, |
|
"learning_rate": 1.1697777844051105e-05, |
|
"loss": 0.7673, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.045609065155807364, |
|
"grad_norm": 0.25965842604637146, |
|
"learning_rate": 1.1142701927151456e-05, |
|
"loss": 0.847, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.04589235127478754, |
|
"grad_norm": 0.2654353976249695, |
|
"learning_rate": 1.0599462319663905e-05, |
|
"loss": 0.8523, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.046175637393767704, |
|
"grad_norm": 0.235614612698555, |
|
"learning_rate": 1.006822449763537e-05, |
|
"loss": 0.8052, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.04645892351274788, |
|
"grad_norm": 0.2644803822040558, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.7474, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.046742209631728045, |
|
"grad_norm": 0.2636964023113251, |
|
"learning_rate": 9.042397785550405e-06, |
|
"loss": 0.8446, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.04702549575070822, |
|
"grad_norm": 0.2771541178226471, |
|
"learning_rate": 8.548121372247918e-06, |
|
"loss": 0.8625, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.047308781869688385, |
|
"grad_norm": 0.29605501890182495, |
|
"learning_rate": 8.066471602728803e-06, |
|
"loss": 0.8621, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.04759206798866856, |
|
"grad_norm": 0.23421235382556915, |
|
"learning_rate": 7.597595192178702e-06, |
|
"loss": 0.8255, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.047875354107648725, |
|
"grad_norm": 0.2577279806137085, |
|
"learning_rate": 7.1416349648943894e-06, |
|
"loss": 0.7929, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.04815864022662889, |
|
"grad_norm": 0.27677690982818604, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.7816, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.048441926345609065, |
|
"grad_norm": 0.2778450846672058, |
|
"learning_rate": 6.269014643030213e-06, |
|
"loss": 0.9325, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.04872521246458923, |
|
"grad_norm": 0.30286088585853577, |
|
"learning_rate": 5.852620357053651e-06, |
|
"loss": 0.816, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.049008498583569406, |
|
"grad_norm": 0.2972896993160248, |
|
"learning_rate": 5.449673790581611e-06, |
|
"loss": 0.8276, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.04929178470254957, |
|
"grad_norm": 0.27526426315307617, |
|
"learning_rate": 5.060297685041659e-06, |
|
"loss": 0.767, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.049575070821529746, |
|
"grad_norm": 0.2896153926849365, |
|
"learning_rate": 4.684610648167503e-06, |
|
"loss": 0.8496, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.04985835694050991, |
|
"grad_norm": 0.30759337544441223, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.8891, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.050141643059490086, |
|
"grad_norm": 0.29936984181404114, |
|
"learning_rate": 3.974757327377981e-06, |
|
"loss": 0.8428, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.05042492917847025, |
|
"grad_norm": 0.31567350029945374, |
|
"learning_rate": 3.6408072716606346e-06, |
|
"loss": 0.81, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.050708215297450426, |
|
"grad_norm": 0.33163803815841675, |
|
"learning_rate": 3.3209786751399187e-06, |
|
"loss": 0.8172, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.05099150141643059, |
|
"grad_norm": 0.37980544567108154, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 0.8965, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.05127478753541077, |
|
"grad_norm": 0.38456615805625916, |
|
"learning_rate": 2.724071220034158e-06, |
|
"loss": 0.8401, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.05155807365439093, |
|
"grad_norm": 0.4374862015247345, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.8419, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.05184135977337111, |
|
"grad_norm": 0.39432716369628906, |
|
"learning_rate": 2.1847622018482283e-06, |
|
"loss": 0.893, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.05212464589235127, |
|
"grad_norm": 0.45420652627944946, |
|
"learning_rate": 1.9369152030840556e-06, |
|
"loss": 0.8105, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.05240793201133145, |
|
"grad_norm": 0.5010901689529419, |
|
"learning_rate": 1.70370868554659e-06, |
|
"loss": 0.869, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.052691218130311614, |
|
"grad_norm": 0.5344345569610596, |
|
"learning_rate": 1.4852136862001764e-06, |
|
"loss": 0.9526, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.05297450424929179, |
|
"grad_norm": 0.5573408603668213, |
|
"learning_rate": 1.2814967607382432e-06, |
|
"loss": 0.9204, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.053257790368271954, |
|
"grad_norm": 0.631679892539978, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 0.9246, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.05354107648725213, |
|
"grad_norm": 0.5736504793167114, |
|
"learning_rate": 9.186408276168013e-07, |
|
"loss": 0.8899, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.053824362606232294, |
|
"grad_norm": 0.7445414066314697, |
|
"learning_rate": 7.596123493895991e-07, |
|
"loss": 1.1011, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.05410764872521247, |
|
"grad_norm": 0.691126823425293, |
|
"learning_rate": 6.15582970243117e-07, |
|
"loss": 1.0024, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.054390934844192634, |
|
"grad_norm": 0.6986340284347534, |
|
"learning_rate": 4.865965629214819e-07, |
|
"loss": 0.8758, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.05467422096317281, |
|
"grad_norm": 0.6812741160392761, |
|
"learning_rate": 3.7269241793390085e-07, |
|
"loss": 0.8845, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.054957507082152975, |
|
"grad_norm": 0.9052487015724182, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 0.9353, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.05524079320113314, |
|
"grad_norm": 0.8438551425933838, |
|
"learning_rate": 1.9026509541272275e-07, |
|
"loss": 0.907, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.055524079320113315, |
|
"grad_norm": 0.9962527751922607, |
|
"learning_rate": 1.2179748700879012e-07, |
|
"loss": 1.0064, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.05580736543909348, |
|
"grad_norm": 0.9022462964057922, |
|
"learning_rate": 6.852326227130834e-08, |
|
"loss": 0.8814, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.056090651558073655, |
|
"grad_norm": 0.9061633348464966, |
|
"learning_rate": 3.04586490452119e-08, |
|
"loss": 0.9099, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.05637393767705382, |
|
"grad_norm": 1.1028547286987305, |
|
"learning_rate": 7.615242180436522e-09, |
|
"loss": 0.8361, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.056657223796033995, |
|
"grad_norm": 1.411783218383789, |
|
"learning_rate": 0.0, |
|
"loss": 0.9002, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.056657223796033995, |
|
"eval_loss": 0.8466009497642517, |
|
"eval_runtime": 487.6748, |
|
"eval_samples_per_second": 12.193, |
|
"eval_steps_per_second": 6.096, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.75209475063808e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |