{
  "best_metric": 5.363029956817627,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.4592422502870264,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.004592422502870264, "grad_norm": 17.422351837158203, "learning_rate": 5.000000000000001e-07, "loss": 49.8036, "step": 1 },
    { "epoch": 0.004592422502870264, "eval_loss": 6.581690788269043, "eval_runtime": 41.1686, "eval_samples_per_second": 8.915, "eval_steps_per_second": 2.235, "step": 1 },
    { "epoch": 0.009184845005740528, "grad_norm": 15.727997779846191, "learning_rate": 1.0000000000000002e-06, "loss": 50.5852, "step": 2 },
    { "epoch": 0.013777267508610792, "grad_norm": 15.347269058227539, "learning_rate": 1.5e-06, "loss": 51.7824, "step": 3 },
    { "epoch": 0.018369690011481057, "grad_norm": 16.722976684570312, "learning_rate": 2.0000000000000003e-06, "loss": 51.7411, "step": 4 },
    { "epoch": 0.022962112514351322, "grad_norm": 15.326921463012695, "learning_rate": 2.5e-06, "loss": 50.5851, "step": 5 },
    { "epoch": 0.027554535017221583, "grad_norm": 17.22072410583496, "learning_rate": 3e-06, "loss": 53.0931, "step": 6 },
    { "epoch": 0.03214695752009185, "grad_norm": 17.842632293701172, "learning_rate": 3.5e-06, "loss": 52.5499, "step": 7 },
    { "epoch": 0.03673938002296211, "grad_norm": 17.188825607299805, "learning_rate": 4.000000000000001e-06, "loss": 52.0334, "step": 8 },
    { "epoch": 0.04133180252583238, "grad_norm": 15.418607711791992, "learning_rate": 4.5e-06, "loss": 49.4726, "step": 9 },
    { "epoch": 0.045924225028702644, "grad_norm": 17.196287155151367, "learning_rate": 5e-06, "loss": 51.543, "step": 10 },
    { "epoch": 0.0505166475315729, "grad_norm": 16.128028869628906, "learning_rate": 4.99847706754774e-06, "loss": 50.8256, "step": 11 },
    { "epoch": 0.05510907003444317, "grad_norm": 17.322240829467773, "learning_rate": 4.993910125649561e-06, "loss": 52.4383, "step": 12 },
    { "epoch": 0.05970149253731343, "grad_norm": 17.042463302612305, "learning_rate": 4.986304738420684e-06, "loss": 50.8534, "step": 13 },
    { "epoch": 0.0642939150401837, "grad_norm": 18.691532135009766, "learning_rate": 4.975670171853926e-06, "loss": 53.126, "step": 14 },
    { "epoch": 0.06888633754305395, "grad_norm": 18.797557830810547, "learning_rate": 4.962019382530521e-06, "loss": 52.3491, "step": 15 },
    { "epoch": 0.07347876004592423, "grad_norm": 19.990497589111328, "learning_rate": 4.9453690018345144e-06, "loss": 55.9718, "step": 16 },
    { "epoch": 0.07807118254879448, "grad_norm": 18.329801559448242, "learning_rate": 4.925739315689991e-06, "loss": 51.4399, "step": 17 },
    { "epoch": 0.08266360505166476, "grad_norm": 19.932241439819336, "learning_rate": 4.903154239845798e-06, "loss": 51.7598, "step": 18 },
    { "epoch": 0.08725602755453502, "grad_norm": 19.305652618408203, "learning_rate": 4.8776412907378845e-06, "loss": 51.3118, "step": 19 },
    { "epoch": 0.09184845005740529, "grad_norm": 20.77489471435547, "learning_rate": 4.849231551964771e-06, "loss": 52.1306, "step": 20 },
    { "epoch": 0.09644087256027555, "grad_norm": 19.153535842895508, "learning_rate": 4.817959636416969e-06, "loss": 51.4403, "step": 21 },
    { "epoch": 0.1010332950631458, "grad_norm": 19.786102294921875, "learning_rate": 4.783863644106502e-06, "loss": 51.161, "step": 22 },
    { "epoch": 0.10562571756601608, "grad_norm": 18.834474563598633, "learning_rate": 4.746985115747918e-06, "loss": 49.2061, "step": 23 },
    { "epoch": 0.11021814006888633, "grad_norm": 20.738544464111328, "learning_rate": 4.707368982147318e-06, "loss": 51.229, "step": 24 },
    { "epoch": 0.1148105625717566, "grad_norm": 20.025314331054688, "learning_rate": 4.665063509461098e-06, "loss": 51.0834, "step": 25 },
    { "epoch": 0.11940298507462686, "grad_norm": 22.799274444580078, "learning_rate": 4.620120240391065e-06, "loss": 51.7177, "step": 26 },
    { "epoch": 0.12399540757749714, "grad_norm": 21.667531967163086, "learning_rate": 4.572593931387604e-06, "loss": 52.6204, "step": 27 },
    { "epoch": 0.1285878300803674, "grad_norm": 22.614103317260742, "learning_rate": 4.522542485937369e-06, "loss": 50.8502, "step": 28 },
    { "epoch": 0.13318025258323765, "grad_norm": 19.7918758392334, "learning_rate": 4.470026884016805e-06, "loss": 49.1687, "step": 29 },
    { "epoch": 0.1377726750861079, "grad_norm": 23.40234375, "learning_rate": 4.415111107797445e-06, "loss": 52.8214, "step": 30 },
    { "epoch": 0.1423650975889782, "grad_norm": 24.31424903869629, "learning_rate": 4.357862063693486e-06, "loss": 50.4354, "step": 31 },
    { "epoch": 0.14695752009184845, "grad_norm": 23.1087646484375, "learning_rate": 4.2983495008466285e-06, "loss": 50.5401, "step": 32 },
    { "epoch": 0.1515499425947187, "grad_norm": 24.652694702148438, "learning_rate": 4.236645926147493e-06, "loss": 51.1058, "step": 33 },
    { "epoch": 0.15614236509758897, "grad_norm": 24.83074951171875, "learning_rate": 4.172826515897146e-06, "loss": 52.2553, "step": 34 },
    { "epoch": 0.16073478760045926, "grad_norm": 24.135053634643555, "learning_rate": 4.106969024216348e-06, "loss": 50.662, "step": 35 },
    { "epoch": 0.1653272101033295, "grad_norm": 24.16924476623535, "learning_rate": 4.039153688314146e-06, "loss": 48.2732, "step": 36 },
    { "epoch": 0.16991963260619977, "grad_norm": 26.378135681152344, "learning_rate": 3.969463130731183e-06, "loss": 50.8932, "step": 37 },
    { "epoch": 0.17451205510907003, "grad_norm": 25.19991111755371, "learning_rate": 3.897982258676867e-06, "loss": 49.9703, "step": 38 },
    { "epoch": 0.1791044776119403, "grad_norm": 28.445180892944336, "learning_rate": 3.824798160583012e-06, "loss": 51.1727, "step": 39 },
    { "epoch": 0.18369690011481057, "grad_norm": 24.99131965637207, "learning_rate": 3.7500000000000005e-06, "loss": 48.2178, "step": 40 },
    { "epoch": 0.18828932261768083, "grad_norm": 26.75794219970703, "learning_rate": 3.6736789069647273e-06, "loss": 48.9569, "step": 41 },
    { "epoch": 0.1928817451205511, "grad_norm": 28.006494522094727, "learning_rate": 3.595927866972694e-06, "loss": 48.6871, "step": 42 },
    { "epoch": 0.19747416762342135, "grad_norm": 28.05718994140625, "learning_rate": 3.516841607689501e-06, "loss": 51.0425, "step": 43 },
    { "epoch": 0.2020665901262916, "grad_norm": 28.426515579223633, "learning_rate": 3.436516483539781e-06, "loss": 49.6784, "step": 44 },
    { "epoch": 0.2066590126291619, "grad_norm": 29.402090072631836, "learning_rate": 3.3550503583141726e-06, "loss": 50.3752, "step": 45 },
    { "epoch": 0.21125143513203215, "grad_norm": 28.24565887451172, "learning_rate": 3.272542485937369e-06, "loss": 48.4813, "step": 46 },
    { "epoch": 0.2158438576349024, "grad_norm": 27.259733200073242, "learning_rate": 3.189093389542498e-06, "loss": 47.8943, "step": 47 },
    { "epoch": 0.22043628013777267, "grad_norm": 30.925739288330078, "learning_rate": 3.1048047389991693e-06, "loss": 51.9337, "step": 48 },
    { "epoch": 0.22502870264064295, "grad_norm": 29.552427291870117, "learning_rate": 3.019779227044398e-06, "loss": 51.5688, "step": 49 },
    { "epoch": 0.2296211251435132, "grad_norm": 28.672847747802734, "learning_rate": 2.9341204441673267e-06, "loss": 51.1267, "step": 50 },
    { "epoch": 0.2296211251435132, "eval_loss": 5.869631767272949, "eval_runtime": 41.6165, "eval_samples_per_second": 8.819, "eval_steps_per_second": 2.211, "step": 50 },
    { "epoch": 0.23421354764638347, "grad_norm": 26.808902740478516, "learning_rate": 2.847932752400164e-06, "loss": 47.6216, "step": 51 },
    { "epoch": 0.23880597014925373, "grad_norm": 26.181222915649414, "learning_rate": 2.761321158169134e-06, "loss": 45.5904, "step": 52 },
    { "epoch": 0.24339839265212398, "grad_norm": 24.47733497619629, "learning_rate": 2.6743911843603134e-06, "loss": 44.2045, "step": 53 },
    { "epoch": 0.24799081515499427, "grad_norm": 27.00351905822754, "learning_rate": 2.587248741756253e-06, "loss": 46.1095, "step": 54 },
    { "epoch": 0.2525832376578645, "grad_norm": 25.429462432861328, "learning_rate": 2.5e-06, "loss": 45.1595, "step": 55 },
    { "epoch": 0.2571756601607348, "grad_norm": 27.074621200561523, "learning_rate": 2.4127512582437486e-06, "loss": 46.2527, "step": 56 },
    { "epoch": 0.2617680826636051, "grad_norm": 28.328506469726562, "learning_rate": 2.325608815639687e-06, "loss": 44.0381, "step": 57 },
    { "epoch": 0.2663605051664753, "grad_norm": 27.414587020874023, "learning_rate": 2.238678841830867e-06, "loss": 44.427, "step": 58 },
    { "epoch": 0.2709529276693456, "grad_norm": 27.592138290405273, "learning_rate": 2.1520672475998374e-06, "loss": 45.2812, "step": 59 },
    { "epoch": 0.2755453501722158, "grad_norm": 26.86080551147461, "learning_rate": 2.0658795558326745e-06, "loss": 44.7676, "step": 60 },
    { "epoch": 0.2801377726750861, "grad_norm": 27.480749130249023, "learning_rate": 1.9802207729556023e-06, "loss": 44.6694, "step": 61 },
    { "epoch": 0.2847301951779564, "grad_norm": 27.362363815307617, "learning_rate": 1.895195261000831e-06, "loss": 44.9002, "step": 62 },
    { "epoch": 0.2893226176808266, "grad_norm": 28.51040267944336, "learning_rate": 1.8109066104575023e-06, "loss": 45.6995, "step": 63 },
    { "epoch": 0.2939150401836969, "grad_norm": 29.854806900024414, "learning_rate": 1.7274575140626318e-06, "loss": 46.2855, "step": 64 },
    { "epoch": 0.29850746268656714, "grad_norm": 26.63895606994629, "learning_rate": 1.6449496416858285e-06, "loss": 42.679, "step": 65 },
    { "epoch": 0.3030998851894374, "grad_norm": 26.914358139038086, "learning_rate": 1.56348351646022e-06, "loss": 43.8516, "step": 66 },
    { "epoch": 0.3076923076923077, "grad_norm": 25.817581176757812, "learning_rate": 1.4831583923105e-06, "loss": 43.8709, "step": 67 },
    { "epoch": 0.31228473019517794, "grad_norm": 28.417312622070312, "learning_rate": 1.4040721330273063e-06, "loss": 44.2127, "step": 68 },
    { "epoch": 0.3168771526980482, "grad_norm": 27.355661392211914, "learning_rate": 1.3263210930352737e-06, "loss": 44.3729, "step": 69 },
    { "epoch": 0.3214695752009185, "grad_norm": 27.43345832824707, "learning_rate": 1.2500000000000007e-06, "loss": 43.7944, "step": 70 },
    { "epoch": 0.32606199770378874, "grad_norm": 29.279691696166992, "learning_rate": 1.1752018394169882e-06, "loss": 44.0171, "step": 71 },
    { "epoch": 0.330654420206659, "grad_norm": 27.410545349121094, "learning_rate": 1.1020177413231334e-06, "loss": 43.095, "step": 72 },
    { "epoch": 0.33524684270952926, "grad_norm": 29.67399024963379, "learning_rate": 1.0305368692688175e-06, "loss": 45.0487, "step": 73 },
    { "epoch": 0.33983926521239954, "grad_norm": 30.19101333618164, "learning_rate": 9.608463116858544e-07, "loss": 44.676, "step": 74 },
    { "epoch": 0.34443168771526983, "grad_norm": 27.907899856567383, "learning_rate": 8.930309757836517e-07, "loss": 43.2354, "step": 75 },
    { "epoch": 0.34902411021814006, "grad_norm": 28.882566452026367, "learning_rate": 8.271734841028553e-07, "loss": 43.4213, "step": 76 },
    { "epoch": 0.35361653272101035, "grad_norm": 30.828548431396484, "learning_rate": 7.633540738525066e-07, "loss": 45.3045, "step": 77 },
    { "epoch": 0.3582089552238806, "grad_norm": 29.114967346191406, "learning_rate": 7.016504991533727e-07, "loss": 43.4449, "step": 78 },
    { "epoch": 0.36280137772675086, "grad_norm": 29.279699325561523, "learning_rate": 6.421379363065142e-07, "loss": 42.7139, "step": 79 },
    { "epoch": 0.36739380022962115, "grad_norm": 29.65679931640625, "learning_rate": 5.848888922025553e-07, "loss": 43.0745, "step": 80 },
    { "epoch": 0.3719862227324914, "grad_norm": 29.90504264831543, "learning_rate": 5.299731159831953e-07, "loss": 44.1824, "step": 81 },
    { "epoch": 0.37657864523536166, "grad_norm": 30.5111083984375, "learning_rate": 4.774575140626317e-07, "loss": 44.0602, "step": 82 },
    { "epoch": 0.3811710677382319, "grad_norm": 28.713394165039062, "learning_rate": 4.27406068612396e-07, "loss": 43.8557, "step": 83 },
    { "epoch": 0.3857634902411022, "grad_norm": 28.297386169433594, "learning_rate": 3.798797596089351e-07, "loss": 42.6137, "step": 84 },
    { "epoch": 0.39035591274397247, "grad_norm": 28.568580627441406, "learning_rate": 3.3493649053890325e-07, "loss": 43.4909, "step": 85 },
    { "epoch": 0.3949483352468427, "grad_norm": 29.288326263427734, "learning_rate": 2.9263101785268253e-07, "loss": 42.8888, "step": 86 },
    { "epoch": 0.399540757749713, "grad_norm": 28.4726619720459, "learning_rate": 2.53014884252083e-07, "loss": 43.0369, "step": 87 },
    { "epoch": 0.4041331802525832, "grad_norm": 29.91521644592285, "learning_rate": 2.1613635589349756e-07, "loss": 41.9991, "step": 88 },
    { "epoch": 0.4087256027554535, "grad_norm": 30.32415008544922, "learning_rate": 1.8204036358303173e-07, "loss": 42.7265, "step": 89 },
    { "epoch": 0.4133180252583238, "grad_norm": 30.52402114868164, "learning_rate": 1.507684480352292e-07, "loss": 43.7341, "step": 90 },
    { "epoch": 0.417910447761194, "grad_norm": 29.194290161132812, "learning_rate": 1.223587092621162e-07, "loss": 44.0457, "step": 91 },
    { "epoch": 0.4225028702640643, "grad_norm": 30.200559616088867, "learning_rate": 9.684576015420277e-08, "loss": 44.1988, "step": 92 },
    { "epoch": 0.42709529276693453, "grad_norm": 30.448226928710938, "learning_rate": 7.426068431000883e-08, "loss": 43.5972, "step": 93 },
    { "epoch": 0.4316877152698048, "grad_norm": 31.17372703552246, "learning_rate": 5.463099816548578e-08, "loss": 45.4866, "step": 94 },
    { "epoch": 0.4362801377726751, "grad_norm": 30.126811981201172, "learning_rate": 3.798061746947995e-08, "loss": 43.0588, "step": 95 },
    { "epoch": 0.44087256027554533, "grad_norm": 31.021615982055664, "learning_rate": 2.4329828146074096e-08, "loss": 43.662, "step": 96 },
    { "epoch": 0.4454649827784156, "grad_norm": 31.53817367553711, "learning_rate": 1.3695261579316776e-08, "loss": 45.2449, "step": 97 },
    { "epoch": 0.4500574052812859, "grad_norm": 33.527503967285156, "learning_rate": 6.089874350439507e-09, "loss": 45.0926, "step": 98 },
    { "epoch": 0.45464982778415614, "grad_norm": 30.129289627075195, "learning_rate": 1.5229324522605949e-09, "loss": 44.7906, "step": 99 },
    { "epoch": 0.4592422502870264, "grad_norm": 31.717391967773438, "learning_rate": 0.0, "loss": 46.6076, "step": 100 },
    { "epoch": 0.4592422502870264, "eval_loss": 5.363029956817627, "eval_runtime": 41.6293, "eval_samples_per_second": 8.816, "eval_steps_per_second": 2.21, "step": 100 }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.53913198493696e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}