|
{ |
|
"best_metric": 2.1182491779327393, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-100", |
|
"epoch": 1.0081504702194357, |
|
"eval_steps": 50, |
|
"global_step": 100, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.010031347962382446, |
|
"grad_norm": 0.5013210773468018, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 2.0579, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.010031347962382446, |
|
"eval_loss": 2.4447879791259766, |
|
"eval_runtime": 49.2666, |
|
"eval_samples_per_second": 13.64, |
|
"eval_steps_per_second": 1.705, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02006269592476489, |
|
"grad_norm": 0.5867398381233215, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 2.1459, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.030094043887147336, |
|
"grad_norm": 0.6160461902618408, |
|
"learning_rate": 1.5e-06, |
|
"loss": 2.1973, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04012539184952978, |
|
"grad_norm": 0.6387326717376709, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 2.2502, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.050156739811912224, |
|
"grad_norm": 0.6501869559288025, |
|
"learning_rate": 2.5e-06, |
|
"loss": 2.2475, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06018808777429467, |
|
"grad_norm": 0.6972927451133728, |
|
"learning_rate": 3e-06, |
|
"loss": 2.3085, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.07021943573667712, |
|
"grad_norm": 0.7163339853286743, |
|
"learning_rate": 3.5e-06, |
|
"loss": 2.2789, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.08025078369905957, |
|
"grad_norm": 0.7239258885383606, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 2.2794, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.090282131661442, |
|
"grad_norm": 0.7243937253952026, |
|
"learning_rate": 4.5e-06, |
|
"loss": 2.2982, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.10031347962382445, |
|
"grad_norm": 0.7575347423553467, |
|
"learning_rate": 5e-06, |
|
"loss": 2.3438, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1103448275862069, |
|
"grad_norm": 0.7794107794761658, |
|
"learning_rate": 4.99847706754774e-06, |
|
"loss": 2.354, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.12037617554858934, |
|
"grad_norm": 0.8005712628364563, |
|
"learning_rate": 4.993910125649561e-06, |
|
"loss": 2.3741, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1304075235109718, |
|
"grad_norm": 0.843438446521759, |
|
"learning_rate": 4.986304738420684e-06, |
|
"loss": 2.3853, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.14043887147335424, |
|
"grad_norm": 0.8797963261604309, |
|
"learning_rate": 4.975670171853926e-06, |
|
"loss": 2.4064, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.15047021943573669, |
|
"grad_norm": 0.9428530931472778, |
|
"learning_rate": 4.962019382530521e-06, |
|
"loss": 2.4542, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.16050156739811913, |
|
"grad_norm": 0.9352163076400757, |
|
"learning_rate": 4.9453690018345144e-06, |
|
"loss": 2.4665, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.17053291536050158, |
|
"grad_norm": 1.0022633075714111, |
|
"learning_rate": 4.925739315689991e-06, |
|
"loss": 2.4377, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.180564263322884, |
|
"grad_norm": 1.0534271001815796, |
|
"learning_rate": 4.903154239845798e-06, |
|
"loss": 2.5266, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.19059561128526645, |
|
"grad_norm": 1.0811848640441895, |
|
"learning_rate": 4.8776412907378845e-06, |
|
"loss": 2.5807, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2006269592476489, |
|
"grad_norm": 1.1316111087799072, |
|
"learning_rate": 4.849231551964771e-06, |
|
"loss": 2.5902, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21065830721003134, |
|
"grad_norm": 1.189561367034912, |
|
"learning_rate": 4.817959636416969e-06, |
|
"loss": 2.6038, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2206896551724138, |
|
"grad_norm": 1.2571277618408203, |
|
"learning_rate": 4.783863644106502e-06, |
|
"loss": 2.7178, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.23072100313479624, |
|
"grad_norm": 1.4015403985977173, |
|
"learning_rate": 4.746985115747918e-06, |
|
"loss": 2.8131, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.24075235109717869, |
|
"grad_norm": 1.7429771423339844, |
|
"learning_rate": 4.707368982147318e-06, |
|
"loss": 3.1713, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.2507836990595611, |
|
"grad_norm": 0.6240499019622803, |
|
"learning_rate": 4.665063509461098e-06, |
|
"loss": 2.0797, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.2608150470219436, |
|
"grad_norm": 0.7012020349502563, |
|
"learning_rate": 4.620120240391065e-06, |
|
"loss": 2.0826, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.270846394984326, |
|
"grad_norm": 0.7514374256134033, |
|
"learning_rate": 4.572593931387604e-06, |
|
"loss": 2.1961, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.2808777429467085, |
|
"grad_norm": 0.7977500557899475, |
|
"learning_rate": 4.522542485937369e-06, |
|
"loss": 2.153, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2909090909090909, |
|
"grad_norm": 0.7912695407867432, |
|
"learning_rate": 4.470026884016805e-06, |
|
"loss": 2.176, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.30094043887147337, |
|
"grad_norm": 0.8330540060997009, |
|
"learning_rate": 4.415111107797445e-06, |
|
"loss": 2.1654, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3109717868338558, |
|
"grad_norm": 0.8662275075912476, |
|
"learning_rate": 4.357862063693486e-06, |
|
"loss": 2.1926, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.32100313479623827, |
|
"grad_norm": 0.8899848461151123, |
|
"learning_rate": 4.2983495008466285e-06, |
|
"loss": 2.197, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.3310344827586207, |
|
"grad_norm": 0.963080883026123, |
|
"learning_rate": 4.236645926147493e-06, |
|
"loss": 2.3028, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.34106583072100316, |
|
"grad_norm": 0.9748149514198303, |
|
"learning_rate": 4.172826515897146e-06, |
|
"loss": 2.2829, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.3510971786833856, |
|
"grad_norm": 0.9774594306945801, |
|
"learning_rate": 4.106969024216348e-06, |
|
"loss": 2.272, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.361128526645768, |
|
"grad_norm": 0.9856119155883789, |
|
"learning_rate": 4.039153688314146e-06, |
|
"loss": 2.2706, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3711598746081505, |
|
"grad_norm": 1.0438799858093262, |
|
"learning_rate": 3.969463130731183e-06, |
|
"loss": 2.3098, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.3811912225705329, |
|
"grad_norm": 1.0557576417922974, |
|
"learning_rate": 3.897982258676867e-06, |
|
"loss": 2.236, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.39122257053291537, |
|
"grad_norm": 1.1337238550186157, |
|
"learning_rate": 3.824798160583012e-06, |
|
"loss": 2.3059, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.4012539184952978, |
|
"grad_norm": 1.1216912269592285, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 2.3342, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41128526645768027, |
|
"grad_norm": 1.1198524236679077, |
|
"learning_rate": 3.6736789069647273e-06, |
|
"loss": 2.32, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.4213166144200627, |
|
"grad_norm": 1.182173252105713, |
|
"learning_rate": 3.595927866972694e-06, |
|
"loss": 2.4223, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.43134796238244516, |
|
"grad_norm": 1.2724802494049072, |
|
"learning_rate": 3.516841607689501e-06, |
|
"loss": 2.4149, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.4413793103448276, |
|
"grad_norm": 1.2972792387008667, |
|
"learning_rate": 3.436516483539781e-06, |
|
"loss": 2.4336, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.45141065830721006, |
|
"grad_norm": 1.3523412942886353, |
|
"learning_rate": 3.3550503583141726e-06, |
|
"loss": 2.4776, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.4614420062695925, |
|
"grad_norm": 1.4430625438690186, |
|
"learning_rate": 3.272542485937369e-06, |
|
"loss": 2.5075, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.4714733542319749, |
|
"grad_norm": 1.5807729959487915, |
|
"learning_rate": 3.189093389542498e-06, |
|
"loss": 2.6176, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.48150470219435737, |
|
"grad_norm": 1.8612488508224487, |
|
"learning_rate": 3.1048047389991693e-06, |
|
"loss": 2.9744, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.4915360501567398, |
|
"grad_norm": 0.6271159648895264, |
|
"learning_rate": 3.019779227044398e-06, |
|
"loss": 1.9217, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.5015673981191222, |
|
"grad_norm": 0.7170687317848206, |
|
"learning_rate": 2.9341204441673267e-06, |
|
"loss": 2.0208, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.5015673981191222, |
|
"eval_loss": 2.2289443016052246, |
|
"eval_runtime": 49.8574, |
|
"eval_samples_per_second": 13.478, |
|
"eval_steps_per_second": 1.685, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.5115987460815047, |
|
"grad_norm": 0.7622096538543701, |
|
"learning_rate": 2.847932752400164e-06, |
|
"loss": 2.0219, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.5216300940438872, |
|
"grad_norm": 0.7824783325195312, |
|
"learning_rate": 2.761321158169134e-06, |
|
"loss": 2.0423, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.5316614420062696, |
|
"grad_norm": 0.8110308647155762, |
|
"learning_rate": 2.6743911843603134e-06, |
|
"loss": 2.0342, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.541692789968652, |
|
"grad_norm": 0.8110449314117432, |
|
"learning_rate": 2.587248741756253e-06, |
|
"loss": 2.0627, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.5517241379310345, |
|
"grad_norm": 0.8489147424697876, |
|
"learning_rate": 2.5e-06, |
|
"loss": 2.0647, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.561755485893417, |
|
"grad_norm": 0.835385799407959, |
|
"learning_rate": 2.4127512582437486e-06, |
|
"loss": 2.0835, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5717868338557994, |
|
"grad_norm": 0.8666210770606995, |
|
"learning_rate": 2.325608815639687e-06, |
|
"loss": 2.0915, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5818181818181818, |
|
"grad_norm": 0.893517255783081, |
|
"learning_rate": 2.238678841830867e-06, |
|
"loss": 2.122, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5918495297805643, |
|
"grad_norm": 0.9489184617996216, |
|
"learning_rate": 2.1520672475998374e-06, |
|
"loss": 2.1537, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.6018808777429467, |
|
"grad_norm": 0.9988653659820557, |
|
"learning_rate": 2.0658795558326745e-06, |
|
"loss": 2.1574, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.6119122257053291, |
|
"grad_norm": 0.9343753457069397, |
|
"learning_rate": 1.9802207729556023e-06, |
|
"loss": 2.1283, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.6219435736677116, |
|
"grad_norm": 0.9588767290115356, |
|
"learning_rate": 1.895195261000831e-06, |
|
"loss": 2.0984, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.631974921630094, |
|
"grad_norm": 1.0283074378967285, |
|
"learning_rate": 1.8109066104575023e-06, |
|
"loss": 2.1398, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.6420062695924765, |
|
"grad_norm": 1.0181758403778076, |
|
"learning_rate": 1.7274575140626318e-06, |
|
"loss": 2.1759, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.6520376175548589, |
|
"grad_norm": 1.0908583402633667, |
|
"learning_rate": 1.6449496416858285e-06, |
|
"loss": 2.2327, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.6620689655172414, |
|
"grad_norm": 1.1794666051864624, |
|
"learning_rate": 1.56348351646022e-06, |
|
"loss": 2.2237, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.6721003134796238, |
|
"grad_norm": 1.2034351825714111, |
|
"learning_rate": 1.4831583923105e-06, |
|
"loss": 2.2669, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.6821316614420063, |
|
"grad_norm": 1.1914037466049194, |
|
"learning_rate": 1.4040721330273063e-06, |
|
"loss": 2.3433, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6921630094043887, |
|
"grad_norm": 1.2532089948654175, |
|
"learning_rate": 1.3263210930352737e-06, |
|
"loss": 2.2484, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.7021943573667712, |
|
"grad_norm": 1.376366376876831, |
|
"learning_rate": 1.2500000000000007e-06, |
|
"loss": 2.3746, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.7122257053291536, |
|
"grad_norm": 1.4628711938858032, |
|
"learning_rate": 1.1752018394169882e-06, |
|
"loss": 2.4658, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.722257053291536, |
|
"grad_norm": 1.7385092973709106, |
|
"learning_rate": 1.1020177413231334e-06, |
|
"loss": 2.7303, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.7322884012539185, |
|
"grad_norm": 0.5511379837989807, |
|
"learning_rate": 1.0305368692688175e-06, |
|
"loss": 1.9222, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.742319749216301, |
|
"grad_norm": 0.6141335964202881, |
|
"learning_rate": 9.608463116858544e-07, |
|
"loss": 1.9357, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.7523510971786834, |
|
"grad_norm": 0.6731216311454773, |
|
"learning_rate": 8.930309757836517e-07, |
|
"loss": 1.9706, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.7623824451410658, |
|
"grad_norm": 0.6861745119094849, |
|
"learning_rate": 8.271734841028553e-07, |
|
"loss": 1.9597, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.7724137931034483, |
|
"grad_norm": 0.7158825397491455, |
|
"learning_rate": 7.633540738525066e-07, |
|
"loss": 1.9872, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.7824451410658307, |
|
"grad_norm": 0.7349813580513, |
|
"learning_rate": 7.016504991533727e-07, |
|
"loss": 1.971, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.7924764890282132, |
|
"grad_norm": 0.7533213496208191, |
|
"learning_rate": 6.421379363065142e-07, |
|
"loss": 1.987, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.8025078369905956, |
|
"grad_norm": 0.8110781908035278, |
|
"learning_rate": 5.848888922025553e-07, |
|
"loss": 2.0393, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.812539184952978, |
|
"grad_norm": 0.8404735922813416, |
|
"learning_rate": 5.299731159831953e-07, |
|
"loss": 2.0574, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.8225705329153605, |
|
"grad_norm": 0.8332526087760925, |
|
"learning_rate": 4.774575140626317e-07, |
|
"loss": 2.0136, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.8326018808777429, |
|
"grad_norm": 0.8636597394943237, |
|
"learning_rate": 4.27406068612396e-07, |
|
"loss": 2.0972, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.8426332288401254, |
|
"grad_norm": 0.9107650518417358, |
|
"learning_rate": 3.798797596089351e-07, |
|
"loss": 2.0861, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.8526645768025078, |
|
"grad_norm": 0.9134276509284973, |
|
"learning_rate": 3.3493649053890325e-07, |
|
"loss": 2.1062, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.8626959247648903, |
|
"grad_norm": 0.9543719291687012, |
|
"learning_rate": 2.9263101785268253e-07, |
|
"loss": 2.1049, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.8727272727272727, |
|
"grad_norm": 0.9964223504066467, |
|
"learning_rate": 2.53014884252083e-07, |
|
"loss": 2.1389, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.8827586206896552, |
|
"grad_norm": 0.9783273935317993, |
|
"learning_rate": 2.1613635589349756e-07, |
|
"loss": 2.1203, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.8927899686520376, |
|
"grad_norm": 1.0522716045379639, |
|
"learning_rate": 1.8204036358303173e-07, |
|
"loss": 2.1538, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.9028213166144201, |
|
"grad_norm": 1.0797299146652222, |
|
"learning_rate": 1.507684480352292e-07, |
|
"loss": 2.1707, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.9128526645768025, |
|
"grad_norm": 1.0894622802734375, |
|
"learning_rate": 1.223587092621162e-07, |
|
"loss": 2.1841, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.922884012539185, |
|
"grad_norm": 1.1516773700714111, |
|
"learning_rate": 9.684576015420277e-08, |
|
"loss": 2.2291, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.9329153605015674, |
|
"grad_norm": 1.241176962852478, |
|
"learning_rate": 7.426068431000883e-08, |
|
"loss": 2.2777, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.9429467084639498, |
|
"grad_norm": 1.3007570505142212, |
|
"learning_rate": 5.463099816548578e-08, |
|
"loss": 2.33, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.9529780564263323, |
|
"grad_norm": 1.4791982173919678, |
|
"learning_rate": 3.798061746947995e-08, |
|
"loss": 2.4737, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.9630094043887147, |
|
"grad_norm": 1.7149653434753418, |
|
"learning_rate": 2.4329828146074096e-08, |
|
"loss": 2.7394, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.9730407523510972, |
|
"grad_norm": 0.6517561078071594, |
|
"learning_rate": 1.3695261579316776e-08, |
|
"loss": 1.9604, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.9830721003134796, |
|
"grad_norm": 0.7938938736915588, |
|
"learning_rate": 6.089874350439507e-09, |
|
"loss": 2.0268, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.993103448275862, |
|
"grad_norm": 1.0271896123886108, |
|
"learning_rate": 1.5229324522605949e-09, |
|
"loss": 2.1866, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.0081504702194357, |
|
"grad_norm": 1.5798444747924805, |
|
"learning_rate": 0.0, |
|
"loss": 3.9263, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.0081504702194357, |
|
"eval_loss": 2.1182491779327393, |
|
"eval_runtime": 49.8296, |
|
"eval_samples_per_second": 13.486, |
|
"eval_steps_per_second": 1.686, |
|
"step": 100 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 100, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.305280287997952e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |