{ |
|
"best_metric": 1.954980492591858, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-100", |
|
"epoch": 0.01631787214947171, |
|
"eval_steps": 50, |
|
"global_step": 100, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0001631787214947171, |
|
"grad_norm": 0.9626743197441101, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 2.3231, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0001631787214947171, |
|
"eval_loss": 2.1344542503356934, |
|
"eval_runtime": 976.0493, |
|
"eval_samples_per_second": 10.575, |
|
"eval_steps_per_second": 2.644, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0003263574429894342, |
|
"grad_norm": 0.8612233996391296, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 2.1608, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0004895361644841513, |
|
"grad_norm": 0.9304367899894714, |
|
"learning_rate": 1.5e-06, |
|
"loss": 2.2746, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0006527148859788684, |
|
"grad_norm": 0.8620933294296265, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 2.1838, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0008158936074735855, |
|
"grad_norm": 0.8950667977333069, |
|
"learning_rate": 2.5e-06, |
|
"loss": 2.1265, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0009790723289683026, |
|
"grad_norm": 0.8841383457183838, |
|
"learning_rate": 3e-06, |
|
"loss": 2.2276, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0011422510504630196, |
|
"grad_norm": 0.8289633989334106, |
|
"learning_rate": 3.5e-06, |
|
"loss": 2.2067, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0013054297719577368, |
|
"grad_norm": 0.8701562881469727, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 2.1737, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0014686084934524537, |
|
"grad_norm": 0.836694598197937, |
|
"learning_rate": 4.5e-06, |
|
"loss": 2.1575, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.001631787214947171, |
|
"grad_norm": 0.8217163681983948, |
|
"learning_rate": 5e-06, |
|
"loss": 2.1041, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.001794965936441888, |
|
"grad_norm": 0.8088410496711731, |
|
"learning_rate": 4.99847706754774e-06, |
|
"loss": 2.1987, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.001958144657936605, |
|
"grad_norm": 0.8314682245254517, |
|
"learning_rate": 4.993910125649561e-06, |
|
"loss": 2.0736, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0021213233794313223, |
|
"grad_norm": 0.8567140698432922, |
|
"learning_rate": 4.986304738420684e-06, |
|
"loss": 2.1567, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.002284502100926039, |
|
"grad_norm": 0.8521527051925659, |
|
"learning_rate": 4.975670171853926e-06, |
|
"loss": 2.0756, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0024476808224207563, |
|
"grad_norm": 0.890558123588562, |
|
"learning_rate": 4.962019382530521e-06, |
|
"loss": 2.0736, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0026108595439154735, |
|
"grad_norm": 0.8253862857818604, |
|
"learning_rate": 4.9453690018345144e-06, |
|
"loss": 2.0023, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0027740382654101907, |
|
"grad_norm": 0.8322637677192688, |
|
"learning_rate": 4.925739315689991e-06, |
|
"loss": 2.0468, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0029372169869049075, |
|
"grad_norm": 0.8288842439651489, |
|
"learning_rate": 4.903154239845798e-06, |
|
"loss": 1.9811, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0031003957083996247, |
|
"grad_norm": 0.9009273648262024, |
|
"learning_rate": 4.8776412907378845e-06, |
|
"loss": 2.1364, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.003263574429894342, |
|
"grad_norm": 0.8775635957717896, |
|
"learning_rate": 4.849231551964771e-06, |
|
"loss": 2.0339, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0034267531513890587, |
|
"grad_norm": 0.9163408279418945, |
|
"learning_rate": 4.817959636416969e-06, |
|
"loss": 2.0333, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.003589931872883776, |
|
"grad_norm": 0.9037641286849976, |
|
"learning_rate": 4.783863644106502e-06, |
|
"loss": 2.0447, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.003753110594378493, |
|
"grad_norm": 0.9167019724845886, |
|
"learning_rate": 4.746985115747918e-06, |
|
"loss": 1.9999, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.00391628931587321, |
|
"grad_norm": 0.8852622509002686, |
|
"learning_rate": 4.707368982147318e-06, |
|
"loss": 2.0036, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.0040794680373679275, |
|
"grad_norm": 0.890647292137146, |
|
"learning_rate": 4.665063509461098e-06, |
|
"loss": 2.0591, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.004242646758862645, |
|
"grad_norm": 0.8380885124206543, |
|
"learning_rate": 4.620120240391065e-06, |
|
"loss": 2.0312, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.004405825480357361, |
|
"grad_norm": 0.9328538179397583, |
|
"learning_rate": 4.572593931387604e-06, |
|
"loss": 2.0167, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.004569004201852078, |
|
"grad_norm": 0.8684221506118774, |
|
"learning_rate": 4.522542485937369e-06, |
|
"loss": 2.0018, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.004732182923346795, |
|
"grad_norm": 0.9270681738853455, |
|
"learning_rate": 4.470026884016805e-06, |
|
"loss": 2.0179, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.004895361644841513, |
|
"grad_norm": 0.9681967496871948, |
|
"learning_rate": 4.415111107797445e-06, |
|
"loss": 2.1236, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.00505854036633623, |
|
"grad_norm": 0.9239471554756165, |
|
"learning_rate": 4.357862063693486e-06, |
|
"loss": 1.9945, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.005221719087830947, |
|
"grad_norm": 0.9569579362869263, |
|
"learning_rate": 4.2983495008466285e-06, |
|
"loss": 1.9907, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.005384897809325664, |
|
"grad_norm": 0.9254785776138306, |
|
"learning_rate": 4.236645926147493e-06, |
|
"loss": 2.0334, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0055480765308203814, |
|
"grad_norm": 0.8775327205657959, |
|
"learning_rate": 4.172826515897146e-06, |
|
"loss": 2.049, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.005711255252315098, |
|
"grad_norm": 0.9445580840110779, |
|
"learning_rate": 4.106969024216348e-06, |
|
"loss": 1.9781, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.005874433973809815, |
|
"grad_norm": 0.8967560529708862, |
|
"learning_rate": 4.039153688314146e-06, |
|
"loss": 2.017, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.006037612695304532, |
|
"grad_norm": 0.9454420208930969, |
|
"learning_rate": 3.969463130731183e-06, |
|
"loss": 1.9148, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.006200791416799249, |
|
"grad_norm": 1.0066184997558594, |
|
"learning_rate": 3.897982258676867e-06, |
|
"loss": 2.0719, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.006363970138293967, |
|
"grad_norm": 0.9081863164901733, |
|
"learning_rate": 3.824798160583012e-06, |
|
"loss": 1.9958, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.006527148859788684, |
|
"grad_norm": 0.9053497314453125, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 2.0227, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.006690327581283401, |
|
"grad_norm": 0.9111195206642151, |
|
"learning_rate": 3.6736789069647273e-06, |
|
"loss": 1.9154, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.006853506302778117, |
|
"grad_norm": 0.8714330792427063, |
|
"learning_rate": 3.595927866972694e-06, |
|
"loss": 2.0181, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.0070166850242728345, |
|
"grad_norm": 0.9528706073760986, |
|
"learning_rate": 3.516841607689501e-06, |
|
"loss": 2.1378, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.007179863745767552, |
|
"grad_norm": 0.9247714877128601, |
|
"learning_rate": 3.436516483539781e-06, |
|
"loss": 2.0865, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.007343042467262269, |
|
"grad_norm": 1.0060503482818604, |
|
"learning_rate": 3.3550503583141726e-06, |
|
"loss": 2.1272, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.007506221188756986, |
|
"grad_norm": 1.0275453329086304, |
|
"learning_rate": 3.272542485937369e-06, |
|
"loss": 2.0768, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.007669399910251703, |
|
"grad_norm": 1.0227175951004028, |
|
"learning_rate": 3.189093389542498e-06, |
|
"loss": 2.1448, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.00783257863174642, |
|
"grad_norm": 1.136629343032837, |
|
"learning_rate": 3.1048047389991693e-06, |
|
"loss": 2.2079, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.007995757353241137, |
|
"grad_norm": 1.1337389945983887, |
|
"learning_rate": 3.019779227044398e-06, |
|
"loss": 2.1598, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.008158936074735855, |
|
"grad_norm": 1.3956023454666138, |
|
"learning_rate": 2.9341204441673267e-06, |
|
"loss": 2.1415, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.008158936074735855, |
|
"eval_loss": 1.986087441444397, |
|
"eval_runtime": 980.5339, |
|
"eval_samples_per_second": 10.527, |
|
"eval_steps_per_second": 2.632, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.008322114796230571, |
|
"grad_norm": 0.6517155170440674, |
|
"learning_rate": 2.847932752400164e-06, |
|
"loss": 2.1295, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.00848529351772529, |
|
"grad_norm": 0.6166985034942627, |
|
"learning_rate": 2.761321158169134e-06, |
|
"loss": 2.0326, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.008648472239220006, |
|
"grad_norm": 0.6049599647521973, |
|
"learning_rate": 2.6743911843603134e-06, |
|
"loss": 2.069, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.008811650960714722, |
|
"grad_norm": 0.5539830923080444, |
|
"learning_rate": 2.587248741756253e-06, |
|
"loss": 2.0095, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.00897482968220944, |
|
"grad_norm": 0.5926068425178528, |
|
"learning_rate": 2.5e-06, |
|
"loss": 2.1317, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.009138008403704156, |
|
"grad_norm": 0.6043214797973633, |
|
"learning_rate": 2.4127512582437486e-06, |
|
"loss": 2.0575, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.009301187125198875, |
|
"grad_norm": 0.5795297622680664, |
|
"learning_rate": 2.325608815639687e-06, |
|
"loss": 2.0149, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.00946436584669359, |
|
"grad_norm": 0.5584662556648254, |
|
"learning_rate": 2.238678841830867e-06, |
|
"loss": 2.0126, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.009627544568188309, |
|
"grad_norm": 0.6003333330154419, |
|
"learning_rate": 2.1520672475998374e-06, |
|
"loss": 2.0313, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.009790723289683025, |
|
"grad_norm": 0.5960946679115295, |
|
"learning_rate": 2.0658795558326745e-06, |
|
"loss": 1.9306, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.009953902011177742, |
|
"grad_norm": 0.5591971278190613, |
|
"learning_rate": 1.9802207729556023e-06, |
|
"loss": 2.0074, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.01011708073267246, |
|
"grad_norm": 0.5890583395957947, |
|
"learning_rate": 1.895195261000831e-06, |
|
"loss": 1.9372, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.010280259454167176, |
|
"grad_norm": 0.5822552442550659, |
|
"learning_rate": 1.8109066104575023e-06, |
|
"loss": 1.9925, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.010443438175661894, |
|
"grad_norm": 0.5664207339286804, |
|
"learning_rate": 1.7274575140626318e-06, |
|
"loss": 1.8616, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.01060661689715661, |
|
"grad_norm": 0.5835029482841492, |
|
"learning_rate": 1.6449496416858285e-06, |
|
"loss": 1.9502, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.010769795618651328, |
|
"grad_norm": 0.5808869004249573, |
|
"learning_rate": 1.56348351646022e-06, |
|
"loss": 1.9177, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.010932974340146045, |
|
"grad_norm": 0.5943641662597656, |
|
"learning_rate": 1.4831583923105e-06, |
|
"loss": 1.8628, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.011096153061640763, |
|
"grad_norm": 0.6218149662017822, |
|
"learning_rate": 1.4040721330273063e-06, |
|
"loss": 1.8908, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.01125933178313548, |
|
"grad_norm": 0.5880563855171204, |
|
"learning_rate": 1.3263210930352737e-06, |
|
"loss": 1.9614, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.011422510504630196, |
|
"grad_norm": 0.5864894986152649, |
|
"learning_rate": 1.2500000000000007e-06, |
|
"loss": 1.8818, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.011585689226124914, |
|
"grad_norm": 0.5821499824523926, |
|
"learning_rate": 1.1752018394169882e-06, |
|
"loss": 1.9263, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.01174886794761963, |
|
"grad_norm": 0.5684424638748169, |
|
"learning_rate": 1.1020177413231334e-06, |
|
"loss": 1.9137, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.011912046669114348, |
|
"grad_norm": 0.5831771492958069, |
|
"learning_rate": 1.0305368692688175e-06, |
|
"loss": 1.8792, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.012075225390609064, |
|
"grad_norm": 0.6676979660987854, |
|
"learning_rate": 9.608463116858544e-07, |
|
"loss": 1.8534, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.012238404112103782, |
|
"grad_norm": 0.614493727684021, |
|
"learning_rate": 8.930309757836517e-07, |
|
"loss": 1.9241, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.012401582833598499, |
|
"grad_norm": 0.6089916229248047, |
|
"learning_rate": 8.271734841028553e-07, |
|
"loss": 1.9294, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.012564761555093215, |
|
"grad_norm": 0.6394822597503662, |
|
"learning_rate": 7.633540738525066e-07, |
|
"loss": 1.9013, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.012727940276587933, |
|
"grad_norm": 0.6592298150062561, |
|
"learning_rate": 7.016504991533727e-07, |
|
"loss": 1.9368, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.01289111899808265, |
|
"grad_norm": 0.6934499144554138, |
|
"learning_rate": 6.421379363065142e-07, |
|
"loss": 1.9195, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.013054297719577368, |
|
"grad_norm": 0.7235682606697083, |
|
"learning_rate": 5.848888922025553e-07, |
|
"loss": 1.9364, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.013217476441072084, |
|
"grad_norm": 0.6777318716049194, |
|
"learning_rate": 5.299731159831953e-07, |
|
"loss": 1.9084, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.013380655162566802, |
|
"grad_norm": 0.6706295609474182, |
|
"learning_rate": 4.774575140626317e-07, |
|
"loss": 1.8473, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.013543833884061518, |
|
"grad_norm": 0.7311359643936157, |
|
"learning_rate": 4.27406068612396e-07, |
|
"loss": 1.8132, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.013707012605556235, |
|
"grad_norm": 0.6486523747444153, |
|
"learning_rate": 3.798797596089351e-07, |
|
"loss": 1.8764, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.013870191327050953, |
|
"grad_norm": 0.6983171105384827, |
|
"learning_rate": 3.3493649053890325e-07, |
|
"loss": 1.8729, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.014033370048545669, |
|
"grad_norm": 0.6943270564079285, |
|
"learning_rate": 2.9263101785268253e-07, |
|
"loss": 1.7492, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.014196548770040387, |
|
"grad_norm": 0.6928853988647461, |
|
"learning_rate": 2.53014884252083e-07, |
|
"loss": 1.8753, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.014359727491535103, |
|
"grad_norm": 0.7341625094413757, |
|
"learning_rate": 2.1613635589349756e-07, |
|
"loss": 1.828, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.014522906213029822, |
|
"grad_norm": 0.7738997340202332, |
|
"learning_rate": 1.8204036358303173e-07, |
|
"loss": 1.9893, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.014686084934524538, |
|
"grad_norm": 0.7556887269020081, |
|
"learning_rate": 1.507684480352292e-07, |
|
"loss": 1.9602, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.014849263656019254, |
|
"grad_norm": 0.7297267913818359, |
|
"learning_rate": 1.223587092621162e-07, |
|
"loss": 1.9517, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.015012442377513972, |
|
"grad_norm": 0.8259098529815674, |
|
"learning_rate": 9.684576015420277e-08, |
|
"loss": 2.0288, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.015175621099008689, |
|
"grad_norm": 0.8356319665908813, |
|
"learning_rate": 7.426068431000883e-08, |
|
"loss": 2.0425, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.015338799820503407, |
|
"grad_norm": 0.8901850581169128, |
|
"learning_rate": 5.463099816548578e-08, |
|
"loss": 1.9574, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.015501978541998123, |
|
"grad_norm": 0.8994855284690857, |
|
"learning_rate": 3.798061746947995e-08, |
|
"loss": 1.9856, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.01566515726349284, |
|
"grad_norm": 0.9636688828468323, |
|
"learning_rate": 2.4329828146074096e-08, |
|
"loss": 2.1306, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.01582833598498756, |
|
"grad_norm": 1.0676484107971191, |
|
"learning_rate": 1.3695261579316776e-08, |
|
"loss": 2.0383, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.015991514706482274, |
|
"grad_norm": 0.999144971370697, |
|
"learning_rate": 6.089874350439507e-09, |
|
"loss": 2.2039, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.016154693427976992, |
|
"grad_norm": 1.143728494644165, |
|
"learning_rate": 1.5229324522605949e-09, |
|
"loss": 2.0779, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.01631787214947171, |
|
"grad_norm": 1.522695779800415, |
|
"learning_rate": 0.0, |
|
"loss": 2.1333, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.01631787214947171, |
|
"eval_loss": 1.954980492591858, |
|
"eval_runtime": 980.7518, |
|
"eval_samples_per_second": 10.525, |
|
"eval_steps_per_second": 2.632, |
|
"step": 100 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 100, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.7378709617403494e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |