{
  "best_metric": 0.5063894391059875,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.07582938388625593,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003791469194312796,
      "grad_norm": 2.3607397079467773,
      "learning_rate": 6e-06,
      "loss": 2.3674,
      "step": 1
    },
    {
      "epoch": 0.0003791469194312796,
      "eval_loss": 1.6981829404830933,
      "eval_runtime": 342.6691,
      "eval_samples_per_second": 12.963,
      "eval_steps_per_second": 3.242,
      "step": 1
    },
    {
      "epoch": 0.0007582938388625592,
      "grad_norm": 2.2280433177948,
      "learning_rate": 1.2e-05,
      "loss": 1.7743,
      "step": 2
    },
    {
      "epoch": 0.001137440758293839,
      "grad_norm": 2.1979401111602783,
      "learning_rate": 1.8e-05,
      "loss": 1.7584,
      "step": 3
    },
    {
      "epoch": 0.0015165876777251184,
      "grad_norm": 2.395792245864868,
      "learning_rate": 2.4e-05,
      "loss": 1.7522,
      "step": 4
    },
    {
      "epoch": 0.0018957345971563982,
      "grad_norm": 2.4676456451416016,
      "learning_rate": 3e-05,
      "loss": 1.6792,
      "step": 5
    },
    {
      "epoch": 0.002274881516587678,
      "grad_norm": 1.9544316530227661,
      "learning_rate": 3.6e-05,
      "loss": 1.5587,
      "step": 6
    },
    {
      "epoch": 0.002654028436018957,
      "grad_norm": 1.9189141988754272,
      "learning_rate": 4.2e-05,
      "loss": 1.5178,
      "step": 7
    },
    {
      "epoch": 0.003033175355450237,
      "grad_norm": 1.8870714902877808,
      "learning_rate": 4.8e-05,
      "loss": 1.4159,
      "step": 8
    },
    {
      "epoch": 0.0034123222748815166,
      "grad_norm": 2.4351065158843994,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.297,
      "step": 9
    },
    {
      "epoch": 0.0037914691943127963,
      "grad_norm": 2.2076759338378906,
      "learning_rate": 6e-05,
      "loss": 1.1759,
      "step": 10
    },
    {
      "epoch": 0.004170616113744076,
      "grad_norm": 1.7516471147537231,
      "learning_rate": 5.999589914977407e-05,
      "loss": 1.1303,
      "step": 11
    },
    {
      "epoch": 0.004549763033175356,
      "grad_norm": 1.5598973035812378,
      "learning_rate": 5.998359772022778e-05,
      "loss": 0.9447,
      "step": 12
    },
    {
      "epoch": 0.0049289099526066355,
      "grad_norm": 1.4637537002563477,
      "learning_rate": 5.996309907444915e-05,
      "loss": 0.9049,
      "step": 13
    },
    {
      "epoch": 0.005308056872037914,
      "grad_norm": 1.9019067287445068,
      "learning_rate": 5.9934408816563236e-05,
      "loss": 0.8778,
      "step": 14
    },
    {
      "epoch": 0.005687203791469194,
      "grad_norm": 1.5424095392227173,
      "learning_rate": 5.98975347902001e-05,
      "loss": 0.8591,
      "step": 15
    },
    {
      "epoch": 0.006066350710900474,
      "grad_norm": 1.3776594400405884,
      "learning_rate": 5.9852487076350345e-05,
      "loss": 0.7328,
      "step": 16
    },
    {
      "epoch": 0.0064454976303317535,
      "grad_norm": 1.3162641525268555,
      "learning_rate": 5.979927799060915e-05,
      "loss": 0.7089,
      "step": 17
    },
    {
      "epoch": 0.006824644549763033,
      "grad_norm": 1.2267054319381714,
      "learning_rate": 5.9737922079809257e-05,
      "loss": 0.7679,
      "step": 18
    },
    {
      "epoch": 0.007203791469194313,
      "grad_norm": 1.1532204151153564,
      "learning_rate": 5.9668436118044054e-05,
      "loss": 0.7228,
      "step": 19
    },
    {
      "epoch": 0.007582938388625593,
      "grad_norm": 1.3049921989440918,
      "learning_rate": 5.959083910208167e-05,
      "loss": 0.7155,
      "step": 20
    },
    {
      "epoch": 0.007962085308056872,
      "grad_norm": 1.3345766067504883,
      "learning_rate": 5.9505152246171474e-05,
      "loss": 0.7048,
      "step": 21
    },
    {
      "epoch": 0.008341232227488152,
      "grad_norm": 1.353661060333252,
      "learning_rate": 5.941139897624428e-05,
      "loss": 0.77,
      "step": 22
    },
    {
      "epoch": 0.008720379146919432,
      "grad_norm": 1.2893695831298828,
      "learning_rate": 5.9309604923507984e-05,
      "loss": 0.7045,
      "step": 23
    },
    {
      "epoch": 0.009099526066350712,
      "grad_norm": 1.139123558998108,
      "learning_rate": 5.9199797917440176e-05,
      "loss": 0.6403,
      "step": 24
    },
    {
      "epoch": 0.009478672985781991,
      "grad_norm": 1.1754416227340698,
      "learning_rate": 5.908200797817991e-05,
      "loss": 0.6316,
      "step": 25
    },
    {
      "epoch": 0.009857819905213271,
      "grad_norm": 1.3844693899154663,
      "learning_rate": 5.895626730832046e-05,
      "loss": 0.6302,
      "step": 26
    },
    {
      "epoch": 0.010236966824644549,
      "grad_norm": 1.0756479501724243,
      "learning_rate": 5.882261028410545e-05,
      "loss": 0.5767,
      "step": 27
    },
    {
      "epoch": 0.010616113744075829,
      "grad_norm": 1.1990379095077515,
      "learning_rate": 5.8681073446030734e-05,
      "loss": 0.5942,
      "step": 28
    },
    {
      "epoch": 0.010995260663507108,
      "grad_norm": 1.2510521411895752,
      "learning_rate": 5.853169548885461e-05,
      "loss": 0.6293,
      "step": 29
    },
    {
      "epoch": 0.011374407582938388,
      "grad_norm": 1.0987434387207031,
      "learning_rate": 5.8374517251019035e-05,
      "loss": 0.5444,
      "step": 30
    },
    {
      "epoch": 0.011753554502369668,
      "grad_norm": 0.9962146878242493,
      "learning_rate": 5.820958170348484e-05,
      "loss": 0.5284,
      "step": 31
    },
    {
      "epoch": 0.012132701421800948,
      "grad_norm": 1.1842890977859497,
      "learning_rate": 5.8036933937983825e-05,
      "loss": 0.5764,
      "step": 32
    },
    {
      "epoch": 0.012511848341232227,
      "grad_norm": 1.1667747497558594,
      "learning_rate": 5.7856621154691217e-05,
      "loss": 0.6086,
      "step": 33
    },
    {
      "epoch": 0.012890995260663507,
      "grad_norm": 1.1322537660598755,
      "learning_rate": 5.766869264932154e-05,
      "loss": 0.5379,
      "step": 34
    },
    {
      "epoch": 0.013270142180094787,
      "grad_norm": 1.07042396068573,
      "learning_rate": 5.747319979965172e-05,
      "loss": 0.5195,
      "step": 35
    },
    {
      "epoch": 0.013649289099526066,
      "grad_norm": 1.023470163345337,
      "learning_rate": 5.727019605147488e-05,
      "loss": 0.543,
      "step": 36
    },
    {
      "epoch": 0.014028436018957346,
      "grad_norm": 1.1579279899597168,
      "learning_rate": 5.7059736903988775e-05,
      "loss": 0.5042,
      "step": 37
    },
    {
      "epoch": 0.014407582938388626,
      "grad_norm": 1.0734813213348389,
      "learning_rate": 5.684187989462291e-05,
      "loss": 0.5276,
      "step": 38
    },
    {
      "epoch": 0.014786729857819906,
      "grad_norm": 1.0260837078094482,
      "learning_rate": 5.661668458330836e-05,
      "loss": 0.4359,
      "step": 39
    },
    {
      "epoch": 0.015165876777251185,
      "grad_norm": 0.9480069279670715,
      "learning_rate": 5.638421253619467e-05,
      "loss": 0.437,
      "step": 40
    },
    {
      "epoch": 0.015545023696682465,
      "grad_norm": 1.0021203756332397,
      "learning_rate": 5.614452730881832e-05,
      "loss": 0.487,
      "step": 41
    },
    {
      "epoch": 0.015924170616113745,
      "grad_norm": 0.9898553490638733,
      "learning_rate": 5.589769442872722e-05,
      "loss": 0.4045,
      "step": 42
    },
    {
      "epoch": 0.016303317535545023,
      "grad_norm": 1.1742738485336304,
      "learning_rate": 5.5643781377566175e-05,
      "loss": 0.4673,
      "step": 43
    },
    {
      "epoch": 0.016682464454976304,
      "grad_norm": 1.0911322832107544,
      "learning_rate": 5.538285757262806e-05,
      "loss": 0.4501,
      "step": 44
    },
    {
      "epoch": 0.017061611374407582,
      "grad_norm": 1.2655229568481445,
      "learning_rate": 5.5114994347875856e-05,
      "loss": 0.4084,
      "step": 45
    },
    {
      "epoch": 0.017440758293838864,
      "grad_norm": 1.3887434005737305,
      "learning_rate": 5.48402649344406e-05,
      "loss": 0.3981,
      "step": 46
    },
    {
      "epoch": 0.01781990521327014,
      "grad_norm": 1.1302635669708252,
      "learning_rate": 5.455874444060078e-05,
      "loss": 0.4061,
      "step": 47
    },
    {
      "epoch": 0.018199052132701423,
      "grad_norm": 0.8546876907348633,
      "learning_rate": 5.427050983124843e-05,
      "loss": 0.3365,
      "step": 48
    },
    {
      "epoch": 0.0185781990521327,
      "grad_norm": 0.8889405727386475,
      "learning_rate": 5.397563990684774e-05,
      "loss": 0.2457,
      "step": 49
    },
    {
      "epoch": 0.018957345971563982,
      "grad_norm": 0.9020243883132935,
      "learning_rate": 5.367421528189181e-05,
      "loss": 0.2258,
      "step": 50
    },
    {
      "epoch": 0.018957345971563982,
      "eval_loss": 0.6159882545471191,
      "eval_runtime": 344.9982,
      "eval_samples_per_second": 12.875,
      "eval_steps_per_second": 3.22,
      "step": 50
    },
    {
      "epoch": 0.01933649289099526,
      "grad_norm": 2.4133408069610596,
      "learning_rate": 5.336631836286338e-05,
      "loss": 1.5293,
      "step": 51
    },
    {
      "epoch": 0.019715639810426542,
      "grad_norm": 1.7940583229064941,
      "learning_rate": 5.3052033325705774e-05,
      "loss": 0.8329,
      "step": 52
    },
    {
      "epoch": 0.02009478672985782,
      "grad_norm": 1.4519822597503662,
      "learning_rate": 5.2731446092810044e-05,
      "loss": 0.7793,
      "step": 53
    },
    {
      "epoch": 0.020473933649289098,
      "grad_norm": 1.110809326171875,
      "learning_rate": 5.240464430952462e-05,
      "loss": 0.7678,
      "step": 54
    },
    {
      "epoch": 0.02085308056872038,
      "grad_norm": 1.0020638704299927,
      "learning_rate": 5.207171732019395e-05,
      "loss": 0.7016,
      "step": 55
    },
    {
      "epoch": 0.021232227488151657,
      "grad_norm": 1.0127681493759155,
      "learning_rate": 5.1732756143732675e-05,
      "loss": 0.6799,
      "step": 56
    },
    {
      "epoch": 0.02161137440758294,
      "grad_norm": 1.004103422164917,
      "learning_rate": 5.1387853448741916e-05,
      "loss": 0.7143,
      "step": 57
    },
    {
      "epoch": 0.021990521327014217,
      "grad_norm": 1.1212753057479858,
      "learning_rate": 5.103710352817465e-05,
      "loss": 0.6442,
      "step": 58
    },
    {
      "epoch": 0.022369668246445498,
      "grad_norm": 1.13973069190979,
      "learning_rate": 5.068060227355698e-05,
      "loss": 0.7217,
      "step": 59
    },
    {
      "epoch": 0.022748815165876776,
      "grad_norm": 1.1548775434494019,
      "learning_rate": 5.0318447148772234e-05,
      "loss": 0.708,
      "step": 60
    },
    {
      "epoch": 0.023127962085308058,
      "grad_norm": 1.039237380027771,
      "learning_rate": 4.995073716341545e-05,
      "loss": 0.6506,
      "step": 61
    },
    {
      "epoch": 0.023507109004739336,
      "grad_norm": 1.0963629484176636,
      "learning_rate": 4.957757284572506e-05,
      "loss": 0.6685,
      "step": 62
    },
    {
      "epoch": 0.023886255924170617,
      "grad_norm": 1.0616074800491333,
      "learning_rate": 4.91990562150995e-05,
      "loss": 0.6461,
      "step": 63
    },
    {
      "epoch": 0.024265402843601895,
      "grad_norm": 1.0263690948486328,
      "learning_rate": 4.881529075420611e-05,
      "loss": 0.5949,
      "step": 64
    },
    {
      "epoch": 0.024644549763033177,
      "grad_norm": 1.1550185680389404,
      "learning_rate": 4.8426381380690036e-05,
      "loss": 0.6779,
      "step": 65
    },
    {
      "epoch": 0.025023696682464455,
      "grad_norm": 0.9933828115463257,
      "learning_rate": 4.8032434418490753e-05,
      "loss": 0.6456,
      "step": 66
    },
    {
      "epoch": 0.025402843601895736,
      "grad_norm": 1.00188410282135,
      "learning_rate": 4.7633557568774194e-05,
      "loss": 0.6354,
      "step": 67
    },
    {
      "epoch": 0.025781990521327014,
      "grad_norm": 1.0373916625976562,
      "learning_rate": 4.722985988048831e-05,
      "loss": 0.6082,
      "step": 68
    },
    {
      "epoch": 0.026161137440758295,
      "grad_norm": 0.9275899529457092,
      "learning_rate": 4.6821451720550184e-05,
      "loss": 0.6274,
      "step": 69
    },
    {
      "epoch": 0.026540284360189573,
      "grad_norm": 0.9165945649147034,
      "learning_rate": 4.640844474367282e-05,
      "loss": 0.6005,
      "step": 70
    },
    {
      "epoch": 0.02691943127962085,
      "grad_norm": 0.9279446601867676,
      "learning_rate": 4.5990951861839815e-05,
      "loss": 0.5644,
      "step": 71
    },
    {
      "epoch": 0.027298578199052133,
      "grad_norm": 0.856342077255249,
      "learning_rate": 4.5569087213436455e-05,
      "loss": 0.5951,
      "step": 72
    },
    {
      "epoch": 0.02767772511848341,
      "grad_norm": 0.95192950963974,
      "learning_rate": 4.514296613204532e-05,
      "loss": 0.5506,
      "step": 73
    },
    {
      "epoch": 0.028056872037914692,
      "grad_norm": 1.0360265970230103,
      "learning_rate": 4.471270511491525e-05,
      "loss": 0.64,
      "step": 74
    },
    {
      "epoch": 0.02843601895734597,
      "grad_norm": 0.9761951565742493,
      "learning_rate": 4.427842179111221e-05,
      "loss": 0.5912,
      "step": 75
    },
    {
      "epoch": 0.02881516587677725,
      "grad_norm": 0.8985703587532043,
      "learning_rate": 4.3840234889360634e-05,
      "loss": 0.5406,
      "step": 76
    },
    {
      "epoch": 0.02919431279620853,
      "grad_norm": 0.9850453734397888,
      "learning_rate": 4.33982642055842e-05,
      "loss": 0.6044,
      "step": 77
    },
    {
      "epoch": 0.02957345971563981,
      "grad_norm": 0.9527453184127808,
      "learning_rate": 4.2952630570154785e-05,
      "loss": 0.5498,
      "step": 78
    },
    {
      "epoch": 0.02995260663507109,
      "grad_norm": 0.8898606300354004,
      "learning_rate": 4.250345581485871e-05,
      "loss": 0.498,
      "step": 79
    },
    {
      "epoch": 0.03033175355450237,
      "grad_norm": 1.0251396894454956,
      "learning_rate": 4.205086273958909e-05,
      "loss": 0.5551,
      "step": 80
    },
    {
      "epoch": 0.03071090047393365,
      "grad_norm": 0.9887294173240662,
      "learning_rate": 4.1594975078773565e-05,
      "loss": 0.5348,
      "step": 81
    },
    {
      "epoch": 0.03109004739336493,
      "grad_norm": 0.9571895599365234,
      "learning_rate": 4.113591746754662e-05,
      "loss": 0.5147,
      "step": 82
    },
    {
      "epoch": 0.03146919431279621,
      "grad_norm": 0.8705043792724609,
      "learning_rate": 4.06738154076755e-05,
      "loss": 0.4836,
      "step": 83
    },
    {
      "epoch": 0.03184834123222749,
      "grad_norm": 0.9828000068664551,
      "learning_rate": 4.020879523324929e-05,
      "loss": 0.5008,
      "step": 84
    },
    {
      "epoch": 0.03222748815165877,
      "grad_norm": 0.9241380095481873,
      "learning_rate": 3.974098407614051e-05,
      "loss": 0.4675,
      "step": 85
    },
    {
      "epoch": 0.032606635071090045,
      "grad_norm": 0.9122922420501709,
      "learning_rate": 3.927050983124842e-05,
      "loss": 0.5142,
      "step": 86
    },
    {
      "epoch": 0.03298578199052133,
      "grad_norm": 0.8403096795082092,
      "learning_rate": 3.8797501121533946e-05,
      "loss": 0.4439,
      "step": 87
    },
    {
      "epoch": 0.03336492890995261,
      "grad_norm": 0.910892128944397,
      "learning_rate": 3.832208726285534e-05,
      "loss": 0.4519,
      "step": 88
    },
    {
      "epoch": 0.033744075829383886,
      "grad_norm": 0.9453192353248596,
      "learning_rate": 3.784439822861459e-05,
      "loss": 0.4743,
      "step": 89
    },
    {
      "epoch": 0.034123222748815164,
      "grad_norm": 0.9362481832504272,
      "learning_rate": 3.7364564614223976e-05,
      "loss": 0.4588,
      "step": 90
    },
    {
      "epoch": 0.03450236966824644,
      "grad_norm": 0.8357151746749878,
      "learning_rate": 3.688271760140255e-05,
      "loss": 0.3989,
      "step": 91
    },
    {
      "epoch": 0.03488151658767773,
      "grad_norm": 0.9535480737686157,
      "learning_rate": 3.6398988922312406e-05,
      "loss": 0.4292,
      "step": 92
    },
    {
      "epoch": 0.035260663507109005,
      "grad_norm": 0.8818041086196899,
      "learning_rate": 3.591351082354441e-05,
      "loss": 0.376,
      "step": 93
    },
    {
      "epoch": 0.03563981042654028,
      "grad_norm": 0.9949309229850769,
      "learning_rate": 3.54264160299633e-05,
      "loss": 0.4139,
      "step": 94
    },
    {
      "epoch": 0.03601895734597156,
      "grad_norm": 0.8851547837257385,
      "learning_rate": 3.493783770842202e-05,
      "loss": 0.3293,
      "step": 95
    },
    {
      "epoch": 0.036398104265402846,
      "grad_norm": 0.8010683059692383,
      "learning_rate": 3.444790943135526e-05,
      "loss": 0.3311,
      "step": 96
    },
    {
      "epoch": 0.036777251184834124,
      "grad_norm": 0.7428768873214722,
      "learning_rate": 3.3956765140262074e-05,
      "loss": 0.2844,
      "step": 97
    },
    {
      "epoch": 0.0371563981042654,
      "grad_norm": 0.9548898339271545,
      "learning_rate": 3.346453910908759e-05,
      "loss": 0.376,
      "step": 98
    },
    {
      "epoch": 0.03753554502369668,
      "grad_norm": 1.0641423463821411,
      "learning_rate": 3.297136590751389e-05,
      "loss": 0.3601,
      "step": 99
    },
    {
      "epoch": 0.037914691943127965,
      "grad_norm": 0.7730882167816162,
      "learning_rate": 3.247738036416998e-05,
      "loss": 0.2464,
      "step": 100
    },
    {
      "epoch": 0.037914691943127965,
      "eval_loss": 0.5508739948272705,
      "eval_runtime": 345.5738,
      "eval_samples_per_second": 12.854,
      "eval_steps_per_second": 3.215,
      "step": 100
    },
    {
      "epoch": 0.03829383886255924,
      "grad_norm": 1.8822741508483887,
      "learning_rate": 3.1982717529770985e-05,
      "loss": 1.4074,
      "step": 101
    },
    {
      "epoch": 0.03867298578199052,
      "grad_norm": 1.272026777267456,
      "learning_rate": 3.148751264019667e-05,
      "loss": 0.7856,
      "step": 102
    },
    {
      "epoch": 0.0390521327014218,
      "grad_norm": 1.161899209022522,
      "learning_rate": 3.099190107951924e-05,
      "loss": 0.6956,
      "step": 103
    },
    {
      "epoch": 0.039431279620853084,
      "grad_norm": 1.0137699842453003,
      "learning_rate": 3.049601834299076e-05,
      "loss": 0.6313,
      "step": 104
    },
    {
      "epoch": 0.03981042654028436,
      "grad_norm": 1.0051027536392212,
      "learning_rate": 3e-05,
      "loss": 0.6899,
      "step": 105
    },
    {
      "epoch": 0.04018957345971564,
      "grad_norm": 0.9395660758018494,
      "learning_rate": 2.9503981657009246e-05,
      "loss": 0.6345,
      "step": 106
    },
    {
      "epoch": 0.04056872037914692,
      "grad_norm": 0.8711168766021729,
      "learning_rate": 2.9008098920480752e-05,
      "loss": 0.6447,
      "step": 107
    },
    {
      "epoch": 0.040947867298578196,
      "grad_norm": 1.1823228597640991,
      "learning_rate": 2.851248735980333e-05,
      "loss": 0.663,
      "step": 108
    },
    {
      "epoch": 0.04132701421800948,
      "grad_norm": 0.9468877911567688,
      "learning_rate": 2.801728247022902e-05,
      "loss": 0.6448,
      "step": 109
    },
    {
      "epoch": 0.04170616113744076,
      "grad_norm": 0.8840280175209045,
      "learning_rate": 2.7522619635830034e-05,
      "loss": 0.6168,
      "step": 110
    },
    {
      "epoch": 0.04208530805687204,
      "grad_norm": 0.9660087823867798,
      "learning_rate": 2.702863409248612e-05,
      "loss": 0.6257,
      "step": 111
    },
    {
      "epoch": 0.042464454976303315,
      "grad_norm": 0.9464119076728821,
      "learning_rate": 2.6535460890912416e-05,
      "loss": 0.6864,
      "step": 112
    },
    {
      "epoch": 0.0428436018957346,
      "grad_norm": 1.0040414333343506,
      "learning_rate": 2.604323485973793e-05,
      "loss": 0.6249,
      "step": 113
    },
    {
      "epoch": 0.04322274881516588,
      "grad_norm": 0.9637667536735535,
      "learning_rate": 2.555209056864474e-05,
      "loss": 0.5605,
      "step": 114
    },
    {
      "epoch": 0.043601895734597156,
      "grad_norm": 0.9878002405166626,
      "learning_rate": 2.5062162291577978e-05,
      "loss": 0.536,
      "step": 115
    },
    {
      "epoch": 0.043981042654028434,
      "grad_norm": 1.0215932130813599,
      "learning_rate": 2.4573583970036712e-05,
      "loss": 0.6054,
      "step": 116
    },
    {
      "epoch": 0.04436018957345972,
      "grad_norm": 0.9616793394088745,
      "learning_rate": 2.4086489176455595e-05,
      "loss": 0.5458,
      "step": 117
    },
    {
      "epoch": 0.044739336492890996,
      "grad_norm": 0.9715977311134338,
      "learning_rate": 2.36010110776876e-05,
      "loss": 0.5747,
      "step": 118
    },
    {
      "epoch": 0.045118483412322274,
      "grad_norm": 0.9184060096740723,
      "learning_rate": 2.3117282398597456e-05,
      "loss": 0.5391,
      "step": 119
    },
    {
      "epoch": 0.04549763033175355,
      "grad_norm": 0.9710631370544434,
      "learning_rate": 2.263543538577603e-05,
      "loss": 0.5382,
      "step": 120
    },
    {
      "epoch": 0.04587677725118484,
      "grad_norm": 0.8977413773536682,
      "learning_rate": 2.215560177138541e-05,
      "loss": 0.5466,
      "step": 121
    },
    {
      "epoch": 0.046255924170616115,
      "grad_norm": 0.9749202728271484,
      "learning_rate": 2.167791273714467e-05,
      "loss": 0.5813,
      "step": 122
    },
    {
      "epoch": 0.04663507109004739,
      "grad_norm": 1.0063494443893433,
      "learning_rate": 2.1202498878466062e-05,
      "loss": 0.5904,
      "step": 123
    },
    {
      "epoch": 0.04701421800947867,
      "grad_norm": 1.0267138481140137,
      "learning_rate": 2.072949016875158e-05,
      "loss": 0.5809,
      "step": 124
    },
    {
      "epoch": 0.04739336492890995,
      "grad_norm": 0.916971743106842,
      "learning_rate": 2.0259015923859498e-05,
      "loss": 0.5226,
      "step": 125
    },
    {
      "epoch": 0.047772511848341234,
      "grad_norm": 0.9448188543319702,
      "learning_rate": 1.979120476675071e-05,
      "loss": 0.5503,
      "step": 126
    },
    {
      "epoch": 0.04815165876777251,
      "grad_norm": 0.8960833549499512,
      "learning_rate": 1.9326184592324503e-05,
      "loss": 0.5472,
      "step": 127
    },
    {
      "epoch": 0.04853080568720379,
      "grad_norm": 0.9310248494148254,
      "learning_rate": 1.8864082532453373e-05,
      "loss": 0.554,
      "step": 128
    },
    {
      "epoch": 0.04890995260663507,
      "grad_norm": 0.8872618675231934,
      "learning_rate": 1.840502492122644e-05,
      "loss": 0.5129,
      "step": 129
    },
    {
      "epoch": 0.04928909952606635,
      "grad_norm": 0.9954771399497986,
      "learning_rate": 1.7949137260410924e-05,
      "loss": 0.5348,
      "step": 130
    },
    {
      "epoch": 0.04966824644549763,
      "grad_norm": 0.8588917255401611,
      "learning_rate": 1.7496544185141295e-05,
      "loss": 0.4947,
      "step": 131
    },
    {
      "epoch": 0.05004739336492891,
      "grad_norm": 0.8972111940383911,
      "learning_rate": 1.7047369429845216e-05,
      "loss": 0.5559,
      "step": 132
    },
    {
      "epoch": 0.05042654028436019,
      "grad_norm": 0.8581748008728027,
      "learning_rate": 1.6601735794415806e-05,
      "loss": 0.4764,
      "step": 133
    },
    {
      "epoch": 0.05080568720379147,
      "grad_norm": 0.8218798041343689,
      "learning_rate": 1.615976511063937e-05,
      "loss": 0.4472,
      "step": 134
    },
    {
      "epoch": 0.05118483412322275,
      "grad_norm": 0.8914713859558105,
      "learning_rate": 1.5721578208887793e-05,
      "loss": 0.4507,
      "step": 135
    },
    {
      "epoch": 0.05156398104265403,
      "grad_norm": 0.891606330871582,
      "learning_rate": 1.5287294885084766e-05,
      "loss": 0.4769,
      "step": 136
    },
    {
      "epoch": 0.051943127962085306,
      "grad_norm": 0.9011698365211487,
      "learning_rate": 1.4857033867954697e-05,
      "loss": 0.5192,
      "step": 137
    },
    {
      "epoch": 0.05232227488151659,
      "grad_norm": 0.8463770151138306,
      "learning_rate": 1.4430912786563554e-05,
      "loss": 0.4164,
      "step": 138
    },
    {
      "epoch": 0.05270142180094787,
      "grad_norm": 0.9531325101852417,
      "learning_rate": 1.4009048138160195e-05,
      "loss": 0.4199,
      "step": 139
    },
    {
      "epoch": 0.05308056872037915,
      "grad_norm": 0.8462571501731873,
      "learning_rate": 1.3591555256327199e-05,
      "loss": 0.4274,
      "step": 140
    },
    {
      "epoch": 0.053459715639810425,
      "grad_norm": 0.862342894077301,
      "learning_rate": 1.3178548279449822e-05,
      "loss": 0.3976,
      "step": 141
    },
    {
      "epoch": 0.0538388625592417,
      "grad_norm": 0.7988135814666748,
      "learning_rate": 1.2770140119511693e-05,
      "loss": 0.3224,
      "step": 142
    },
    {
      "epoch": 0.05421800947867299,
      "grad_norm": 0.8891763091087341,
      "learning_rate": 1.2366442431225809e-05,
      "loss": 0.4034,
      "step": 143
    },
    {
      "epoch": 0.054597156398104266,
      "grad_norm": 0.9104804396629333,
      "learning_rate": 1.1967565581509248e-05,
      "loss": 0.3469,
      "step": 144
    },
    {
      "epoch": 0.054976303317535544,
      "grad_norm": 0.8460737466812134,
      "learning_rate": 1.1573618619309965e-05,
      "loss": 0.323,
      "step": 145
    },
    {
      "epoch": 0.05535545023696682,
      "grad_norm": 0.7780117988586426,
      "learning_rate": 1.1184709245793889e-05,
      "loss": 0.2783,
      "step": 146
    },
    {
      "epoch": 0.05573459715639811,
      "grad_norm": 0.8993276357650757,
      "learning_rate": 1.0800943784900502e-05,
      "loss": 0.3748,
      "step": 147
    },
    {
      "epoch": 0.056113744075829385,
      "grad_norm": 0.8467982411384583,
      "learning_rate": 1.042242715427494e-05,
      "loss": 0.2837,
      "step": 148
    },
    {
      "epoch": 0.05649289099526066,
      "grad_norm": 0.8935648202896118,
      "learning_rate": 1.004926283658455e-05,
      "loss": 0.3136,
      "step": 149
    },
    {
      "epoch": 0.05687203791469194,
      "grad_norm": 0.9242781400680542,
      "learning_rate": 9.681552851227774e-06,
      "loss": 0.2324,
      "step": 150
    },
    {
      "epoch": 0.05687203791469194,
      "eval_loss": 0.5187870860099792,
      "eval_runtime": 345.6495,
      "eval_samples_per_second": 12.851,
      "eval_steps_per_second": 3.214,
      "step": 150
    },
    {
      "epoch": 0.057251184834123225,
      "grad_norm": 1.3798567056655884,
      "learning_rate": 9.319397726443026e-06,
      "loss": 1.0638,
      "step": 151
    },
    {
      "epoch": 0.0576303317535545,
      "grad_norm": 1.1334023475646973,
      "learning_rate": 8.962896471825342e-06,
      "loss": 0.6878,
      "step": 152
    },
    {
      "epoch": 0.05800947867298578,
      "grad_norm": 1.0488735437393188,
      "learning_rate": 8.61214655125809e-06,
      "loss": 0.7195,
      "step": 153
    },
    {
      "epoch": 0.05838862559241706,
      "grad_norm": 1.0702718496322632,
      "learning_rate": 8.267243856267331e-06,
      "loss": 0.5706,
      "step": 154
    },
    {
      "epoch": 0.058767772511848344,
      "grad_norm": 0.9154753088951111,
      "learning_rate": 7.928282679806052e-06,
      "loss": 0.5379,
      "step": 155
    },
    {
      "epoch": 0.05914691943127962,
      "grad_norm": 1.022912859916687,
      "learning_rate": 7.595355690475393e-06,
      "loss": 0.6094,
      "step": 156
    },
    {
      "epoch": 0.0595260663507109,
      "grad_norm": 1.1987192630767822,
      "learning_rate": 7.268553907189964e-06,
      "loss": 0.6575,
      "step": 157
    },
    {
      "epoch": 0.05990521327014218,
      "grad_norm": 1.0124115943908691,
      "learning_rate": 6.947966674294236e-06,
      "loss": 0.6735,
      "step": 158
    },
    {
      "epoch": 0.060284360189573456,
      "grad_norm": 1.0271550416946411,
      "learning_rate": 6.6336816371366305e-06,
      "loss": 0.5953,
      "step": 159
    },
    {
      "epoch": 0.06066350710900474,
      "grad_norm": 1.0127638578414917,
      "learning_rate": 6.325784718108196e-06,
      "loss": 0.6548,
      "step": 160
    },
    {
      "epoch": 0.06104265402843602,
      "grad_norm": 0.8849831223487854,
      "learning_rate": 6.0243600931522595e-06,
      "loss": 0.6348,
      "step": 161
    },
    {
      "epoch": 0.0614218009478673,
      "grad_norm": 0.9687224626541138,
      "learning_rate": 5.72949016875158e-06,
      "loss": 0.5602,
      "step": 162
    },
    {
      "epoch": 0.061800947867298575,
      "grad_norm": 0.9322991371154785,
      "learning_rate": 5.44125555939923e-06,
      "loss": 0.6793,
      "step": 163
    },
    {
      "epoch": 0.06218009478672986,
      "grad_norm": 0.8064345717430115,
      "learning_rate": 5.159735065559399e-06,
      "loss": 0.4625,
      "step": 164
    },
    {
      "epoch": 0.06255924170616113,
      "grad_norm": 0.9643939137458801,
      "learning_rate": 4.885005652124144e-06,
      "loss": 0.6543,
      "step": 165
    },
    {
      "epoch": 0.06293838862559242,
      "grad_norm": 0.9320531487464905,
      "learning_rate": 4.617142427371934e-06,
      "loss": 0.5322,
      "step": 166
    },
    {
      "epoch": 0.0633175355450237,
      "grad_norm": 0.7888116240501404,
      "learning_rate": 4.3562186224338265e-06,
      "loss": 0.4682,
      "step": 167
    },
    {
      "epoch": 0.06369668246445498,
      "grad_norm": 0.9415287375450134,
      "learning_rate": 4.102305571272783e-06,
      "loss": 0.6159,
      "step": 168
    },
    {
      "epoch": 0.06407582938388626,
      "grad_norm": 0.8795173168182373,
      "learning_rate": 3.855472691181678e-06,
      "loss": 0.5751,
      "step": 169
    },
    {
      "epoch": 0.06445497630331753,
      "grad_norm": 0.9194141030311584,
      "learning_rate": 3.615787463805331e-06,
      "loss": 0.5376,
      "step": 170
    },
    {
      "epoch": 0.06483412322274881,
      "grad_norm": 0.9153357744216919,
      "learning_rate": 3.383315416691646e-06,
      "loss": 0.5864,
      "step": 171
    },
    {
      "epoch": 0.06521327014218009,
      "grad_norm": 0.8993094563484192,
      "learning_rate": 3.158120105377096e-06,
      "loss": 0.5267,
      "step": 172
    },
    {
      "epoch": 0.06559241706161137,
      "grad_norm": 0.8435608744621277,
      "learning_rate": 2.940263096011233e-06,
      "loss": 0.5413,
      "step": 173
    },
    {
      "epoch": 0.06597156398104266,
      "grad_norm": 0.9139825105667114,
      "learning_rate": 2.729803948525125e-06,
      "loss": 0.4994,
      "step": 174
    },
    {
      "epoch": 0.06635071090047394,
      "grad_norm": 0.8998444080352783,
      "learning_rate": 2.526800200348275e-06,
      "loss": 0.5262,
      "step": 175
    },
    {
      "epoch": 0.06672985781990522,
      "grad_norm": 0.8086129426956177,
      "learning_rate": 2.3313073506784575e-06,
      "loss": 0.4989,
      "step": 176
    },
    {
      "epoch": 0.0671090047393365,
      "grad_norm": 0.805290937423706,
      "learning_rate": 2.143378845308791e-06,
      "loss": 0.4473,
      "step": 177
    },
    {
      "epoch": 0.06748815165876777,
      "grad_norm": 0.8601678609848022,
      "learning_rate": 1.9630660620161777e-06,
      "loss": 0.4955,
      "step": 178
    },
    {
      "epoch": 0.06786729857819905,
      "grad_norm": 0.875322699546814,
      "learning_rate": 1.790418296515165e-06,
      "loss": 0.5145,
      "step": 179
    },
    {
      "epoch": 0.06824644549763033,
      "grad_norm": 0.8989061117172241,
      "learning_rate": 1.625482748980961e-06,
      "loss": 0.5265,
      "step": 180
    },
    {
      "epoch": 0.0686255924170616,
      "grad_norm": 0.8949686884880066,
      "learning_rate": 1.4683045111453942e-06,
      "loss": 0.4835,
      "step": 181
    },
    {
      "epoch": 0.06900473933649288,
      "grad_norm": 0.9829505681991577,
      "learning_rate": 1.3189265539692707e-06,
      "loss": 0.4863,
      "step": 182
    },
    {
      "epoch": 0.06938388625592418,
      "grad_norm": 0.9040274024009705,
      "learning_rate": 1.1773897158945557e-06,
      "loss": 0.4871,
      "step": 183
    },
    {
      "epoch": 0.06976303317535545,
      "grad_norm": 0.8389787077903748,
      "learning_rate": 1.0437326916795432e-06,
      "loss": 0.4487,
      "step": 184
    },
    {
      "epoch": 0.07014218009478673,
      "grad_norm": 0.8816047310829163,
      "learning_rate": 9.179920218200888e-07,
      "loss": 0.4567,
      "step": 185
    },
    {
      "epoch": 0.07052132701421801,
      "grad_norm": 0.951728343963623,
      "learning_rate": 8.002020825598277e-07,
      "loss": 0.4308,
      "step": 186
    },
    {
      "epoch": 0.07090047393364929,
      "grad_norm": 0.9444287419319153,
      "learning_rate": 6.90395076492022e-07,
      "loss": 0.4475,
      "step": 187
    },
    {
      "epoch": 0.07127962085308057,
      "grad_norm": 0.7584393620491028,
      "learning_rate": 5.886010237557194e-07,
      "loss": 0.3609,
      "step": 188
    },
    {
      "epoch": 0.07165876777251184,
      "grad_norm": 0.8242846727371216,
      "learning_rate": 4.94847753828529e-07,
      "loss": 0.3971,
      "step": 189
    },
    {
      "epoch": 0.07203791469194312,
      "grad_norm": 0.8676409125328064,
      "learning_rate": 4.091608979183303e-07,
      "loss": 0.3851,
      "step": 190
    },
    {
      "epoch": 0.07241706161137441,
      "grad_norm": 0.8776127696037292,
      "learning_rate": 3.315638819559452e-07,
      "loss": 0.3837,
      "step": 191
    },
    {
      "epoch": 0.07279620853080569,
      "grad_norm": 0.9045898914337158,
      "learning_rate": 2.6207792019074414e-07,
      "loss": 0.4312,
      "step": 192
    },
    {
      "epoch": 0.07317535545023697,
      "grad_norm": 0.9427354335784912,
      "learning_rate": 2.0072200939085573e-07,
      "loss": 0.4484,
      "step": 193
    },
    {
      "epoch": 0.07355450236966825,
      "grad_norm": 0.8181795477867126,
      "learning_rate": 1.475129236496575e-07,
      "loss": 0.3509,
      "step": 194
    },
    {
      "epoch": 0.07393364928909953,
      "grad_norm": 0.7674962878227234,
      "learning_rate": 1.0246520979990459e-07,
      "loss": 0.3238,
      "step": 195
    },
    {
      "epoch": 0.0743127962085308,
      "grad_norm": 0.8669456839561462,
      "learning_rate": 6.559118343676396e-08,
      "loss": 0.3703,
      "step": 196
    },
    {
      "epoch": 0.07469194312796208,
      "grad_norm": 0.8156073093414307,
      "learning_rate": 3.690092555085789e-08,
      "loss": 0.2708,
      "step": 197
    },
    {
      "epoch": 0.07507109004739336,
      "grad_norm": 0.8143777847290039,
      "learning_rate": 1.640227977221853e-08,
      "loss": 0.269,
      "step": 198
    },
    {
      "epoch": 0.07545023696682464,
      "grad_norm": 0.8005136847496033,
      "learning_rate": 4.1008502259298755e-09,
      "loss": 0.2552,
      "step": 199
    },
    {
      "epoch": 0.07582938388625593,
      "grad_norm": 0.886074423789978,
      "learning_rate": 0.0,
      "loss": 0.2134,
      "step": 200
    },
    {
      "epoch": 0.07582938388625593,
      "eval_loss": 0.5063894391059875,
      "eval_runtime": 344.7064,
      "eval_samples_per_second": 12.886,
      "eval_steps_per_second": 3.223,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.092452836429005e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}