{ |
|
"best_metric": 1.2121704816818237, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-150", |
|
"epoch": 0.5145797598627787, |
|
"eval_steps": 50, |
|
"global_step": 150, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003430531732418525, |
|
"grad_norm": 0.11610166728496552, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.4495, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003430531732418525, |
|
"eval_loss": 1.635466456413269, |
|
"eval_runtime": 13.7969, |
|
"eval_samples_per_second": 35.588, |
|
"eval_steps_per_second": 17.83, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00686106346483705, |
|
"grad_norm": 0.11409541964530945, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.5905, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.010291595197255575, |
|
"grad_norm": 0.12520195543766022, |
|
"learning_rate": 1e-05, |
|
"loss": 1.5542, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0137221269296741, |
|
"grad_norm": 0.12021903693675995, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 1.5811, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.017152658662092625, |
|
"grad_norm": 0.13395224511623383, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.6203, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02058319039451115, |
|
"grad_norm": 0.14789403975009918, |
|
"learning_rate": 2e-05, |
|
"loss": 1.5317, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.024013722126929673, |
|
"grad_norm": 0.14486268162727356, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 1.545, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0274442538593482, |
|
"grad_norm": 0.1345217078924179, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 1.5916, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.030874785591766724, |
|
"grad_norm": 0.15623602271080017, |
|
"learning_rate": 3e-05, |
|
"loss": 1.6245, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03430531732418525, |
|
"grad_norm": 0.14663076400756836, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 1.5943, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03773584905660377, |
|
"grad_norm": 0.1576271653175354, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 1.6079, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0411663807890223, |
|
"grad_norm": 0.14678916335105896, |
|
"learning_rate": 4e-05, |
|
"loss": 1.5798, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.044596912521440824, |
|
"grad_norm": 0.16557946801185608, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 1.5852, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.048027444253859346, |
|
"grad_norm": 0.14881809055805206, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 1.5622, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.051457975986277875, |
|
"grad_norm": 0.16381366550922394, |
|
"learning_rate": 5e-05, |
|
"loss": 1.6237, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0548885077186964, |
|
"grad_norm": 0.16224238276481628, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 1.6018, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.058319039451114926, |
|
"grad_norm": 0.15393707156181335, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 1.5426, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.06174957118353345, |
|
"grad_norm": 0.16475345194339752, |
|
"learning_rate": 6e-05, |
|
"loss": 1.6154, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06518010291595197, |
|
"grad_norm": 0.1683150976896286, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 1.5662, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0686106346483705, |
|
"grad_norm": 0.17348520457744598, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 1.5324, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07204116638078903, |
|
"grad_norm": 0.19107702374458313, |
|
"learning_rate": 7e-05, |
|
"loss": 1.5642, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.07547169811320754, |
|
"grad_norm": 0.18596619367599487, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 1.5147, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.07890222984562607, |
|
"grad_norm": 0.16649138927459717, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 1.5331, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.0823327615780446, |
|
"grad_norm": 0.18072673678398132, |
|
"learning_rate": 8e-05, |
|
"loss": 1.5107, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.08576329331046312, |
|
"grad_norm": 0.19641919434070587, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 1.5476, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.08919382504288165, |
|
"grad_norm": 0.19105511903762817, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 1.4737, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09262435677530018, |
|
"grad_norm": 0.19515928626060486, |
|
"learning_rate": 9e-05, |
|
"loss": 1.5276, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.09605488850771869, |
|
"grad_norm": 0.2127508521080017, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 1.4991, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.09948542024013722, |
|
"grad_norm": 0.21000954508781433, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 1.4888, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.10291595197255575, |
|
"grad_norm": 0.23589254915714264, |
|
"learning_rate": 0.0001, |
|
"loss": 1.4491, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.10634648370497427, |
|
"grad_norm": 0.21637442708015442, |
|
"learning_rate": 9.999146252290264e-05, |
|
"loss": 1.5132, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.1097770154373928, |
|
"grad_norm": 0.209445059299469, |
|
"learning_rate": 9.996585300715116e-05, |
|
"loss": 1.3777, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.11320754716981132, |
|
"grad_norm": 0.21992652118206024, |
|
"learning_rate": 9.99231801983717e-05, |
|
"loss": 1.3847, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.11663807890222985, |
|
"grad_norm": 0.2562519311904907, |
|
"learning_rate": 9.986345866928941e-05, |
|
"loss": 1.3906, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.12006861063464837, |
|
"grad_norm": 0.2565302848815918, |
|
"learning_rate": 9.978670881475172e-05, |
|
"loss": 1.408, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1234991423670669, |
|
"grad_norm": 0.26474887132644653, |
|
"learning_rate": 9.96929568447637e-05, |
|
"loss": 1.4223, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.1269296740994854, |
|
"grad_norm": 0.24229450523853302, |
|
"learning_rate": 9.958223477553714e-05, |
|
"loss": 1.4247, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.13036020583190394, |
|
"grad_norm": 0.2645803987979889, |
|
"learning_rate": 9.94545804185573e-05, |
|
"loss": 1.3998, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.13379073756432247, |
|
"grad_norm": 0.2674286663532257, |
|
"learning_rate": 9.931003736767013e-05, |
|
"loss": 1.422, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.137221269296741, |
|
"grad_norm": 0.30323857069015503, |
|
"learning_rate": 9.91486549841951e-05, |
|
"loss": 1.3878, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.14065180102915953, |
|
"grad_norm": 0.28205499053001404, |
|
"learning_rate": 9.89704883800683e-05, |
|
"loss": 1.4102, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.14408233276157806, |
|
"grad_norm": 0.2820606827735901, |
|
"learning_rate": 9.877559839902184e-05, |
|
"loss": 1.3916, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.14751286449399656, |
|
"grad_norm": 0.32162535190582275, |
|
"learning_rate": 9.85640515958057e-05, |
|
"loss": 1.4053, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.1509433962264151, |
|
"grad_norm": 0.36674660444259644, |
|
"learning_rate": 9.833592021345937e-05, |
|
"loss": 1.3878, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.15437392795883362, |
|
"grad_norm": 0.3291763663291931, |
|
"learning_rate": 9.809128215864097e-05, |
|
"loss": 1.3465, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.15780445969125215, |
|
"grad_norm": 0.35642141103744507, |
|
"learning_rate": 9.783022097502204e-05, |
|
"loss": 1.4211, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.16123499142367068, |
|
"grad_norm": 0.35451963543891907, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 1.456, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.1646655231560892, |
|
"grad_norm": 0.36406123638153076, |
|
"learning_rate": 9.725919140804099e-05, |
|
"loss": 1.3712, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1680960548885077, |
|
"grad_norm": 0.4121352434158325, |
|
"learning_rate": 9.694941803075283e-05, |
|
"loss": 1.3851, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.17152658662092624, |
|
"grad_norm": 0.45500677824020386, |
|
"learning_rate": 9.662361147021779e-05, |
|
"loss": 1.3785, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.17152658662092624, |
|
"eval_loss": 1.3733711242675781, |
|
"eval_runtime": 13.893, |
|
"eval_samples_per_second": 35.342, |
|
"eval_steps_per_second": 17.707, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.17495711835334476, |
|
"grad_norm": 0.46119561791419983, |
|
"learning_rate": 9.628188298907782e-05, |
|
"loss": 1.4779, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.1783876500857633, |
|
"grad_norm": 0.3245432376861572, |
|
"learning_rate": 9.592434928729616e-05, |
|
"loss": 1.5325, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 0.29628750681877136, |
|
"learning_rate": 9.555113246230442e-05, |
|
"loss": 1.5435, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.18524871355060035, |
|
"grad_norm": 0.31039223074913025, |
|
"learning_rate": 9.516235996730645e-05, |
|
"loss": 1.4436, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.18867924528301888, |
|
"grad_norm": 0.30162546038627625, |
|
"learning_rate": 9.475816456775313e-05, |
|
"loss": 1.4301, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.19210977701543738, |
|
"grad_norm": 0.2942914366722107, |
|
"learning_rate": 9.43386842960031e-05, |
|
"loss": 1.3478, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1955403087478559, |
|
"grad_norm": 0.23570454120635986, |
|
"learning_rate": 9.39040624041849e-05, |
|
"loss": 1.4735, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.19897084048027444, |
|
"grad_norm": 0.18676193058490753, |
|
"learning_rate": 9.345444731527642e-05, |
|
"loss": 1.4547, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.20240137221269297, |
|
"grad_norm": 0.19960841536521912, |
|
"learning_rate": 9.298999257241863e-05, |
|
"loss": 1.414, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.2058319039451115, |
|
"grad_norm": 0.2147475779056549, |
|
"learning_rate": 9.251085678648072e-05, |
|
"loss": 1.3576, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.20926243567753003, |
|
"grad_norm": 0.2323458343744278, |
|
"learning_rate": 9.201720358189464e-05, |
|
"loss": 1.3581, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.21269296740994853, |
|
"grad_norm": 0.2327861487865448, |
|
"learning_rate": 9.150920154077754e-05, |
|
"loss": 1.4149, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.21612349914236706, |
|
"grad_norm": 0.20968282222747803, |
|
"learning_rate": 9.098702414536107e-05, |
|
"loss": 1.3564, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.2195540308747856, |
|
"grad_norm": 0.18935877084732056, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 1.3218, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.22298456260720412, |
|
"grad_norm": 0.17681340873241425, |
|
"learning_rate": 8.9900861364012e-05, |
|
"loss": 1.3779, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.22641509433962265, |
|
"grad_norm": 0.19004489481449127, |
|
"learning_rate": 8.933724690167417e-05, |
|
"loss": 1.3508, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.22984562607204118, |
|
"grad_norm": 0.2089233696460724, |
|
"learning_rate": 8.876019880555649e-05, |
|
"loss": 1.3112, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.2332761578044597, |
|
"grad_norm": 0.2069028615951538, |
|
"learning_rate": 8.816991413705516e-05, |
|
"loss": 1.3396, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2367066895368782, |
|
"grad_norm": 0.2308463752269745, |
|
"learning_rate": 8.756659447784368e-05, |
|
"loss": 1.3472, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.24013722126929674, |
|
"grad_norm": 0.22154006361961365, |
|
"learning_rate": 8.695044586103296e-05, |
|
"loss": 1.3622, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.24356775300171526, |
|
"grad_norm": 0.2146824598312378, |
|
"learning_rate": 8.632167870081121e-05, |
|
"loss": 1.374, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2469982847341338, |
|
"grad_norm": 0.21277782320976257, |
|
"learning_rate": 8.568050772058762e-05, |
|
"loss": 1.3181, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.2504288164665523, |
|
"grad_norm": 0.21313494443893433, |
|
"learning_rate": 8.502715187966455e-05, |
|
"loss": 1.2741, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.2538593481989708, |
|
"grad_norm": 0.2278512716293335, |
|
"learning_rate": 8.436183429846313e-05, |
|
"loss": 1.2932, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.25728987993138935, |
|
"grad_norm": 0.23364584147930145, |
|
"learning_rate": 8.368478218232787e-05, |
|
"loss": 1.3354, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2607204116638079, |
|
"grad_norm": 0.24717149138450623, |
|
"learning_rate": 8.299622674393614e-05, |
|
"loss": 1.3353, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2641509433962264, |
|
"grad_norm": 0.23784001171588898, |
|
"learning_rate": 8.229640312433937e-05, |
|
"loss": 1.3005, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.26758147512864494, |
|
"grad_norm": 0.2664402723312378, |
|
"learning_rate": 8.158555031266254e-05, |
|
"loss": 1.318, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.27101200686106347, |
|
"grad_norm": 0.22661960124969482, |
|
"learning_rate": 8.086391106448965e-05, |
|
"loss": 1.3024, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.274442538593482, |
|
"grad_norm": 0.21936969459056854, |
|
"learning_rate": 8.013173181896283e-05, |
|
"loss": 1.2807, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.27787307032590053, |
|
"grad_norm": 0.27401024103164673, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 1.2482, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.28130360205831906, |
|
"grad_norm": 0.2559613287448883, |
|
"learning_rate": 7.863675700402526e-05, |
|
"loss": 1.322, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2847341337907376, |
|
"grad_norm": 0.24693572521209717, |
|
"learning_rate": 7.787447196714427e-05, |
|
"loss": 1.2792, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.2881646655231561, |
|
"grad_norm": 0.2803778350353241, |
|
"learning_rate": 7.710266782362247e-05, |
|
"loss": 1.3206, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.2915951972555746, |
|
"grad_norm": 0.2616997957229614, |
|
"learning_rate": 7.63216081438678e-05, |
|
"loss": 1.2982, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2950257289879931, |
|
"grad_norm": 0.274962842464447, |
|
"learning_rate": 7.553155965904535e-05, |
|
"loss": 1.2987, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.29845626072041165, |
|
"grad_norm": 0.3058188557624817, |
|
"learning_rate": 7.473279216998895e-05, |
|
"loss": 1.3222, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.3018867924528302, |
|
"grad_norm": 0.2801516354084015, |
|
"learning_rate": 7.392557845506432e-05, |
|
"loss": 1.2495, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.3053173241852487, |
|
"grad_norm": 0.2805141508579254, |
|
"learning_rate": 7.311019417701566e-05, |
|
"loss": 1.2647, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.30874785591766724, |
|
"grad_norm": 0.30509936809539795, |
|
"learning_rate": 7.228691778882693e-05, |
|
"loss": 1.2689, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.31217838765008576, |
|
"grad_norm": 0.29796192049980164, |
|
"learning_rate": 7.145603043863045e-05, |
|
"loss": 1.2896, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.3156089193825043, |
|
"grad_norm": 0.31429192423820496, |
|
"learning_rate": 7.061781587369519e-05, |
|
"loss": 1.2563, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.3190394511149228, |
|
"grad_norm": 0.3246179521083832, |
|
"learning_rate": 6.977256034352712e-05, |
|
"loss": 1.2631, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.32246998284734135, |
|
"grad_norm": 0.32547494769096375, |
|
"learning_rate": 6.892055250211552e-05, |
|
"loss": 1.2476, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.3259005145797599, |
|
"grad_norm": 0.3263494670391083, |
|
"learning_rate": 6.806208330935766e-05, |
|
"loss": 1.2056, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.3293310463121784, |
|
"grad_norm": 0.3600931763648987, |
|
"learning_rate": 6.719744593169641e-05, |
|
"loss": 1.237, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.33276157804459694, |
|
"grad_norm": 0.36960792541503906, |
|
"learning_rate": 6.632693564200416e-05, |
|
"loss": 1.1656, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.3361921097770154, |
|
"grad_norm": 0.39583271741867065, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 1.2885, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.33962264150943394, |
|
"grad_norm": 0.42984476685523987, |
|
"learning_rate": 6.456948734446624e-05, |
|
"loss": 1.2469, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.34305317324185247, |
|
"grad_norm": 0.5518860816955566, |
|
"learning_rate": 6.368314950360415e-05, |
|
"loss": 1.3106, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.34305317324185247, |
|
"eval_loss": 1.2660377025604248, |
|
"eval_runtime": 13.8359, |
|
"eval_samples_per_second": 35.487, |
|
"eval_steps_per_second": 17.78, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.346483704974271, |
|
"grad_norm": 0.46960487961769104, |
|
"learning_rate": 6.279213887972179e-05, |
|
"loss": 1.3384, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.34991423670668953, |
|
"grad_norm": 0.4278581142425537, |
|
"learning_rate": 6.189675975213094e-05, |
|
"loss": 1.5108, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.35334476843910806, |
|
"grad_norm": 0.3242987394332886, |
|
"learning_rate": 6.099731789198344e-05, |
|
"loss": 1.4282, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.3567753001715266, |
|
"grad_norm": 0.2774030864238739, |
|
"learning_rate": 6.009412045785051e-05, |
|
"loss": 1.3264, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3602058319039451, |
|
"grad_norm": 0.28905799984931946, |
|
"learning_rate": 5.918747589082853e-05, |
|
"loss": 1.3673, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.286334753036499, |
|
"learning_rate": 5.82776938092065e-05, |
|
"loss": 1.3391, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3670668953687822, |
|
"grad_norm": 0.28761571645736694, |
|
"learning_rate": 5.736508490273188e-05, |
|
"loss": 1.3136, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.3704974271012007, |
|
"grad_norm": 0.23601886630058289, |
|
"learning_rate": 5.644996082651017e-05, |
|
"loss": 1.4489, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.37392795883361923, |
|
"grad_norm": 0.26613518595695496, |
|
"learning_rate": 5.553263409457504e-05, |
|
"loss": 1.2763, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.37735849056603776, |
|
"grad_norm": 0.24539977312088013, |
|
"learning_rate": 5.4613417973165106e-05, |
|
"loss": 1.2729, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.38078902229845624, |
|
"grad_norm": 0.23666392266750336, |
|
"learning_rate": 5.3692626373743706e-05, |
|
"loss": 1.3449, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.38421955403087477, |
|
"grad_norm": 0.2275010645389557, |
|
"learning_rate": 5.27705737457985e-05, |
|
"loss": 1.2318, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3876500857632933, |
|
"grad_norm": 0.24695216119289398, |
|
"learning_rate": 5.184757496945726e-05, |
|
"loss": 1.3218, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3910806174957118, |
|
"grad_norm": 0.25049513578414917, |
|
"learning_rate": 5.092394524795649e-05, |
|
"loss": 1.3459, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.39451114922813035, |
|
"grad_norm": 0.2567203640937805, |
|
"learning_rate": 5e-05, |
|
"loss": 1.2598, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.3979416809605489, |
|
"grad_norm": 0.22766773402690887, |
|
"learning_rate": 4.907605475204352e-05, |
|
"loss": 1.217, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.4013722126929674, |
|
"grad_norm": 0.24498119950294495, |
|
"learning_rate": 4.8152425030542766e-05, |
|
"loss": 1.2414, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.40480274442538594, |
|
"grad_norm": 0.2613143026828766, |
|
"learning_rate": 4.72294262542015e-05, |
|
"loss": 1.2934, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.40823327615780447, |
|
"grad_norm": 0.2593250870704651, |
|
"learning_rate": 4.6307373626256306e-05, |
|
"loss": 1.2766, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.411663807890223, |
|
"grad_norm": 0.23566867411136627, |
|
"learning_rate": 4.5386582026834906e-05, |
|
"loss": 1.221, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.41509433962264153, |
|
"grad_norm": 0.2302766591310501, |
|
"learning_rate": 4.446736590542497e-05, |
|
"loss": 1.2444, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.41852487135506006, |
|
"grad_norm": 0.22572791576385498, |
|
"learning_rate": 4.3550039173489845e-05, |
|
"loss": 1.2369, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.4219554030874786, |
|
"grad_norm": 0.2492562234401703, |
|
"learning_rate": 4.2634915097268115e-05, |
|
"loss": 1.1794, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.42538593481989706, |
|
"grad_norm": 0.26300421357154846, |
|
"learning_rate": 4.1722306190793495e-05, |
|
"loss": 1.2073, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.4288164665523156, |
|
"grad_norm": 0.25651299953460693, |
|
"learning_rate": 4.0812524109171476e-05, |
|
"loss": 1.2113, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.4322469982847341, |
|
"grad_norm": 0.25439882278442383, |
|
"learning_rate": 3.99058795421495e-05, |
|
"loss": 1.3366, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.43567753001715265, |
|
"grad_norm": 0.25761932134628296, |
|
"learning_rate": 3.9002682108016585e-05, |
|
"loss": 1.2507, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.4391080617495712, |
|
"grad_norm": 0.25570839643478394, |
|
"learning_rate": 3.8103240247869075e-05, |
|
"loss": 1.2225, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.4425385934819897, |
|
"grad_norm": 0.28426146507263184, |
|
"learning_rate": 3.720786112027822e-05, |
|
"loss": 1.261, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.44596912521440824, |
|
"grad_norm": 0.2929293215274811, |
|
"learning_rate": 3.631685049639586e-05, |
|
"loss": 1.1955, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.44939965694682676, |
|
"grad_norm": 0.26186859607696533, |
|
"learning_rate": 3.543051265553377e-05, |
|
"loss": 1.2179, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.4528301886792453, |
|
"grad_norm": 0.2666632831096649, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 1.1792, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.4562607204116638, |
|
"grad_norm": 0.30045852065086365, |
|
"learning_rate": 3.367306435799584e-05, |
|
"loss": 1.2296, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.45969125214408235, |
|
"grad_norm": 0.2867948114871979, |
|
"learning_rate": 3.2802554068303596e-05, |
|
"loss": 1.166, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.4631217838765009, |
|
"grad_norm": 0.3068961501121521, |
|
"learning_rate": 3.1937916690642356e-05, |
|
"loss": 1.2112, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.4665523156089194, |
|
"grad_norm": 0.31013593077659607, |
|
"learning_rate": 3.107944749788449e-05, |
|
"loss": 1.2374, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.4699828473413379, |
|
"grad_norm": 0.3226790726184845, |
|
"learning_rate": 3.0227439656472877e-05, |
|
"loss": 1.2555, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.4734133790737564, |
|
"grad_norm": 0.30750301480293274, |
|
"learning_rate": 2.9382184126304834e-05, |
|
"loss": 1.2291, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.47684391080617494, |
|
"grad_norm": 0.3350450396537781, |
|
"learning_rate": 2.8543969561369556e-05, |
|
"loss": 1.2717, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.48027444253859347, |
|
"grad_norm": 0.33156758546829224, |
|
"learning_rate": 2.771308221117309e-05, |
|
"loss": 1.2334, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.483704974271012, |
|
"grad_norm": 0.3310351073741913, |
|
"learning_rate": 2.688980582298435e-05, |
|
"loss": 1.1668, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.48713550600343053, |
|
"grad_norm": 0.3672282099723816, |
|
"learning_rate": 2.607442154493568e-05, |
|
"loss": 1.1896, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.49056603773584906, |
|
"grad_norm": 0.38358932733535767, |
|
"learning_rate": 2.5267207830011068e-05, |
|
"loss": 1.2141, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.4939965694682676, |
|
"grad_norm": 0.3809894025325775, |
|
"learning_rate": 2.446844034095466e-05, |
|
"loss": 1.2576, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4974271012006861, |
|
"grad_norm": 0.3670910894870758, |
|
"learning_rate": 2.3678391856132204e-05, |
|
"loss": 1.199, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.5008576329331046, |
|
"grad_norm": 0.4124976098537445, |
|
"learning_rate": 2.2897332176377528e-05, |
|
"loss": 1.169, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.5042881646655232, |
|
"grad_norm": 0.4204322099685669, |
|
"learning_rate": 2.2125528032855724e-05, |
|
"loss": 1.1429, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.5077186963979416, |
|
"grad_norm": 0.4493495523929596, |
|
"learning_rate": 2.136324299597474e-05, |
|
"loss": 1.2079, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.5111492281303602, |
|
"grad_norm": 0.5025396943092346, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 1.3403, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.5145797598627787, |
|
"grad_norm": 0.6223259568214417, |
|
"learning_rate": 1.9868268181037185e-05, |
|
"loss": 1.3662, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.5145797598627787, |
|
"eval_loss": 1.2121704816818237, |
|
"eval_runtime": 13.7988, |
|
"eval_samples_per_second": 35.583, |
|
"eval_steps_per_second": 17.828, |
|
"step": 150 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.080131307700224e+16, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |