{
  "best_metric": 0.3401404023170471,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.14380729822038468,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007190364911019234,
      "grad_norm": 0.6023048758506775,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.8203,
      "step": 1
    },
    {
      "epoch": 0.0007190364911019234,
      "eval_loss": 1.5364902019500732,
      "eval_runtime": 58.5395,
      "eval_samples_per_second": 40.007,
      "eval_steps_per_second": 20.004,
      "step": 1
    },
    {
      "epoch": 0.0014380729822038468,
      "grad_norm": 0.6873555183410645,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.9046,
      "step": 2
    },
    {
      "epoch": 0.0021571094733057704,
      "grad_norm": 0.742555558681488,
      "learning_rate": 1e-05,
      "loss": 1.709,
      "step": 3
    },
    {
      "epoch": 0.0028761459644076936,
      "grad_norm": 0.8109886050224304,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 2.233,
      "step": 4
    },
    {
      "epoch": 0.003595182455509617,
      "grad_norm": 0.6829097867012024,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.7143,
      "step": 5
    },
    {
      "epoch": 0.004314218946611541,
      "grad_norm": 0.7394075393676758,
      "learning_rate": 2e-05,
      "loss": 1.7139,
      "step": 6
    },
    {
      "epoch": 0.005033255437713464,
      "grad_norm": 0.8270502090454102,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.8787,
      "step": 7
    },
    {
      "epoch": 0.005752291928815387,
      "grad_norm": 0.6169760823249817,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 1.6642,
      "step": 8
    },
    {
      "epoch": 0.006471328419917311,
      "grad_norm": 0.6499037742614746,
      "learning_rate": 3e-05,
      "loss": 1.6552,
      "step": 9
    },
    {
      "epoch": 0.007190364911019234,
      "grad_norm": 0.6464849710464478,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.7106,
      "step": 10
    },
    {
      "epoch": 0.007909401402121157,
      "grad_norm": 0.5525938272476196,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 1.6372,
      "step": 11
    },
    {
      "epoch": 0.008628437893223082,
      "grad_norm": 0.44765809178352356,
      "learning_rate": 4e-05,
      "loss": 1.4306,
      "step": 12
    },
    {
      "epoch": 0.009347474384325004,
      "grad_norm": 0.4276944696903229,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 1.3624,
      "step": 13
    },
    {
      "epoch": 0.010066510875426928,
      "grad_norm": 0.4440835416316986,
      "learning_rate": 4.666666666666667e-05,
      "loss": 1.428,
      "step": 14
    },
    {
      "epoch": 0.010785547366528852,
      "grad_norm": 0.4091176986694336,
      "learning_rate": 5e-05,
      "loss": 1.3089,
      "step": 15
    },
    {
      "epoch": 0.011504583857630774,
      "grad_norm": 0.38626205921173096,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.2807,
      "step": 16
    },
    {
      "epoch": 0.012223620348732698,
      "grad_norm": 0.38527804613113403,
      "learning_rate": 5.666666666666667e-05,
      "loss": 1.334,
      "step": 17
    },
    {
      "epoch": 0.012942656839834622,
      "grad_norm": 0.3838706910610199,
      "learning_rate": 6e-05,
      "loss": 1.3725,
      "step": 18
    },
    {
      "epoch": 0.013661693330936545,
      "grad_norm": 0.3551206886768341,
      "learning_rate": 6.333333333333333e-05,
      "loss": 1.3084,
      "step": 19
    },
    {
      "epoch": 0.014380729822038469,
      "grad_norm": 0.35459205508232117,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.3165,
      "step": 20
    },
    {
      "epoch": 0.015099766313140393,
      "grad_norm": 0.3400305509567261,
      "learning_rate": 7e-05,
      "loss": 1.2457,
      "step": 21
    },
    {
      "epoch": 0.015818802804242315,
      "grad_norm": 0.3397105038166046,
      "learning_rate": 7.333333333333333e-05,
      "loss": 1.2202,
      "step": 22
    },
    {
      "epoch": 0.016537839295344237,
      "grad_norm": 0.35030993819236755,
      "learning_rate": 7.666666666666667e-05,
      "loss": 1.234,
      "step": 23
    },
    {
      "epoch": 0.017256875786446163,
      "grad_norm": 0.33557063341140747,
      "learning_rate": 8e-05,
      "loss": 1.1808,
      "step": 24
    },
    {
      "epoch": 0.017975912277548085,
      "grad_norm": 0.30998116731643677,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.1153,
      "step": 25
    },
    {
      "epoch": 0.018694948768650008,
      "grad_norm": 0.307394802570343,
      "learning_rate": 8.666666666666667e-05,
      "loss": 1.0866,
      "step": 26
    },
    {
      "epoch": 0.019413985259751933,
      "grad_norm": 0.3161311447620392,
      "learning_rate": 9e-05,
      "loss": 1.0822,
      "step": 27
    },
    {
      "epoch": 0.020133021750853856,
      "grad_norm": 0.3012605607509613,
      "learning_rate": 9.333333333333334e-05,
      "loss": 1.0352,
      "step": 28
    },
    {
      "epoch": 0.020852058241955778,
      "grad_norm": 0.3080648183822632,
      "learning_rate": 9.666666666666667e-05,
      "loss": 0.9947,
      "step": 29
    },
    {
      "epoch": 0.021571094733057704,
      "grad_norm": 0.30388250946998596,
      "learning_rate": 0.0001,
      "loss": 0.991,
      "step": 30
    },
    {
      "epoch": 0.022290131224159626,
      "grad_norm": 0.3022182881832123,
      "learning_rate": 9.999146252290264e-05,
      "loss": 0.9371,
      "step": 31
    },
    {
      "epoch": 0.02300916771526155,
      "grad_norm": 0.280342698097229,
      "learning_rate": 9.996585300715116e-05,
      "loss": 0.9211,
      "step": 32
    },
    {
      "epoch": 0.023728204206363474,
      "grad_norm": 0.30300894379615784,
      "learning_rate": 9.99231801983717e-05,
      "loss": 0.8932,
      "step": 33
    },
    {
      "epoch": 0.024447240697465396,
      "grad_norm": 0.27925336360931396,
      "learning_rate": 9.986345866928941e-05,
      "loss": 0.8711,
      "step": 34
    },
    {
      "epoch": 0.02516627718856732,
      "grad_norm": 0.2760774493217468,
      "learning_rate": 9.978670881475172e-05,
      "loss": 0.839,
      "step": 35
    },
    {
      "epoch": 0.025885313679669245,
      "grad_norm": 0.28717532753944397,
      "learning_rate": 9.96929568447637e-05,
      "loss": 0.8329,
      "step": 36
    },
    {
      "epoch": 0.026604350170771167,
      "grad_norm": 0.24982668459415436,
      "learning_rate": 9.958223477553714e-05,
      "loss": 0.7724,
      "step": 37
    },
    {
      "epoch": 0.02732338666187309,
      "grad_norm": 0.26853281259536743,
      "learning_rate": 9.94545804185573e-05,
      "loss": 0.7699,
      "step": 38
    },
    {
      "epoch": 0.028042423152975015,
      "grad_norm": 0.27748724818229675,
      "learning_rate": 9.931003736767013e-05,
      "loss": 0.7257,
      "step": 39
    },
    {
      "epoch": 0.028761459644076937,
      "grad_norm": 0.2774640917778015,
      "learning_rate": 9.91486549841951e-05,
      "loss": 0.7207,
      "step": 40
    },
    {
      "epoch": 0.02948049613517886,
      "grad_norm": 0.3005022704601288,
      "learning_rate": 9.89704883800683e-05,
      "loss": 0.6897,
      "step": 41
    },
    {
      "epoch": 0.030199532626280785,
      "grad_norm": 0.27618181705474854,
      "learning_rate": 9.877559839902184e-05,
      "loss": 0.6481,
      "step": 42
    },
    {
      "epoch": 0.030918569117382708,
      "grad_norm": 0.2797397971153259,
      "learning_rate": 9.85640515958057e-05,
      "loss": 0.6316,
      "step": 43
    },
    {
      "epoch": 0.03163760560848463,
      "grad_norm": 0.29535141587257385,
      "learning_rate": 9.833592021345937e-05,
      "loss": 0.6072,
      "step": 44
    },
    {
      "epoch": 0.03235664209958655,
      "grad_norm": 0.2769714295864105,
      "learning_rate": 9.809128215864097e-05,
      "loss": 0.5522,
      "step": 45
    },
    {
      "epoch": 0.033075678590688475,
      "grad_norm": 0.2524624466896057,
      "learning_rate": 9.783022097502204e-05,
      "loss": 0.5423,
      "step": 46
    },
    {
      "epoch": 0.033794715081790404,
      "grad_norm": 0.2533889412879944,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.5077,
      "step": 47
    },
    {
      "epoch": 0.034513751572892326,
      "grad_norm": 0.2663760483264923,
      "learning_rate": 9.725919140804099e-05,
      "loss": 0.4958,
      "step": 48
    },
    {
      "epoch": 0.03523278806399425,
      "grad_norm": 0.2982303500175476,
      "learning_rate": 9.694941803075283e-05,
      "loss": 0.48,
      "step": 49
    },
    {
      "epoch": 0.03595182455509617,
      "grad_norm": 0.3360510468482971,
      "learning_rate": 9.662361147021779e-05,
      "loss": 0.4886,
      "step": 50
    },
    {
      "epoch": 0.03595182455509617,
      "eval_loss": 0.543006181716919,
      "eval_runtime": 58.3426,
      "eval_samples_per_second": 40.142,
      "eval_steps_per_second": 20.071,
      "step": 50
    },
    {
      "epoch": 0.03667086104619809,
      "grad_norm": 0.4645315706729889,
      "learning_rate": 9.628188298907782e-05,
      "loss": 0.5215,
      "step": 51
    },
    {
      "epoch": 0.037389897537300015,
      "grad_norm": 0.4897192120552063,
      "learning_rate": 9.592434928729616e-05,
      "loss": 0.5516,
      "step": 52
    },
    {
      "epoch": 0.038108934028401945,
      "grad_norm": 0.8688023686408997,
      "learning_rate": 9.555113246230442e-05,
      "loss": 0.502,
      "step": 53
    },
    {
      "epoch": 0.03882797051950387,
      "grad_norm": 0.7859905362129211,
      "learning_rate": 9.516235996730645e-05,
      "loss": 0.5325,
      "step": 54
    },
    {
      "epoch": 0.03954700701060579,
      "grad_norm": 0.7686246633529663,
      "learning_rate": 9.475816456775313e-05,
      "loss": 0.6318,
      "step": 55
    },
    {
      "epoch": 0.04026604350170771,
      "grad_norm": 0.47948557138442993,
      "learning_rate": 9.43386842960031e-05,
      "loss": 0.6223,
      "step": 56
    },
    {
      "epoch": 0.040985079992809634,
      "grad_norm": 0.5028918385505676,
      "learning_rate": 9.39040624041849e-05,
      "loss": 0.548,
      "step": 57
    },
    {
      "epoch": 0.041704116483911556,
      "grad_norm": 0.530976414680481,
      "learning_rate": 9.345444731527642e-05,
      "loss": 0.5258,
      "step": 58
    },
    {
      "epoch": 0.042423152975013485,
      "grad_norm": 0.5062253475189209,
      "learning_rate": 9.298999257241863e-05,
      "loss": 0.5511,
      "step": 59
    },
    {
      "epoch": 0.04314218946611541,
      "grad_norm": 0.4118981659412384,
      "learning_rate": 9.251085678648072e-05,
      "loss": 0.5256,
      "step": 60
    },
    {
      "epoch": 0.04386122595721733,
      "grad_norm": 0.4127258360385895,
      "learning_rate": 9.201720358189464e-05,
      "loss": 0.461,
      "step": 61
    },
    {
      "epoch": 0.04458026244831925,
      "grad_norm": 0.6444300413131714,
      "learning_rate": 9.150920154077754e-05,
      "loss": 0.4467,
      "step": 62
    },
    {
      "epoch": 0.045299298939421175,
      "grad_norm": 0.5414140820503235,
      "learning_rate": 9.098702414536107e-05,
      "loss": 0.4442,
      "step": 63
    },
    {
      "epoch": 0.0460183354305231,
      "grad_norm": 0.27357229590415955,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.3968,
      "step": 64
    },
    {
      "epoch": 0.04673737192162502,
      "grad_norm": 0.25929877161979675,
      "learning_rate": 8.9900861364012e-05,
      "loss": 0.4143,
      "step": 65
    },
    {
      "epoch": 0.04745640841272695,
      "grad_norm": 0.27426910400390625,
      "learning_rate": 8.933724690167417e-05,
      "loss": 0.4045,
      "step": 66
    },
    {
      "epoch": 0.04817544490382887,
      "grad_norm": 0.32933494448661804,
      "learning_rate": 8.876019880555649e-05,
      "loss": 0.3966,
      "step": 67
    },
    {
      "epoch": 0.04889448139493079,
      "grad_norm": 0.2937968373298645,
      "learning_rate": 8.816991413705516e-05,
      "loss": 0.3956,
      "step": 68
    },
    {
      "epoch": 0.049613517886032715,
      "grad_norm": 0.3488810658454895,
      "learning_rate": 8.756659447784368e-05,
      "loss": 0.3861,
      "step": 69
    },
    {
      "epoch": 0.05033255437713464,
      "grad_norm": 0.23227158188819885,
      "learning_rate": 8.695044586103296e-05,
      "loss": 0.3739,
      "step": 70
    },
    {
      "epoch": 0.05105159086823656,
      "grad_norm": 0.21385645866394043,
      "learning_rate": 8.632167870081121e-05,
      "loss": 0.3851,
      "step": 71
    },
    {
      "epoch": 0.05177062735933849,
      "grad_norm": 0.27515462040901184,
      "learning_rate": 8.568050772058762e-05,
      "loss": 0.378,
      "step": 72
    },
    {
      "epoch": 0.05248966385044041,
      "grad_norm": 0.280422180891037,
      "learning_rate": 8.502715187966455e-05,
      "loss": 0.3699,
      "step": 73
    },
    {
      "epoch": 0.053208700341542334,
      "grad_norm": 0.25346243381500244,
      "learning_rate": 8.436183429846313e-05,
      "loss": 0.3905,
      "step": 74
    },
    {
      "epoch": 0.053927736832644256,
      "grad_norm": 0.26597610116004944,
      "learning_rate": 8.368478218232787e-05,
      "loss": 0.4217,
      "step": 75
    },
    {
      "epoch": 0.05464677332374618,
      "grad_norm": 0.23165351152420044,
      "learning_rate": 8.299622674393614e-05,
      "loss": 0.4055,
      "step": 76
    },
    {
      "epoch": 0.0553658098148481,
      "grad_norm": 0.23990345001220703,
      "learning_rate": 8.229640312433937e-05,
      "loss": 0.4223,
      "step": 77
    },
    {
      "epoch": 0.05608484630595003,
      "grad_norm": 0.19223831593990326,
      "learning_rate": 8.158555031266254e-05,
      "loss": 0.3819,
      "step": 78
    },
    {
      "epoch": 0.05680388279705195,
      "grad_norm": 0.17479577660560608,
      "learning_rate": 8.086391106448965e-05,
      "loss": 0.3974,
      "step": 79
    },
    {
      "epoch": 0.057522919288153875,
      "grad_norm": 0.20406880974769592,
      "learning_rate": 8.013173181896283e-05,
      "loss": 0.4011,
      "step": 80
    },
    {
      "epoch": 0.0582419557792558,
      "grad_norm": 0.2144848257303238,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.4025,
      "step": 81
    },
    {
      "epoch": 0.05896099227035772,
      "grad_norm": 0.2034243494272232,
      "learning_rate": 7.863675700402526e-05,
      "loss": 0.4417,
      "step": 82
    },
    {
      "epoch": 0.05968002876145964,
      "grad_norm": 0.20847761631011963,
      "learning_rate": 7.787447196714427e-05,
      "loss": 0.4451,
      "step": 83
    },
    {
      "epoch": 0.06039906525256157,
      "grad_norm": 0.15953093767166138,
      "learning_rate": 7.710266782362247e-05,
      "loss": 0.4236,
      "step": 84
    },
    {
      "epoch": 0.06111810174366349,
      "grad_norm": 0.178719624876976,
      "learning_rate": 7.63216081438678e-05,
      "loss": 0.4178,
      "step": 85
    },
    {
      "epoch": 0.061837138234765415,
      "grad_norm": 0.1618184745311737,
      "learning_rate": 7.553155965904535e-05,
      "loss": 0.3903,
      "step": 86
    },
    {
      "epoch": 0.06255617472586734,
      "grad_norm": 0.18507510423660278,
      "learning_rate": 7.473279216998895e-05,
      "loss": 0.3658,
      "step": 87
    },
    {
      "epoch": 0.06327521121696926,
      "grad_norm": 0.16280052065849304,
      "learning_rate": 7.392557845506432e-05,
      "loss": 0.3835,
      "step": 88
    },
    {
      "epoch": 0.06399424770807119,
      "grad_norm": 0.18812771141529083,
      "learning_rate": 7.311019417701566e-05,
      "loss": 0.3639,
      "step": 89
    },
    {
      "epoch": 0.0647132841991731,
      "grad_norm": 0.20103318989276886,
      "learning_rate": 7.228691778882693e-05,
      "loss": 0.3782,
      "step": 90
    },
    {
      "epoch": 0.06543232069027503,
      "grad_norm": 0.1834009438753128,
      "learning_rate": 7.145603043863045e-05,
      "loss": 0.3744,
      "step": 91
    },
    {
      "epoch": 0.06615135718137695,
      "grad_norm": 0.20019042491912842,
      "learning_rate": 7.061781587369519e-05,
      "loss": 0.3403,
      "step": 92
    },
    {
      "epoch": 0.06687039367247888,
      "grad_norm": 0.21144333481788635,
      "learning_rate": 6.977256034352712e-05,
      "loss": 0.3926,
      "step": 93
    },
    {
      "epoch": 0.06758943016358081,
      "grad_norm": 0.18462230265140533,
      "learning_rate": 6.892055250211552e-05,
      "loss": 0.3744,
      "step": 94
    },
    {
      "epoch": 0.06830846665468272,
      "grad_norm": 0.1614454835653305,
      "learning_rate": 6.806208330935766e-05,
      "loss": 0.3242,
      "step": 95
    },
    {
      "epoch": 0.06902750314578465,
      "grad_norm": 0.1568639725446701,
      "learning_rate": 6.719744593169641e-05,
      "loss": 0.3231,
      "step": 96
    },
    {
      "epoch": 0.06974653963688657,
      "grad_norm": 0.17419424653053284,
      "learning_rate": 6.632693564200416e-05,
      "loss": 0.3012,
      "step": 97
    },
    {
      "epoch": 0.0704655761279885,
      "grad_norm": 0.19043923914432526,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.2923,
      "step": 98
    },
    {
      "epoch": 0.07118461261909041,
      "grad_norm": 0.21909672021865845,
      "learning_rate": 6.456948734446624e-05,
      "loss": 0.3132,
      "step": 99
    },
    {
      "epoch": 0.07190364911019234,
      "grad_norm": 0.25625696778297424,
      "learning_rate": 6.368314950360415e-05,
      "loss": 0.3616,
      "step": 100
    },
    {
      "epoch": 0.07190364911019234,
      "eval_loss": 0.38074085116386414,
      "eval_runtime": 58.3498,
      "eval_samples_per_second": 40.137,
      "eval_steps_per_second": 20.069,
      "step": 100
    },
    {
      "epoch": 0.07262268560129427,
      "grad_norm": 0.33766958117485046,
      "learning_rate": 6.279213887972179e-05,
      "loss": 0.307,
      "step": 101
    },
    {
      "epoch": 0.07334172209239619,
      "grad_norm": 0.34318283200263977,
      "learning_rate": 6.189675975213094e-05,
      "loss": 0.4072,
      "step": 102
    },
    {
      "epoch": 0.07406075858349812,
      "grad_norm": 0.32589223980903625,
      "learning_rate": 6.099731789198344e-05,
      "loss": 0.4191,
      "step": 103
    },
    {
      "epoch": 0.07477979507460003,
      "grad_norm": 0.4203377664089203,
      "learning_rate": 6.009412045785051e-05,
      "loss": 0.3067,
      "step": 104
    },
    {
      "epoch": 0.07549883156570196,
      "grad_norm": 0.2928689122200012,
      "learning_rate": 5.918747589082853e-05,
      "loss": 0.4129,
      "step": 105
    },
    {
      "epoch": 0.07621786805680389,
      "grad_norm": 0.36445778608322144,
      "learning_rate": 5.82776938092065e-05,
      "loss": 0.4685,
      "step": 106
    },
    {
      "epoch": 0.0769369045479058,
      "grad_norm": 0.30332398414611816,
      "learning_rate": 5.736508490273188e-05,
      "loss": 0.447,
      "step": 107
    },
    {
      "epoch": 0.07765594103900773,
      "grad_norm": 0.345113605260849,
      "learning_rate": 5.644996082651017e-05,
      "loss": 0.437,
      "step": 108
    },
    {
      "epoch": 0.07837497753010965,
      "grad_norm": 0.48073431849479675,
      "learning_rate": 5.553263409457504e-05,
      "loss": 0.5559,
      "step": 109
    },
    {
      "epoch": 0.07909401402121158,
      "grad_norm": 0.36484697461128235,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 0.3887,
      "step": 110
    },
    {
      "epoch": 0.0798130505123135,
      "grad_norm": 0.36127138137817383,
      "learning_rate": 5.3692626373743706e-05,
      "loss": 0.3855,
      "step": 111
    },
    {
      "epoch": 0.08053208700341542,
      "grad_norm": 0.4991846978664398,
      "learning_rate": 5.27705737457985e-05,
      "loss": 0.4507,
      "step": 112
    },
    {
      "epoch": 0.08125112349451735,
      "grad_norm": 0.3157109320163727,
      "learning_rate": 5.184757496945726e-05,
      "loss": 0.3359,
      "step": 113
    },
    {
      "epoch": 0.08197015998561927,
      "grad_norm": 0.27371692657470703,
      "learning_rate": 5.092394524795649e-05,
      "loss": 0.3133,
      "step": 114
    },
    {
      "epoch": 0.0826891964767212,
      "grad_norm": 0.2584126889705658,
      "learning_rate": 5e-05,
      "loss": 0.3422,
      "step": 115
    },
    {
      "epoch": 0.08340823296782311,
      "grad_norm": 0.25695544481277466,
      "learning_rate": 4.907605475204352e-05,
      "loss": 0.3427,
      "step": 116
    },
    {
      "epoch": 0.08412726945892504,
      "grad_norm": 0.2519172430038452,
      "learning_rate": 4.8152425030542766e-05,
      "loss": 0.3448,
      "step": 117
    },
    {
      "epoch": 0.08484630595002697,
      "grad_norm": 0.2275260090827942,
      "learning_rate": 4.72294262542015e-05,
      "loss": 0.3182,
      "step": 118
    },
    {
      "epoch": 0.08556534244112889,
      "grad_norm": 0.207272008061409,
      "learning_rate": 4.6307373626256306e-05,
      "loss": 0.3349,
      "step": 119
    },
    {
      "epoch": 0.08628437893223082,
      "grad_norm": 0.2518884241580963,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 0.3105,
      "step": 120
    },
    {
      "epoch": 0.08700341542333273,
      "grad_norm": 0.24043984711170197,
      "learning_rate": 4.446736590542497e-05,
      "loss": 0.3513,
      "step": 121
    },
    {
      "epoch": 0.08772245191443466,
      "grad_norm": 0.2303398847579956,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 0.3194,
      "step": 122
    },
    {
      "epoch": 0.08844148840553658,
      "grad_norm": 0.2184295505285263,
      "learning_rate": 4.2634915097268115e-05,
      "loss": 0.325,
      "step": 123
    },
    {
      "epoch": 0.0891605248966385,
      "grad_norm": 0.23223859071731567,
      "learning_rate": 4.1722306190793495e-05,
      "loss": 0.3429,
      "step": 124
    },
    {
      "epoch": 0.08987956138774043,
      "grad_norm": 0.20050419867038727,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 0.3547,
      "step": 125
    },
    {
      "epoch": 0.09059859787884235,
      "grad_norm": 0.17888019979000092,
      "learning_rate": 3.99058795421495e-05,
      "loss": 0.3294,
      "step": 126
    },
    {
      "epoch": 0.09131763436994428,
      "grad_norm": 0.21177802979946136,
      "learning_rate": 3.9002682108016585e-05,
      "loss": 0.3581,
      "step": 127
    },
    {
      "epoch": 0.0920366708610462,
      "grad_norm": 0.21987402439117432,
      "learning_rate": 3.8103240247869075e-05,
      "loss": 0.3762,
      "step": 128
    },
    {
      "epoch": 0.09275570735214812,
      "grad_norm": 0.20270776748657227,
      "learning_rate": 3.720786112027822e-05,
      "loss": 0.3337,
      "step": 129
    },
    {
      "epoch": 0.09347474384325004,
      "grad_norm": 0.19828373193740845,
      "learning_rate": 3.631685049639586e-05,
      "loss": 0.3515,
      "step": 130
    },
    {
      "epoch": 0.09419378033435197,
      "grad_norm": 0.20338848233222961,
      "learning_rate": 3.543051265553377e-05,
      "loss": 0.3462,
      "step": 131
    },
    {
      "epoch": 0.0949128168254539,
      "grad_norm": 0.2387722134590149,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.393,
      "step": 132
    },
    {
      "epoch": 0.09563185331655581,
      "grad_norm": 0.1974070519208908,
      "learning_rate": 3.367306435799584e-05,
      "loss": 0.3852,
      "step": 133
    },
    {
      "epoch": 0.09635088980765774,
      "grad_norm": 0.15162739157676697,
      "learning_rate": 3.2802554068303596e-05,
      "loss": 0.3544,
      "step": 134
    },
    {
      "epoch": 0.09706992629875966,
      "grad_norm": 0.18993422389030457,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 0.3877,
      "step": 135
    },
    {
      "epoch": 0.09778896278986159,
      "grad_norm": 0.2038678079843521,
      "learning_rate": 3.107944749788449e-05,
      "loss": 0.3657,
      "step": 136
    },
    {
      "epoch": 0.09850799928096352,
      "grad_norm": 0.18316876888275146,
      "learning_rate": 3.0227439656472877e-05,
      "loss": 0.3442,
      "step": 137
    },
    {
      "epoch": 0.09922703577206543,
      "grad_norm": 0.18696491420269012,
      "learning_rate": 2.9382184126304834e-05,
      "loss": 0.3674,
      "step": 138
    },
    {
      "epoch": 0.09994607226316736,
      "grad_norm": 0.15872074663639069,
      "learning_rate": 2.8543969561369556e-05,
      "loss": 0.3374,
      "step": 139
    },
    {
      "epoch": 0.10066510875426928,
      "grad_norm": 0.1745549738407135,
      "learning_rate": 2.771308221117309e-05,
      "loss": 0.3503,
      "step": 140
    },
    {
      "epoch": 0.1013841452453712,
      "grad_norm": 0.20435255765914917,
      "learning_rate": 2.688980582298435e-05,
      "loss": 0.3567,
      "step": 141
    },
    {
      "epoch": 0.10210318173647312,
      "grad_norm": 0.18533742427825928,
      "learning_rate": 2.607442154493568e-05,
      "loss": 0.3254,
      "step": 142
    },
    {
      "epoch": 0.10282221822757505,
      "grad_norm": 0.15743325650691986,
      "learning_rate": 2.5267207830011068e-05,
      "loss": 0.3218,
      "step": 143
    },
    {
      "epoch": 0.10354125471867698,
      "grad_norm": 0.15209344029426575,
      "learning_rate": 2.446844034095466e-05,
      "loss": 0.316,
      "step": 144
    },
    {
      "epoch": 0.1042602912097789,
      "grad_norm": 0.16973493993282318,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 0.2995,
      "step": 145
    },
    {
      "epoch": 0.10497932770088082,
      "grad_norm": 0.17065779864788055,
      "learning_rate": 2.2897332176377528e-05,
      "loss": 0.2766,
      "step": 146
    },
    {
      "epoch": 0.10569836419198274,
      "grad_norm": 0.20378005504608154,
      "learning_rate": 2.2125528032855724e-05,
      "loss": 0.3278,
      "step": 147
    },
    {
      "epoch": 0.10641740068308467,
      "grad_norm": 0.17352186143398285,
      "learning_rate": 2.136324299597474e-05,
      "loss": 0.2776,
      "step": 148
    },
    {
      "epoch": 0.1071364371741866,
      "grad_norm": 0.19107244908809662,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.316,
      "step": 149
    },
    {
      "epoch": 0.10785547366528851,
      "grad_norm": 0.2604922950267792,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 0.3095,
      "step": 150
    },
    {
      "epoch": 0.10785547366528851,
      "eval_loss": 0.34555259346961975,
      "eval_runtime": 58.8178,
      "eval_samples_per_second": 39.818,
      "eval_steps_per_second": 19.909,
      "step": 150
    },
    {
      "epoch": 0.10857451015639044,
      "grad_norm": 0.2277640551328659,
      "learning_rate": 1.9136088935510362e-05,
      "loss": 0.2502,
      "step": 151
    },
    {
      "epoch": 0.10929354664749236,
      "grad_norm": 0.2793837785720825,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 0.2861,
      "step": 152
    },
    {
      "epoch": 0.11001258313859429,
      "grad_norm": 0.44137507677078247,
      "learning_rate": 1.7703596875660645e-05,
      "loss": 0.405,
      "step": 153
    },
    {
      "epoch": 0.1107316196296962,
      "grad_norm": 0.2606457769870758,
      "learning_rate": 1.700377325606388e-05,
      "loss": 0.2649,
      "step": 154
    },
    {
      "epoch": 0.11145065612079813,
      "grad_norm": 0.33907219767570496,
      "learning_rate": 1.631521781767214e-05,
      "loss": 0.403,
      "step": 155
    },
    {
      "epoch": 0.11216969261190006,
      "grad_norm": 0.28054168820381165,
      "learning_rate": 1.5638165701536868e-05,
      "loss": 0.3893,
      "step": 156
    },
    {
      "epoch": 0.11288872910300198,
      "grad_norm": 0.31116437911987305,
      "learning_rate": 1.4972848120335453e-05,
      "loss": 0.4007,
      "step": 157
    },
    {
      "epoch": 0.1136077655941039,
      "grad_norm": 0.28493228554725647,
      "learning_rate": 1.4319492279412388e-05,
      "loss": 0.3631,
      "step": 158
    },
    {
      "epoch": 0.11432680208520582,
      "grad_norm": 0.3235074281692505,
      "learning_rate": 1.3678321299188801e-05,
      "loss": 0.3939,
      "step": 159
    },
    {
      "epoch": 0.11504583857630775,
      "grad_norm": 0.30169492959976196,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 0.329,
      "step": 160
    },
    {
      "epoch": 0.11576487506740966,
      "grad_norm": 0.9609299302101135,
      "learning_rate": 1.2433405522156332e-05,
      "loss": 0.5237,
      "step": 161
    },
    {
      "epoch": 0.1164839115585116,
      "grad_norm": 0.24112538993358612,
      "learning_rate": 1.183008586294485e-05,
      "loss": 0.3021,
      "step": 162
    },
    {
      "epoch": 0.11720294804961352,
      "grad_norm": 0.24137409031391144,
      "learning_rate": 1.1239801194443506e-05,
      "loss": 0.2978,
      "step": 163
    },
    {
      "epoch": 0.11792198454071544,
      "grad_norm": 0.28550300002098083,
      "learning_rate": 1.066275309832584e-05,
      "loss": 0.3217,
      "step": 164
    },
    {
      "epoch": 0.11864102103181737,
      "grad_norm": 0.29699841141700745,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 0.2758,
      "step": 165
    },
    {
      "epoch": 0.11936005752291928,
      "grad_norm": 0.27138757705688477,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.2664,
      "step": 166
    },
    {
      "epoch": 0.12007909401402121,
      "grad_norm": 0.2857905626296997,
      "learning_rate": 9.012975854638949e-06,
      "loss": 0.3008,
      "step": 167
    },
    {
      "epoch": 0.12079813050512314,
      "grad_norm": 0.21144956350326538,
      "learning_rate": 8.490798459222476e-06,
      "loss": 0.2829,
      "step": 168
    },
    {
      "epoch": 0.12151716699622506,
      "grad_norm": 0.22459468245506287,
      "learning_rate": 7.982796418105371e-06,
      "loss": 0.2816,
      "step": 169
    },
    {
      "epoch": 0.12223620348732699,
      "grad_norm": 0.19300833344459534,
      "learning_rate": 7.489143213519301e-06,
      "loss": 0.3023,
      "step": 170
    },
    {
      "epoch": 0.1229552399784289,
      "grad_norm": 0.2020920366048813,
      "learning_rate": 7.010007427581378e-06,
      "loss": 0.319,
      "step": 171
    },
    {
      "epoch": 0.12367427646953083,
      "grad_norm": 0.20110531151294708,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 0.32,
      "step": 172
    },
    {
      "epoch": 0.12439331296063275,
      "grad_norm": 0.17036013305187225,
      "learning_rate": 6.0959375958151045e-06,
      "loss": 0.3051,
      "step": 173
    },
    {
      "epoch": 0.1251123494517347,
      "grad_norm": 0.17303591966629028,
      "learning_rate": 5.6613157039969055e-06,
      "loss": 0.3138,
      "step": 174
    },
    {
      "epoch": 0.1258313859428366,
      "grad_norm": 0.19305406510829926,
      "learning_rate": 5.241835432246889e-06,
      "loss": 0.3315,
      "step": 175
    },
    {
      "epoch": 0.12655042243393852,
      "grad_norm": 0.19402852654457092,
      "learning_rate": 4.837640032693558e-06,
      "loss": 0.3325,
      "step": 176
    },
    {
      "epoch": 0.12726945892504044,
      "grad_norm": 0.19571861624717712,
      "learning_rate": 4.448867537695578e-06,
      "loss": 0.3536,
      "step": 177
    },
    {
      "epoch": 0.12798849541614238,
      "grad_norm": 0.17924807965755463,
      "learning_rate": 4.075650712703849e-06,
      "loss": 0.3514,
      "step": 178
    },
    {
      "epoch": 0.1287075319072443,
      "grad_norm": 0.1848725527524948,
      "learning_rate": 3.71811701092219e-06,
      "loss": 0.3485,
      "step": 179
    },
    {
      "epoch": 0.1294265683983462,
      "grad_norm": 0.1518690586090088,
      "learning_rate": 3.376388529782215e-06,
      "loss": 0.3308,
      "step": 180
    },
    {
      "epoch": 0.13014560488944815,
      "grad_norm": 0.19320723414421082,
      "learning_rate": 3.0505819692471792e-06,
      "loss": 0.3754,
      "step": 181
    },
    {
      "epoch": 0.13086464138055007,
      "grad_norm": 0.15887463092803955,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 0.3554,
      "step": 182
    },
    {
      "epoch": 0.13158367787165198,
      "grad_norm": 0.19461137056350708,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.3837,
      "step": 183
    },
    {
      "epoch": 0.1323027143627539,
      "grad_norm": 0.20374387502670288,
      "learning_rate": 2.1697790249779636e-06,
      "loss": 0.3782,
      "step": 184
    },
    {
      "epoch": 0.13302175085385584,
      "grad_norm": 0.17451471090316772,
      "learning_rate": 1.908717841359048e-06,
      "loss": 0.3439,
      "step": 185
    },
    {
      "epoch": 0.13374078734495776,
      "grad_norm": 0.18102188408374786,
      "learning_rate": 1.6640797865406288e-06,
      "loss": 0.3599,
      "step": 186
    },
    {
      "epoch": 0.13445982383605967,
      "grad_norm": 0.19148917496204376,
      "learning_rate": 1.4359484041943038e-06,
      "loss": 0.3331,
      "step": 187
    },
    {
      "epoch": 0.13517886032716162,
      "grad_norm": 0.21265220642089844,
      "learning_rate": 1.2244016009781701e-06,
      "loss": 0.3481,
      "step": 188
    },
    {
      "epoch": 0.13589789681826353,
      "grad_norm": 0.21602880954742432,
      "learning_rate": 1.0295116199317057e-06,
      "loss": 0.3241,
      "step": 189
    },
    {
      "epoch": 0.13661693330936545,
      "grad_norm": 0.2161773294210434,
      "learning_rate": 8.513450158049108e-07,
      "loss": 0.3491,
      "step": 190
    },
    {
      "epoch": 0.13733596980046736,
      "grad_norm": 0.2377278059720993,
      "learning_rate": 6.899626323298713e-07,
      "loss": 0.3494,
      "step": 191
    },
    {
      "epoch": 0.1380550062915693,
      "grad_norm": 0.20273280143737793,
      "learning_rate": 5.454195814427021e-07,
      "loss": 0.3559,
      "step": 192
    },
    {
      "epoch": 0.13877404278267122,
      "grad_norm": 0.26119890809059143,
      "learning_rate": 4.177652244628627e-07,
      "loss": 0.3343,
      "step": 193
    },
    {
      "epoch": 0.13949307927377314,
      "grad_norm": 0.24087214469909668,
      "learning_rate": 3.0704315523631953e-07,
      "loss": 0.3304,
      "step": 194
    },
    {
      "epoch": 0.14021211576487508,
      "grad_norm": 0.23309563100337982,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 0.3333,
      "step": 195
    },
    {
      "epoch": 0.140931152255977,
      "grad_norm": 0.20350925624370575,
      "learning_rate": 1.3654133071059893e-07,
      "loss": 0.2935,
      "step": 196
    },
    {
      "epoch": 0.1416501887470789,
      "grad_norm": 0.22840029001235962,
      "learning_rate": 7.681980162830282e-08,
      "loss": 0.299,
      "step": 197
    },
    {
      "epoch": 0.14236922523818082,
      "grad_norm": 0.24860765039920807,
      "learning_rate": 3.4146992848854695e-08,
      "loss": 0.2984,
      "step": 198
    },
    {
      "epoch": 0.14308826172928277,
      "grad_norm": 0.28271716833114624,
      "learning_rate": 8.537477097364522e-09,
      "loss": 0.2937,
      "step": 199
    },
    {
      "epoch": 0.14380729822038468,
      "grad_norm": 0.37126749753952026,
      "learning_rate": 0.0,
      "loss": 0.2713,
      "step": 200
    },
    {
      "epoch": 0.14380729822038468,
      "eval_loss": 0.3401404023170471,
      "eval_runtime": 58.8186,
      "eval_samples_per_second": 39.817,
      "eval_steps_per_second": 19.909,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6229883634057216.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|