{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9444444444444446,
  "eval_steps": 27,
  "global_step": 324,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009259259259259259,
      "grad_norm": 3.136354684829712,
      "learning_rate": 3.0000000000000004e-08,
      "loss": 7.126,
      "step": 1
    },
    {
      "epoch": 0.009259259259259259,
      "eval_loss": 5.272334098815918,
      "eval_runtime": 10.9491,
      "eval_samples_per_second": 9.59,
      "eval_steps_per_second": 9.59,
      "step": 1
    },
    {
      "epoch": 0.018518518518518517,
      "grad_norm": 1.6424509286880493,
      "learning_rate": 6.000000000000001e-08,
      "loss": 6.2434,
      "step": 2
    },
    {
      "epoch": 0.027777777777777776,
      "grad_norm": 1.3273869752883911,
      "learning_rate": 9e-08,
      "loss": 5.851,
      "step": 3
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 1.9482982158660889,
      "learning_rate": 1.2000000000000002e-07,
      "loss": 7.0582,
      "step": 4
    },
    {
      "epoch": 0.046296296296296294,
      "grad_norm": 2.214090347290039,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 6.2122,
      "step": 5
    },
    {
      "epoch": 0.05555555555555555,
      "grad_norm": 1.6521217823028564,
      "learning_rate": 1.8e-07,
      "loss": 6.0318,
      "step": 6
    },
    {
      "epoch": 0.06481481481481481,
      "grad_norm": 1.2911734580993652,
      "learning_rate": 2.1000000000000003e-07,
      "loss": 6.1264,
      "step": 7
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 1.2651853561401367,
      "learning_rate": 2.4000000000000003e-07,
      "loss": 5.6821,
      "step": 8
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 1.2058266401290894,
      "learning_rate": 2.7e-07,
      "loss": 5.7295,
      "step": 9
    },
    {
      "epoch": 0.09259259259259259,
      "grad_norm": 2.896589994430542,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 7.4781,
      "step": 10
    },
    {
      "epoch": 0.10185185185185185,
      "grad_norm": 1.313854694366455,
      "learning_rate": 3.3e-07,
      "loss": 6.1254,
      "step": 11
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 2.281994104385376,
      "learning_rate": 3.6e-07,
      "loss": 6.683,
      "step": 12
    },
    {
      "epoch": 0.12037037037037036,
      "grad_norm": 1.2343209981918335,
      "learning_rate": 3.9e-07,
      "loss": 6.2304,
      "step": 13
    },
    {
      "epoch": 0.12962962962962962,
      "grad_norm": 1.3297009468078613,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 6.2013,
      "step": 14
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 1.9952207803726196,
      "learning_rate": 4.5e-07,
      "loss": 6.9842,
      "step": 15
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 1.3968197107315063,
      "learning_rate": 4.800000000000001e-07,
      "loss": 6.2524,
      "step": 16
    },
    {
      "epoch": 0.1574074074074074,
      "grad_norm": 4.7724127769470215,
      "learning_rate": 5.100000000000001e-07,
      "loss": 8.551,
      "step": 17
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.9988604187965393,
      "learning_rate": 5.4e-07,
      "loss": 5.5391,
      "step": 18
    },
    {
      "epoch": 0.17592592592592593,
      "grad_norm": 1.2599544525146484,
      "learning_rate": 5.7e-07,
      "loss": 6.4559,
      "step": 19
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 1.984928846359253,
      "learning_rate": 6.000000000000001e-07,
      "loss": 6.954,
      "step": 20
    },
    {
      "epoch": 0.19444444444444445,
      "grad_norm": 1.2283947467803955,
      "learning_rate": 6.3e-07,
      "loss": 6.1821,
      "step": 21
    },
    {
      "epoch": 0.2037037037037037,
      "grad_norm": 2.2317938804626465,
      "learning_rate": 6.6e-07,
      "loss": 6.9164,
      "step": 22
    },
    {
      "epoch": 0.21296296296296297,
      "grad_norm": 1.5473928451538086,
      "learning_rate": 6.900000000000001e-07,
      "loss": 6.5494,
      "step": 23
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 1.508975625038147,
      "learning_rate": 7.2e-07,
      "loss": 6.4025,
      "step": 24
    },
    {
      "epoch": 0.23148148148148148,
      "grad_norm": 2.4396748542785645,
      "learning_rate": 7.5e-07,
      "loss": 7.2659,
      "step": 25
    },
    {
      "epoch": 0.24074074074074073,
      "grad_norm": 1.1266586780548096,
      "learning_rate": 7.8e-07,
      "loss": 6.0911,
      "step": 26
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.515765905380249,
      "learning_rate": 8.100000000000001e-07,
      "loss": 6.503,
      "step": 27
    },
    {
      "epoch": 0.25,
      "eval_loss": 5.270267963409424,
      "eval_runtime": 10.4888,
      "eval_samples_per_second": 10.011,
      "eval_steps_per_second": 10.011,
      "step": 27
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 1.2845295667648315,
      "learning_rate": 8.400000000000001e-07,
      "loss": 6.3435,
      "step": 28
    },
    {
      "epoch": 0.26851851851851855,
      "grad_norm": 1.528168797492981,
      "learning_rate": 8.699999999999999e-07,
      "loss": 6.8436,
      "step": 29
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 1.555953860282898,
      "learning_rate": 9e-07,
      "loss": 6.404,
      "step": 30
    },
    {
      "epoch": 0.28703703703703703,
      "grad_norm": 1.293718934059143,
      "learning_rate": 9.3e-07,
      "loss": 6.0386,
      "step": 31
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 1.0446598529815674,
      "learning_rate": 9.600000000000001e-07,
      "loss": 5.677,
      "step": 32
    },
    {
      "epoch": 0.3055555555555556,
      "grad_norm": 1.3030468225479126,
      "learning_rate": 9.9e-07,
      "loss": 5.8789,
      "step": 33
    },
    {
      "epoch": 0.3148148148148148,
      "grad_norm": 1.108596682548523,
      "learning_rate": 1.0200000000000002e-06,
      "loss": 5.8663,
      "step": 34
    },
    {
      "epoch": 0.32407407407407407,
      "grad_norm": 1.833085536956787,
      "learning_rate": 1.05e-06,
      "loss": 5.7367,
      "step": 35
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 1.561715006828308,
      "learning_rate": 1.08e-06,
      "loss": 6.2266,
      "step": 36
    },
    {
      "epoch": 0.3425925925925926,
      "grad_norm": 1.6098933219909668,
      "learning_rate": 1.11e-06,
      "loss": 6.3335,
      "step": 37
    },
    {
      "epoch": 0.35185185185185186,
      "grad_norm": 1.6193208694458008,
      "learning_rate": 1.14e-06,
      "loss": 6.3869,
      "step": 38
    },
    {
      "epoch": 0.3611111111111111,
      "grad_norm": 1.5229904651641846,
      "learning_rate": 1.17e-06,
      "loss": 6.0453,
      "step": 39
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 1.511690616607666,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 6.7203,
      "step": 40
    },
    {
      "epoch": 0.37962962962962965,
      "grad_norm": 0.9691258668899536,
      "learning_rate": 1.2299999999999999e-06,
      "loss": 5.8176,
      "step": 41
    },
    {
      "epoch": 0.3888888888888889,
      "grad_norm": 1.1707255840301514,
      "learning_rate": 1.26e-06,
      "loss": 6.2779,
      "step": 42
    },
    {
      "epoch": 0.39814814814814814,
      "grad_norm": 1.1281574964523315,
      "learning_rate": 1.29e-06,
      "loss": 5.8402,
      "step": 43
    },
    {
      "epoch": 0.4074074074074074,
      "grad_norm": 3.945136308670044,
      "learning_rate": 1.32e-06,
      "loss": 7.5325,
      "step": 44
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 1.8681273460388184,
      "learning_rate": 1.35e-06,
      "loss": 6.3374,
      "step": 45
    },
    {
      "epoch": 0.42592592592592593,
      "grad_norm": 1.300478458404541,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 5.9715,
      "step": 46
    },
    {
      "epoch": 0.4351851851851852,
      "grad_norm": 1.2660181522369385,
      "learning_rate": 1.41e-06,
      "loss": 6.0421,
      "step": 47
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 1.817872405052185,
      "learning_rate": 1.44e-06,
      "loss": 7.0875,
      "step": 48
    },
    {
      "epoch": 0.4537037037037037,
      "grad_norm": 1.6538351774215698,
      "learning_rate": 1.4700000000000001e-06,
      "loss": 6.1825,
      "step": 49
    },
    {
      "epoch": 0.46296296296296297,
      "grad_norm": 1.5821293592453003,
      "learning_rate": 1.5e-06,
      "loss": 6.0065,
      "step": 50
    },
    {
      "epoch": 0.4722222222222222,
      "grad_norm": 1.5061155557632446,
      "learning_rate": 1.53e-06,
      "loss": 5.8156,
      "step": 51
    },
    {
      "epoch": 0.48148148148148145,
      "grad_norm": 1.495235562324524,
      "learning_rate": 1.56e-06,
      "loss": 5.792,
      "step": 52
    },
    {
      "epoch": 0.49074074074074076,
      "grad_norm": 1.2974352836608887,
      "learning_rate": 1.59e-06,
      "loss": 6.3621,
      "step": 53
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.2943974733352661,
      "learning_rate": 1.6200000000000002e-06,
      "loss": 5.9853,
      "step": 54
    },
    {
      "epoch": 0.5,
      "eval_loss": 5.2575860023498535,
      "eval_runtime": 10.6855,
      "eval_samples_per_second": 9.826,
      "eval_steps_per_second": 9.826,
      "step": 54
    },
    {
      "epoch": 0.5092592592592593,
      "grad_norm": 1.0820438861846924,
      "learning_rate": 1.65e-06,
      "loss": 5.7026,
      "step": 55
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 1.9304187297821045,
      "learning_rate": 1.6800000000000002e-06,
      "loss": 6.5805,
      "step": 56
    },
    {
      "epoch": 0.5277777777777778,
      "grad_norm": 1.238295555114746,
      "learning_rate": 1.71e-06,
      "loss": 5.9397,
      "step": 57
    },
    {
      "epoch": 0.5370370370370371,
      "grad_norm": 1.7372101545333862,
      "learning_rate": 1.7399999999999999e-06,
      "loss": 5.8761,
      "step": 58
    },
    {
      "epoch": 0.5462962962962963,
      "grad_norm": 1.6708195209503174,
      "learning_rate": 1.77e-06,
      "loss": 6.0646,
      "step": 59
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.5075998306274414,
      "learning_rate": 1.8e-06,
      "loss": 6.276,
      "step": 60
    },
    {
      "epoch": 0.5648148148148148,
      "grad_norm": 1.4556330442428589,
      "learning_rate": 1.83e-06,
      "loss": 6.4906,
      "step": 61
    },
    {
      "epoch": 0.5740740740740741,
      "grad_norm": 1.2507421970367432,
      "learning_rate": 1.86e-06,
      "loss": 6.2285,
      "step": 62
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 1.1143450736999512,
      "learning_rate": 1.8900000000000001e-06,
      "loss": 5.7344,
      "step": 63
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 2.05438232421875,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 6.85,
      "step": 64
    },
    {
      "epoch": 0.6018518518518519,
      "grad_norm": 1.104891300201416,
      "learning_rate": 1.95e-06,
      "loss": 6.1067,
      "step": 65
    },
    {
      "epoch": 0.6111111111111112,
      "grad_norm": 1.34933602809906,
      "learning_rate": 1.98e-06,
      "loss": 6.1659,
      "step": 66
    },
    {
      "epoch": 0.6203703703703703,
      "grad_norm": 1.5707837343215942,
      "learning_rate": 2.0100000000000002e-06,
      "loss": 6.1741,
      "step": 67
    },
    {
      "epoch": 0.6296296296296297,
      "grad_norm": 0.9905802607536316,
      "learning_rate": 2.0400000000000004e-06,
      "loss": 5.9072,
      "step": 68
    },
    {
      "epoch": 0.6388888888888888,
      "grad_norm": 1.134399652481079,
      "learning_rate": 2.07e-06,
      "loss": 5.8005,
      "step": 69
    },
    {
      "epoch": 0.6481481481481481,
      "grad_norm": 1.7359548807144165,
      "learning_rate": 2.1e-06,
      "loss": 6.7931,
      "step": 70
    },
    {
      "epoch": 0.6574074074074074,
      "grad_norm": 1.9637112617492676,
      "learning_rate": 2.13e-06,
      "loss": 6.0,
      "step": 71
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.908047080039978,
      "learning_rate": 2.16e-06,
      "loss": 6.9322,
      "step": 72
    },
    {
      "epoch": 0.6759259259259259,
      "grad_norm": 1.6848489046096802,
      "learning_rate": 2.19e-06,
      "loss": 6.3743,
      "step": 73
    },
    {
      "epoch": 0.6851851851851852,
      "grad_norm": 1.158486247062683,
      "learning_rate": 2.22e-06,
      "loss": 5.7395,
      "step": 74
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 2.9409470558166504,
      "learning_rate": 2.25e-06,
      "loss": 7.1889,
      "step": 75
    },
    {
      "epoch": 0.7037037037037037,
      "grad_norm": 1.5408318042755127,
      "learning_rate": 2.28e-06,
      "loss": 6.5631,
      "step": 76
    },
    {
      "epoch": 0.7129629629629629,
      "grad_norm": 1.3003695011138916,
      "learning_rate": 2.31e-06,
      "loss": 5.8228,
      "step": 77
    },
    {
      "epoch": 0.7222222222222222,
      "grad_norm": 1.6015976667404175,
      "learning_rate": 2.34e-06,
      "loss": 6.0852,
      "step": 78
    },
    {
      "epoch": 0.7314814814814815,
      "grad_norm": 1.282989501953125,
      "learning_rate": 2.37e-06,
      "loss": 6.0135,
      "step": 79
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 1.3845688104629517,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 5.9315,
      "step": 80
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.431152105331421,
      "learning_rate": 2.43e-06,
      "loss": 5.7324,
      "step": 81
    },
    {
      "epoch": 0.75,
      "eval_loss": 5.23203182220459,
      "eval_runtime": 10.3071,
      "eval_samples_per_second": 10.187,
      "eval_steps_per_second": 10.187,
      "step": 81
    },
    {
      "epoch": 0.7592592592592593,
      "grad_norm": 1.7849000692367554,
      "learning_rate": 2.4599999999999997e-06,
      "loss": 6.3697,
      "step": 82
    },
    {
      "epoch": 0.7685185185185185,
      "grad_norm": 1.2546958923339844,
      "learning_rate": 2.49e-06,
      "loss": 6.4323,
      "step": 83
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.3903110027313232,
      "learning_rate": 2.52e-06,
      "loss": 6.1479,
      "step": 84
    },
    {
      "epoch": 0.7870370370370371,
      "grad_norm": 1.8783302307128906,
      "learning_rate": 2.55e-06,
      "loss": 5.7606,
      "step": 85
    },
    {
      "epoch": 0.7962962962962963,
      "grad_norm": 1.2064532041549683,
      "learning_rate": 2.58e-06,
      "loss": 6.0706,
      "step": 86
    },
    {
      "epoch": 0.8055555555555556,
      "grad_norm": 1.9301965236663818,
      "learning_rate": 2.61e-06,
      "loss": 6.2393,
      "step": 87
    },
    {
      "epoch": 0.8148148148148148,
      "grad_norm": 1.259318232536316,
      "learning_rate": 2.64e-06,
      "loss": 5.8821,
      "step": 88
    },
    {
      "epoch": 0.8240740740740741,
      "grad_norm": 1.88141667842865,
      "learning_rate": 2.6700000000000003e-06,
      "loss": 6.8863,
      "step": 89
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.1480059623718262,
      "learning_rate": 2.7e-06,
      "loss": 5.7941,
      "step": 90
    },
    {
      "epoch": 0.8425925925925926,
      "grad_norm": 1.1357027292251587,
      "learning_rate": 2.73e-06,
      "loss": 5.7938,
      "step": 91
    },
    {
      "epoch": 0.8518518518518519,
      "grad_norm": 1.2564626932144165,
      "learning_rate": 2.7600000000000003e-06,
      "loss": 5.8955,
      "step": 92
    },
    {
      "epoch": 0.8611111111111112,
      "grad_norm": 1.5674831867218018,
      "learning_rate": 2.7900000000000004e-06,
      "loss": 6.0711,
      "step": 93
    },
    {
      "epoch": 0.8703703703703703,
      "grad_norm": 2.784726142883301,
      "learning_rate": 2.82e-06,
      "loss": 6.9214,
      "step": 94
    },
    {
      "epoch": 0.8796296296296297,
      "grad_norm": 1.2815853357315063,
      "learning_rate": 2.85e-06,
      "loss": 5.9669,
      "step": 95
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 3.059992551803589,
      "learning_rate": 2.88e-06,
      "loss": 7.6911,
      "step": 96
    },
    {
      "epoch": 0.8981481481481481,
      "grad_norm": 1.2001030445098877,
      "learning_rate": 2.91e-06,
      "loss": 6.1174,
      "step": 97
    },
    {
      "epoch": 0.9074074074074074,
      "grad_norm": 1.685596227645874,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 6.4517,
      "step": 98
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 1.5668795108795166,
      "learning_rate": 2.97e-06,
      "loss": 6.3579,
      "step": 99
    },
    {
      "epoch": 0.9259259259259259,
      "grad_norm": 1.2336190938949585,
      "learning_rate": 3e-06,
      "loss": 6.2871,
      "step": 100
    },
    {
      "epoch": 0.9351851851851852,
      "grad_norm": 1.331573247909546,
      "learning_rate": 2.9998524776393626e-06,
      "loss": 5.9677,
      "step": 101
    },
    {
      "epoch": 0.9444444444444444,
      "grad_norm": 1.5391007661819458,
      "learning_rate": 2.9994099395745795e-06,
      "loss": 6.6068,
      "step": 102
    },
    {
      "epoch": 0.9537037037037037,
      "grad_norm": 4.519767761230469,
      "learning_rate": 2.9986724728513302e-06,
      "loss": 8.1238,
      "step": 103
    },
    {
      "epoch": 0.9629629629629629,
      "grad_norm": 1.5254385471343994,
      "learning_rate": 2.997640222526725e-06,
      "loss": 6.4137,
      "step": 104
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 1.28110671043396,
      "learning_rate": 2.996313391640769e-06,
      "loss": 5.887,
      "step": 105
    },
    {
      "epoch": 0.9814814814814815,
      "grad_norm": 3.4707837104797363,
      "learning_rate": 2.994692241176429e-06,
      "loss": 7.9841,
      "step": 106
    },
    {
      "epoch": 0.9907407407407407,
      "grad_norm": 1.5300663709640503,
      "learning_rate": 2.9927770900082955e-06,
      "loss": 6.2007,
      "step": 107
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.747676134109497,
      "learning_rate": 2.9905683148398643e-06,
      "loss": 6.5292,
      "step": 108
    },
    {
      "epoch": 1.0,
      "eval_loss": 5.185390949249268,
      "eval_runtime": 10.6897,
      "eval_samples_per_second": 9.823,
      "eval_steps_per_second": 9.823,
      "step": 108
    },
    {
      "epoch": 1.0092592592592593,
      "grad_norm": 1.57361900806427,
      "learning_rate": 2.988066350129437e-06,
      "loss": 6.1537,
      "step": 109
    },
    {
      "epoch": 1.0185185185185186,
      "grad_norm": 5.155310153961182,
      "learning_rate": 2.9852716880046687e-06,
      "loss": 7.9648,
      "step": 110
    },
    {
      "epoch": 1.0277777777777777,
      "grad_norm": 1.0643362998962402,
      "learning_rate": 2.982184878165763e-06,
      "loss": 5.1748,
      "step": 111
    },
    {
      "epoch": 1.0092592592592593,
      "grad_norm": 1.0376684665679932,
      "learning_rate": 2.9788065277773537e-06,
      "loss": 5.6556,
      "step": 112
    },
    {
      "epoch": 1.0185185185185186,
      "grad_norm": 1.228864073753357,
      "learning_rate": 2.9751373013490727e-06,
      "loss": 5.7668,
      "step": 113
    },
    {
      "epoch": 1.0277777777777777,
      "grad_norm": 1.7278510332107544,
      "learning_rate": 2.971177920604846e-06,
      "loss": 6.3775,
      "step": 114
    },
    {
      "epoch": 1.037037037037037,
      "grad_norm": 1.1904284954071045,
      "learning_rate": 2.9669291643409314e-06,
      "loss": 5.7487,
      "step": 115
    },
    {
      "epoch": 1.0462962962962963,
      "grad_norm": 2.941854238510132,
      "learning_rate": 2.9623918682727352e-06,
      "loss": 7.2423,
      "step": 116
    },
    {
      "epoch": 1.0555555555555556,
      "grad_norm": 1.5035470724105835,
      "learning_rate": 2.9575669248704265e-06,
      "loss": 6.1212,
      "step": 117
    },
    {
      "epoch": 1.0648148148148149,
      "grad_norm": 1.4733242988586426,
      "learning_rate": 2.9524552831833926e-06,
      "loss": 6.3059,
      "step": 118
    },
    {
      "epoch": 1.074074074074074,
      "grad_norm": 1.3305742740631104,
      "learning_rate": 2.947057948653564e-06,
      "loss": 6.1691,
      "step": 119
    },
    {
      "epoch": 1.0833333333333333,
      "grad_norm": 1.2160289287567139,
      "learning_rate": 2.9413759829176495e-06,
      "loss": 5.9701,
      "step": 120
    },
    {
      "epoch": 1.0925925925925926,
      "grad_norm": 1.9012755155563354,
      "learning_rate": 2.935410503598313e-06,
      "loss": 6.6399,
      "step": 121
    },
    {
      "epoch": 1.1018518518518519,
      "grad_norm": 1.3863564729690552,
      "learning_rate": 2.9291626840843446e-06,
      "loss": 6.0122,
      "step": 122
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 1.1440134048461914,
      "learning_rate": 2.9226337532998544e-06,
      "loss": 5.9895,
      "step": 123
    },
    {
      "epoch": 1.1203703703703705,
      "grad_norm": 1.4871304035186768,
      "learning_rate": 2.9158249954625514e-06,
      "loss": 6.3919,
      "step": 124
    },
    {
      "epoch": 1.1296296296296295,
      "grad_norm": 1.574532389640808,
      "learning_rate": 2.908737749831142e-06,
      "loss": 6.489,
      "step": 125
    },
    {
      "epoch": 1.1388888888888888,
      "grad_norm": 2.674992322921753,
      "learning_rate": 2.9013734104419e-06,
      "loss": 6.965,
      "step": 126
    },
    {
      "epoch": 1.1481481481481481,
      "grad_norm": 1.4959555864334106,
      "learning_rate": 2.8937334258344676e-06,
      "loss": 5.8652,
      "step": 127
    },
    {
      "epoch": 1.1574074074074074,
      "grad_norm": 3.064631223678589,
      "learning_rate": 2.88581929876693e-06,
      "loss": 7.1246,
      "step": 128
    },
    {
      "epoch": 1.1666666666666667,
      "grad_norm": 1.7988754510879517,
      "learning_rate": 2.8776325859202315e-06,
      "loss": 6.1511,
      "step": 129
    },
    {
      "epoch": 1.175925925925926,
      "grad_norm": 1.4231789112091064,
      "learning_rate": 2.869174897591978e-06,
      "loss": 6.0368,
      "step": 130
    },
    {
      "epoch": 1.1851851851851851,
      "grad_norm": 2.6319122314453125,
      "learning_rate": 2.8604478973797005e-06,
      "loss": 7.1228,
      "step": 131
    },
    {
      "epoch": 1.1944444444444444,
      "grad_norm": 1.4405936002731323,
      "learning_rate": 2.851453301853629e-06,
      "loss": 5.9765,
      "step": 132
    },
    {
      "epoch": 1.2037037037037037,
      "grad_norm": 1.669622540473938,
      "learning_rate": 2.842192880219049e-06,
      "loss": 6.4033,
      "step": 133
    },
    {
      "epoch": 1.212962962962963,
      "grad_norm": 1.1244466304779053,
      "learning_rate": 2.832668453968309e-06,
      "loss": 5.5845,
      "step": 134
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 1.0342087745666504,
      "learning_rate": 2.8228818965225326e-06,
      "loss": 5.6106,
      "step": 135
    },
    {
      "epoch": 1.2222222222222223,
      "eval_loss": 5.1238203048706055,
      "eval_runtime": 10.8032,
      "eval_samples_per_second": 9.719,
      "eval_steps_per_second": 9.719,
      "step": 135
    },
    {
      "epoch": 1.2314814814814814,
      "grad_norm": 2.5150041580200195,
      "learning_rate": 2.8128351328631308e-06,
      "loss": 6.5318,
      "step": 136
    },
    {
      "epoch": 1.2407407407407407,
      "grad_norm": 1.7575809955596924,
      "learning_rate": 2.8025301391531584e-06,
      "loss": 6.225,
      "step": 137
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.5736333131790161,
      "learning_rate": 2.7919689423486137e-06,
      "loss": 5.6247,
      "step": 138
    },
    {
      "epoch": 1.2592592592592593,
      "grad_norm": 0.9995443820953369,
      "learning_rate": 2.781153619799741e-06,
      "loss": 5.4866,
      "step": 139
    },
    {
      "epoch": 1.2685185185185186,
      "grad_norm": 1.7936508655548096,
      "learning_rate": 2.7700862988424264e-06,
      "loss": 6.4679,
      "step": 140
    },
    {
      "epoch": 1.2777777777777777,
      "grad_norm": 1.4072571992874146,
      "learning_rate": 2.7587691563797533e-06,
      "loss": 6.0924,
      "step": 141
    },
    {
      "epoch": 1.287037037037037,
      "grad_norm": 1.9437594413757324,
      "learning_rate": 2.747204418453818e-06,
      "loss": 6.3988,
      "step": 142
    },
    {
      "epoch": 1.2962962962962963,
      "grad_norm": 1.1792351007461548,
      "learning_rate": 2.735394359807872e-06,
      "loss": 6.0245,
      "step": 143
    },
    {
      "epoch": 1.3055555555555556,
      "grad_norm": 1.2097288370132446,
      "learning_rate": 2.7233413034388904e-06,
      "loss": 6.0232,
      "step": 144
    },
    {
      "epoch": 1.3148148148148149,
      "grad_norm": 1.5997315645217896,
      "learning_rate": 2.711047620140644e-06,
      "loss": 5.9724,
      "step": 145
    },
    {
      "epoch": 1.324074074074074,
      "grad_norm": 1.9537619352340698,
      "learning_rate": 2.6985157280373736e-06,
      "loss": 6.336,
      "step": 146
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 1.4596198797225952,
      "learning_rate": 2.685748092108155e-06,
      "loss": 6.2286,
      "step": 147
    },
    {
      "epoch": 1.3425925925925926,
      "grad_norm": 1.2177029848098755,
      "learning_rate": 2.6727472237020448e-06,
      "loss": 5.8753,
      "step": 148
    },
    {
      "epoch": 1.3518518518518519,
      "grad_norm": 1.5970029830932617,
      "learning_rate": 2.6595156800441053e-06,
      "loss": 6.2522,
      "step": 149
    },
    {
      "epoch": 1.3611111111111112,
      "grad_norm": 1.1032902002334595,
      "learning_rate": 2.646056063732411e-06,
      "loss": 5.795,
      "step": 150
    },
    {
      "epoch": 1.3703703703703702,
      "grad_norm": 1.10838782787323,
      "learning_rate": 2.632371022226124e-06,
      "loss": 5.4648,
      "step": 151
    },
    {
      "epoch": 1.3796296296296298,
      "grad_norm": 1.3732211589813232,
      "learning_rate": 2.6184632473247484e-06,
      "loss": 5.568,
      "step": 152
    },
    {
      "epoch": 1.3888888888888888,
      "grad_norm": 1.188904881477356,
      "learning_rate": 2.6043354746386645e-06,
      "loss": 5.8007,
      "step": 153
    },
    {
      "epoch": 1.3981481481481481,
      "grad_norm": 1.3125028610229492,
      "learning_rate": 2.5899904830510406e-06,
      "loss": 5.7965,
      "step": 154
    },
    {
      "epoch": 1.4074074074074074,
      "grad_norm": 1.2234677076339722,
      "learning_rate": 2.575431094171241e-06,
      "loss": 5.9759,
      "step": 155
    },
    {
      "epoch": 1.4166666666666667,
      "grad_norm": 2.0235507488250732,
      "learning_rate": 2.5606601717798212e-06,
      "loss": 6.5055,
      "step": 156
    },
    {
      "epoch": 1.425925925925926,
      "grad_norm": 1.3678350448608398,
      "learning_rate": 2.5456806212652348e-06,
      "loss": 5.8768,
      "step": 157
    },
    {
      "epoch": 1.4351851851851851,
      "grad_norm": 1.309804081916809,
      "learning_rate": 2.5304953890523524e-06,
      "loss": 6.0516,
      "step": 158
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 1.4316556453704834,
      "learning_rate": 2.5151074620229114e-06,
      "loss": 6.2186,
      "step": 159
    },
    {
      "epoch": 1.4537037037037037,
      "grad_norm": 1.308398723602295,
      "learning_rate": 2.499519866928006e-06,
      "loss": 5.9185,
      "step": 160
    },
    {
      "epoch": 1.462962962962963,
      "grad_norm": 1.7772057056427002,
      "learning_rate": 2.4837356697927366e-06,
      "loss": 6.4643,
      "step": 161
    },
    {
      "epoch": 1.4722222222222223,
      "grad_norm": 1.4515641927719116,
      "learning_rate": 2.4677579753131316e-06,
      "loss": 6.3981,
      "step": 162
    },
    {
      "epoch": 1.4722222222222223,
      "eval_loss": 5.054426193237305,
      "eval_runtime": 10.7718,
      "eval_samples_per_second": 9.748,
      "eval_steps_per_second": 9.748,
      "step": 162
    },
    {
      "epoch": 1.4814814814814814,
      "grad_norm": 2.9240331649780273,
      "learning_rate": 2.4515899262454685e-06,
      "loss": 6.8221,
      "step": 163
    },
    {
      "epoch": 1.4907407407407407,
      "grad_norm": 3.2064929008483887,
      "learning_rate": 2.4352347027881005e-06,
      "loss": 7.6826,
      "step": 164
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.6117157936096191,
      "learning_rate": 2.4186955219559254e-06,
      "loss": 5.5413,
      "step": 165
    },
    {
      "epoch": 1.5092592592592593,
      "grad_norm": 1.5916537046432495,
      "learning_rate": 2.401975636947609e-06,
      "loss": 6.1638,
      "step": 166
    },
    {
      "epoch": 1.5185185185185186,
      "grad_norm": 1.6315852403640747,
      "learning_rate": 2.3850783365056922e-06,
      "loss": 6.3381,
      "step": 167
    },
    {
      "epoch": 1.5277777777777777,
      "grad_norm": 1.1859897375106812,
      "learning_rate": 2.368006944269709e-06,
      "loss": 5.7726,
      "step": 168
    },
    {
      "epoch": 1.5370370370370372,
      "grad_norm": 1.2986359596252441,
      "learning_rate": 2.3507648181224336e-06,
      "loss": 5.5423,
      "step": 169
    },
    {
      "epoch": 1.5462962962962963,
      "grad_norm": 1.3099842071533203,
      "learning_rate": 2.3333553495294033e-06,
      "loss": 6.2081,
      "step": 170
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 1.8448351621627808,
      "learning_rate": 2.315781962871823e-06,
      "loss": 6.4411,
      "step": 171
    },
    {
      "epoch": 1.5648148148148149,
      "grad_norm": 1.263071894645691,
      "learning_rate": 2.298048114773005e-06,
      "loss": 5.769,
      "step": 172
    },
    {
      "epoch": 1.574074074074074,
      "grad_norm": 1.4397268295288086,
      "learning_rate": 2.280157293418462e-06,
      "loss": 6.1988,
      "step": 173
    },
    {
      "epoch": 1.5833333333333335,
      "grad_norm": 1.3311412334442139,
      "learning_rate": 2.262113017869794e-06,
      "loss": 5.9678,
      "step": 174
    },
    {
      "epoch": 1.5925925925925926,
      "grad_norm": 1.5137203931808472,
      "learning_rate": 2.243918837372502e-06,
      "loss": 6.4093,
      "step": 175
    },
    {
      "epoch": 1.6018518518518519,
      "grad_norm": 2.532318115234375,
      "learning_rate": 2.22557833065786e-06,
      "loss": 6.573,
      "step": 176
    },
    {
      "epoch": 1.6111111111111112,
      "grad_norm": 1.7535362243652344,
      "learning_rate": 2.207095105238997e-06,
      "loss": 6.1462,
      "step": 177
    },
    {
      "epoch": 1.6203703703703702,
      "grad_norm": 1.3252042531967163,
      "learning_rate": 2.1884727967013063e-06,
      "loss": 5.7489,
      "step": 178
    },
    {
      "epoch": 1.6296296296296298,
      "grad_norm": 2.172076463699341,
      "learning_rate": 2.1697150679873442e-06,
      "loss": 6.3797,
      "step": 179
    },
    {
      "epoch": 1.6388888888888888,
      "grad_norm": 0.8812606334686279,
      "learning_rate": 2.150825608676337e-06,
      "loss": 5.4227,
      "step": 180
    },
    {
      "epoch": 1.6481481481481481,
      "grad_norm": 1.4979366064071655,
      "learning_rate": 2.1318081342584566e-06,
      "loss": 5.811,
      "step": 181
    },
    {
      "epoch": 1.6574074074074074,
      "grad_norm": 1.277028203010559,
      "learning_rate": 2.1126663854039945e-06,
      "loss": 5.9072,
      "step": 182
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 1.668641448020935,
      "learning_rate": 2.0934041272275884e-06,
      "loss": 6.1502,
      "step": 183
    },
    {
      "epoch": 1.675925925925926,
      "grad_norm": 0.8518059849739075,
      "learning_rate": 2.074025148547635e-06,
      "loss": 5.2586,
      "step": 184
    },
    {
      "epoch": 1.6851851851851851,
      "grad_norm": 1.7262235879898071,
      "learning_rate": 2.0545332611410427e-06,
      "loss": 5.9965,
      "step": 185
    },
    {
      "epoch": 1.6944444444444444,
      "grad_norm": 4.665543079376221,
      "learning_rate": 2.0349322989934695e-06,
      "loss": 7.4106,
      "step": 186
    },
    {
      "epoch": 1.7037037037037037,
      "grad_norm": 1.1285862922668457,
      "learning_rate": 2.0152261175451915e-06,
      "loss": 5.6628,
      "step": 187
    },
    {
      "epoch": 1.7129629629629628,
      "grad_norm": 1.108992338180542,
      "learning_rate": 1.9954185929327507e-06,
      "loss": 5.4876,
      "step": 188
    },
    {
      "epoch": 1.7222222222222223,
      "grad_norm": 1.1134352684020996,
      "learning_rate": 1.9755136212265323e-06,
      "loss": 5.602,
      "step": 189
    },
    {
      "epoch": 1.7222222222222223,
      "eval_loss": 4.992879867553711,
      "eval_runtime": 10.5803,
      "eval_samples_per_second": 9.924,
      "eval_steps_per_second": 9.924,
      "step": 189
    },
    {
      "epoch": 1.7314814814814814,
      "grad_norm": 1.215661883354187,
      "learning_rate": 1.9555151176644222e-06,
      "loss": 5.7391,
      "step": 190
    },
    {
      "epoch": 1.7407407407407407,
      "grad_norm": 1.2140839099884033,
      "learning_rate": 1.9354270158816935e-06,
      "loss": 5.4932,
      "step": 191
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.3728704452514648,
      "learning_rate": 1.915253267137274e-06,
      "loss": 5.7001,
      "step": 192
    },
    {
      "epoch": 1.7592592592592593,
      "grad_norm": 5.121810436248779,
      "learning_rate": 1.894997839536547e-06,
      "loss": 7.9136,
      "step": 193
    },
    {
      "epoch": 1.7685185185185186,
      "grad_norm": 1.3116612434387207,
      "learning_rate": 1.8746647172508404e-06,
      "loss": 5.7987,
      "step": 194
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 1.1180241107940674,
      "learning_rate": 1.8542578997337519e-06,
      "loss": 5.9064,
      "step": 195
    },
    {
      "epoch": 1.7870370370370372,
      "grad_norm": 5.294866561889648,
      "learning_rate": 1.8337814009344715e-06,
      "loss": 8.2684,
      "step": 196
    },
    {
      "epoch": 1.7962962962962963,
      "grad_norm": 1.0381337404251099,
      "learning_rate": 1.813239248508255e-06,
      "loss": 5.487,
      "step": 197
    },
    {
      "epoch": 1.8055555555555556,
      "grad_norm": 1.2923859357833862,
      "learning_rate": 1.7926354830241926e-06,
      "loss": 6.0108,
      "step": 198
    },
    {
      "epoch": 1.8148148148148149,
      "grad_norm": 1.3527953624725342,
      "learning_rate": 1.7719741571704477e-06,
      "loss": 6.3468,
      "step": 199
    },
    {
      "epoch": 1.824074074074074,
      "grad_norm": 1.4213413000106812,
      "learning_rate": 1.7512593349571046e-06,
      "loss": 6.0251,
      "step": 200
    },
    {
      "epoch": 1.8333333333333335,
      "grad_norm": 1.3854726552963257,
      "learning_rate": 1.730495090916795e-06,
      "loss": 5.8197,
      "step": 201
    },
    {
      "epoch": 1.8425925925925926,
      "grad_norm": 0.9397538304328918,
      "learning_rate": 1.7096855093032494e-06,
      "loss": 5.4813,
      "step": 202
    },
    {
      "epoch": 1.8518518518518519,
      "grad_norm": 0.7849190831184387,
      "learning_rate": 1.6888346832879382e-06,
      "loss": 5.3213,
      "step": 203
    },
    {
      "epoch": 1.8611111111111112,
      "grad_norm": 1.316622018814087,
      "learning_rate": 1.6679467141549617e-06,
      "loss": 5.358,
      "step": 204
    },
    {
      "epoch": 1.8703703703703702,
      "grad_norm": 1.1983349323272705,
      "learning_rate": 1.6470257104943413e-06,
      "loss": 5.8328,
      "step": 205
    },
    {
      "epoch": 1.8796296296296298,
      "grad_norm": 1.1115864515304565,
      "learning_rate": 1.6260757873938715e-06,
      "loss": 5.6616,
      "step": 206
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 1.6418949365615845,
      "learning_rate": 1.6051010656297009e-06,
      "loss": 5.8618,
      "step": 207
    },
    {
      "epoch": 1.8981481481481481,
      "grad_norm": 1.2098896503448486,
      "learning_rate": 1.5841056708557877e-06,
      "loss": 5.7551,
      "step": 208
    },
    {
      "epoch": 1.9074074074074074,
      "grad_norm": 2.523620843887329,
      "learning_rate": 1.5630937327923979e-06,
      "loss": 6.0457,
      "step": 209
    },
    {
      "epoch": 1.9166666666666665,
      "grad_norm": 1.2053682804107666,
      "learning_rate": 1.5420693844138035e-06,
      "loss": 5.7501,
      "step": 210
    },
    {
      "epoch": 1.925925925925926,
      "grad_norm": 1.0068210363388062,
      "learning_rate": 1.5210367611353436e-06,
      "loss": 5.2868,
      "step": 211
    },
    {
      "epoch": 1.9351851851851851,
      "grad_norm": 1.0384793281555176,
      "learning_rate": 1.5e-06,
      "loss": 5.5413,
      "step": 212
    },
    {
      "epoch": 1.9444444444444444,
      "grad_norm": 0.9215264916419983,
      "learning_rate": 1.4789632388646567e-06,
      "loss": 5.3834,
      "step": 213
    },
    {
      "epoch": 1.9537037037037037,
      "grad_norm": 1.250044584274292,
      "learning_rate": 1.4579306155861968e-06,
      "loss": 5.9769,
      "step": 214
    },
    {
      "epoch": 1.9629629629629628,
      "grad_norm": 0.9464189410209656,
      "learning_rate": 1.4369062672076026e-06,
      "loss": 5.3573,
      "step": 215
    },
    {
      "epoch": 1.9722222222222223,
      "grad_norm": 0.9586074352264404,
      "learning_rate": 1.4158943291442122e-06,
      "loss": 5.3998,
      "step": 216
    },
    {
      "epoch": 1.9722222222222223,
      "eval_loss": 4.946777820587158,
      "eval_runtime": 10.6655,
      "eval_samples_per_second": 9.845,
      "eval_steps_per_second": 9.845,
      "step": 216
    },
    {
      "epoch": 1.9814814814814814,
      "grad_norm": 1.4538060426712036,
      "learning_rate": 1.394898934370299e-06,
      "loss": 6.2383,
      "step": 217
    },
    {
      "epoch": 1.9907407407407407,
      "grad_norm": 1.114857792854309,
      "learning_rate": 1.3739242126061288e-06,
      "loss": 5.3067,
      "step": 218
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.250162959098816,
      "learning_rate": 1.352974289505659e-06,
      "loss": 5.6571,
      "step": 219
    },
    {
      "epoch": 2.0092592592592595,
      "grad_norm": 1.8700934648513794,
      "learning_rate": 1.3320532858450384e-06,
      "loss": 6.4708,
      "step": 220
    },
    {
      "epoch": 2.0185185185185186,
      "grad_norm": 1.8928983211517334,
      "learning_rate": 1.3111653167120623e-06,
      "loss": 5.9964,
      "step": 221
    },
    {
      "epoch": 2.0277777777777777,
      "grad_norm": 1.5304040908813477,
      "learning_rate": 1.2903144906967513e-06,
      "loss": 4.7131,
      "step": 222
    },
    {
      "epoch": 2.009259259259259,
      "grad_norm": 1.2288047075271606,
      "learning_rate": 1.2695049090832047e-06,
      "loss": 5.8606,
      "step": 223
    },
    {
      "epoch": 2.0185185185185186,
      "grad_norm": 1.6722135543823242,
      "learning_rate": 1.2487406650428957e-06,
      "loss": 5.824,
      "step": 224
    },
    {
      "epoch": 2.0277777777777777,
      "grad_norm": 1.4619513750076294,
      "learning_rate": 1.2280258428295526e-06,
      "loss": 5.7594,
      "step": 225
    },
    {
      "epoch": 2.037037037037037,
      "grad_norm": 1.3389699459075928,
      "learning_rate": 1.2073645169758077e-06,
      "loss": 5.9157,
      "step": 226
    },
    {
      "epoch": 2.0462962962962963,
      "grad_norm": 1.0063234567642212,
      "learning_rate": 1.1867607514917454e-06,
      "loss": 5.7464,
      "step": 227
    },
    {
      "epoch": 2.0555555555555554,
      "grad_norm": 0.9406788945198059,
      "learning_rate": 1.1662185990655286e-06,
      "loss": 5.2862,
      "step": 228
    },
    {
      "epoch": 2.064814814814815,
      "grad_norm": 1.4217416048049927,
      "learning_rate": 1.1457421002662489e-06,
      "loss": 5.7137,
      "step": 229
    },
    {
      "epoch": 2.074074074074074,
      "grad_norm": 1.1130520105361938,
      "learning_rate": 1.1253352827491595e-06,
      "loss": 5.3391,
      "step": 230
    },
    {
      "epoch": 2.0833333333333335,
      "grad_norm": 1.0606895685195923,
      "learning_rate": 1.1050021604634526e-06,
      "loss": 5.6953,
      "step": 231
    },
    {
      "epoch": 2.0925925925925926,
      "grad_norm": 2.6643784046173096,
      "learning_rate": 1.084746732862726e-06,
      "loss": 6.9906,
      "step": 232
    },
    {
      "epoch": 2.1018518518518516,
      "grad_norm": 1.0088460445404053,
      "learning_rate": 1.0645729841183068e-06,
      "loss": 5.5864,
      "step": 233
    },
    {
      "epoch": 2.111111111111111,
      "grad_norm": 0.9418116211891174,
      "learning_rate": 1.0444848823355779e-06,
      "loss": 5.502,
      "step": 234
    },
    {
      "epoch": 2.1203703703703702,
      "grad_norm": 1.597672462463379,
      "learning_rate": 1.0244863787734682e-06,
      "loss": 6.141,
      "step": 235
    },
    {
      "epoch": 2.1296296296296298,
      "grad_norm": 1.1460810899734497,
      "learning_rate": 1.0045814070672498e-06,
      "loss": 5.5834,
      "step": 236
    },
    {
      "epoch": 2.138888888888889,
      "grad_norm": 1.4463963508605957,
      "learning_rate": 9.847738824548082e-07,
      "loss": 5.867,
      "step": 237
    },
    {
      "epoch": 2.148148148148148,
      "grad_norm": 1.1600419282913208,
      "learning_rate": 9.650677010065305e-07,
      "loss": 5.7531,
      "step": 238
    },
    {
      "epoch": 2.1574074074074074,
      "grad_norm": 0.9038326144218445,
      "learning_rate": 9.454667388589576e-07,
      "loss": 5.2356,
      "step": 239
    },
    {
      "epoch": 2.1666666666666665,
      "grad_norm": 1.5344514846801758,
      "learning_rate": 9.259748514523654e-07,
      "loss": 6.1354,
      "step": 240
    },
    {
      "epoch": 2.175925925925926,
      "grad_norm": 1.1884032487869263,
      "learning_rate": 9.065958727724121e-07,
      "loss": 5.7438,
      "step": 241
    },
    {
      "epoch": 2.185185185185185,
      "grad_norm": 0.8460777401924133,
      "learning_rate": 8.87333614596006e-07,
      "loss": 5.4901,
      "step": 242
    },
    {
      "epoch": 2.1944444444444446,
      "grad_norm": 0.8021849393844604,
      "learning_rate": 8.681918657415443e-07,
      "loss": 5.1841,
      "step": 243
    },
    {
      "epoch": 2.1944444444444446,
      "eval_loss": 4.917133331298828,
      "eval_runtime": 10.6391,
      "eval_samples_per_second": 9.869,
      "eval_steps_per_second": 9.869,
      "step": 243
    },
    {
      "epoch": 2.2037037037037037,
      "grad_norm": 5.063238620758057,
      "learning_rate": 8.49174391323663e-07,
      "loss": 7.8621,
      "step": 244
    },
    {
      "epoch": 2.212962962962963,
      "grad_norm": 1.4641467332839966,
      "learning_rate": 8.302849320126558e-07,
      "loss": 5.8469,
      "step": 245
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 1.4936150312423706,
      "learning_rate": 8.115272032986932e-07,
      "loss": 5.7606,
      "step": 246
    },
    {
      "epoch": 2.2314814814814814,
      "grad_norm": 0.9228253364562988,
      "learning_rate": 7.929048947610034e-07,
      "loss": 5.4861,
      "step": 247
    },
    {
      "epoch": 2.240740740740741,
      "grad_norm": 0.9733036160469055,
      "learning_rate": 7.744216693421403e-07,
      "loss": 5.5735,
      "step": 248
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.7804141044616699,
      "learning_rate": 7.560811626274989e-07,
      "loss": 5.2257,
      "step": 249
    },
    {
      "epoch": 2.259259259259259,
      "grad_norm": 0.8710982799530029,
      "learning_rate": 7.378869821302062e-07,
      "loss": 5.5369,
      "step": 250
    },
    {
      "epoch": 2.2685185185185186,
      "grad_norm": 1.0422347784042358,
      "learning_rate": 7.19842706581538e-07,
      "loss": 5.4223,
      "step": 251
    },
    {
      "epoch": 2.2777777777777777,
      "grad_norm": 0.8307288885116577,
      "learning_rate": 7.019518852269954e-07,
      "loss": 5.2439,
      "step": 252
    },
    {
      "epoch": 2.287037037037037,
      "grad_norm": 2.0869851112365723,
      "learning_rate": 6.842180371281773e-07,
      "loss": 6.6325,
      "step": 253
    },
    {
      "epoch": 2.2962962962962963,
      "grad_norm": 0.8685571551322937,
      "learning_rate": 6.666446504705971e-07,
      "loss": 5.5038,
      "step": 254
    },
    {
      "epoch": 2.3055555555555554,
      "grad_norm": 1.5917941331863403,
      "learning_rate": 6.492351818775662e-07,
      "loss": 6.0099,
      "step": 255
    },
    {
      "epoch": 2.314814814814815,
      "grad_norm": 2.5297560691833496,
      "learning_rate": 6.319930557302914e-07,
      "loss": 6.3499,
      "step": 256
    },
    {
      "epoch": 2.324074074074074,
      "grad_norm": 1.365795612335205,
      "learning_rate": 6.14921663494308e-07,
      "loss": 5.8755,
      "step": 257
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 1.1725842952728271,
      "learning_rate": 5.980243630523911e-07,
      "loss": 5.8095,
      "step": 258
    },
    {
      "epoch": 2.3425925925925926,
      "grad_norm": 1.5456892251968384,
      "learning_rate": 5.813044780440748e-07,
      "loss": 6.1433,
      "step": 259
    },
    {
      "epoch": 2.351851851851852,
      "grad_norm": 0.8776403665542603,
      "learning_rate": 5.647652972118998e-07,
      "loss": 5.4915,
      "step": 260
    },
    {
      "epoch": 2.361111111111111,
      "grad_norm": 1.428396224975586,
      "learning_rate": 5.48410073754532e-07,
      "loss": 6.2122,
      "step": 261
    },
    {
      "epoch": 2.3703703703703702,
      "grad_norm": 1.048996090888977,
      "learning_rate": 5.322420246868689e-07,
      "loss": 5.6461,
      "step": 262
    },
    {
      "epoch": 2.3796296296296298,
      "grad_norm": 0.9285644888877869,
      "learning_rate": 5.162643302072639e-07,
      "loss": 5.4518,
      "step": 263
    },
    {
      "epoch": 2.388888888888889,
      "grad_norm": 0.8765868544578552,
      "learning_rate": 5.004801330719941e-07,
      "loss": 5.3568,
      "step": 264
    },
    {
      "epoch": 2.398148148148148,
      "grad_norm": 5.609752178192139,
      "learning_rate": 4.848925379770883e-07,
      "loss": 6.9303,
      "step": 265
    },
    {
      "epoch": 2.4074074074074074,
      "grad_norm": 1.008718729019165,
      "learning_rate": 4.6950461094764745e-07,
      "loss": 5.4784,
      "step": 266
    },
    {
      "epoch": 2.4166666666666665,
      "grad_norm": 0.8718838095664978,
      "learning_rate": 4.5431937873476516e-07,
      "loss": 5.5294,
      "step": 267
    },
    {
      "epoch": 2.425925925925926,
      "grad_norm": 1.0140756368637085,
      "learning_rate": 4.3933982822017883e-07,
      "loss": 5.3527,
      "step": 268
    },
    {
      "epoch": 2.435185185185185,
      "grad_norm": 1.2150001525878906,
      "learning_rate": 4.245689058287592e-07,
      "loss": 5.4179,
      "step": 269
    },
    {
      "epoch": 2.4444444444444446,
      "grad_norm": 1.3026584386825562,
      "learning_rate": 4.100095169489597e-07,
      "loss": 6.0764,
      "step": 270
    },
    {
      "epoch": 2.4444444444444446,
      "eval_loss": 4.900852203369141,
      "eval_runtime": 10.63,
      "eval_samples_per_second": 9.878,
      "eval_steps_per_second": 9.878,
      "step": 270
    },
    {
      "epoch": 2.4537037037037037,
      "grad_norm": 1.6737775802612305,
      "learning_rate": 3.956645253613363e-07,
      "loss": 5.8735,
      "step": 271
    },
    {
      "epoch": 2.462962962962963,
      "grad_norm": 0.9497735500335693,
      "learning_rate": 3.8153675267525163e-07,
      "loss": 5.509,
      "step": 272
    },
    {
      "epoch": 2.4722222222222223,
      "grad_norm": 0.8875224590301514,
      "learning_rate": 3.6762897777387626e-07,
      "loss": 5.2877,
      "step": 273
    },
    {
      "epoch": 2.4814814814814814,
      "grad_norm": 0.774486780166626,
      "learning_rate": 3.539439362675888e-07,
      "loss": 5.1427,
      "step": 274
    },
    {
      "epoch": 2.490740740740741,
      "grad_norm": 0.9570311307907104,
      "learning_rate": 3.404843199558945e-07,
      "loss": 5.379,
      "step": 275
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.908612847328186,
      "learning_rate": 3.272527762979553e-07,
      "loss": 5.4819,
      "step": 276
    },
    {
      "epoch": 2.5092592592592595,
      "grad_norm": 2.0357730388641357,
      "learning_rate": 3.142519078918449e-07,
      "loss": 6.3469,
      "step": 277
    },
    {
      "epoch": 2.5185185185185186,
      "grad_norm": 2.1806082725524902,
      "learning_rate": 3.014842719626268e-07,
      "loss": 5.5354,
      "step": 278
    },
    {
      "epoch": 2.5277777777777777,
      "grad_norm": 0.9985087513923645,
      "learning_rate": 2.8895237985935643e-07,
      "loss": 5.5649,
      "step": 279
    },
    {
      "epoch": 2.537037037037037,
      "grad_norm": 0.9574006199836731,
      "learning_rate": 2.7665869656110975e-07,
      "loss": 5.5855,
      "step": 280
    },
    {
      "epoch": 2.5462962962962963,
      "grad_norm": 0.6676331758499146,
      "learning_rate": 2.6460564019212803e-07,
      "loss": 5.3543,
      "step": 281
    },
    {
      "epoch": 2.5555555555555554,
      "grad_norm": 1.2132998704910278,
      "learning_rate": 2.52795581546182e-07,
      "loss": 5.5964,
      "step": 282
    },
    {
      "epoch": 2.564814814814815,
      "grad_norm": 1.157404899597168,
      "learning_rate": 2.412308436202468e-07,
      "loss": 5.7387,
      "step": 283
    },
    {
      "epoch": 2.574074074074074,
      "grad_norm": 1.2162729501724243,
      "learning_rate": 2.299137011575738e-07,
      "loss": 5.4369,
      "step": 284
    },
    {
      "epoch": 2.5833333333333335,
      "grad_norm": 1.804641842842102,
      "learning_rate": 2.1884638020025895e-07,
      "loss": 6.428,
      "step": 285
    },
    {
      "epoch": 2.5925925925925926,
      "grad_norm": 0.9699422717094421,
      "learning_rate": 2.080310576513866e-07,
      "loss": 5.6471,
      "step": 286
    },
    {
      "epoch": 2.601851851851852,
      "grad_norm": 0.9756860136985779,
      "learning_rate": 1.9746986084684186e-07,
      "loss": 5.647,
      "step": 287
    },
    {
      "epoch": 2.611111111111111,
      "grad_norm": 1.4492071866989136,
      "learning_rate": 1.8716486713686948e-07,
      "loss": 5.7286,
      "step": 288
    },
    {
      "epoch": 2.6203703703703702,
      "grad_norm": 0.8920571208000183,
      "learning_rate": 1.771181034774676e-07,
      "loss": 5.25,
      "step": 289
    },
    {
      "epoch": 2.6296296296296298,
      "grad_norm": 1.1408250331878662,
      "learning_rate": 1.6733154603169176e-07,
      "loss": 5.9142,
      "step": 290
    },
    {
      "epoch": 2.638888888888889,
      "grad_norm": 1.3304352760314941,
      "learning_rate": 1.5780711978095092e-07,
      "loss": 5.8842,
      "step": 291
    },
    {
      "epoch": 2.648148148148148,
      "grad_norm": 0.9941169619560242,
      "learning_rate": 1.4854669814637145e-07,
      "loss": 5.5157,
      "step": 292
    },
    {
      "epoch": 2.6574074074074074,
      "grad_norm": 2.4659430980682373,
      "learning_rate": 1.3955210262029917e-07,
      "loss": 5.5099,
      "step": 293
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 1.0898157358169556,
      "learning_rate": 1.3082510240802165e-07,
      "loss": 5.6562,
      "step": 294
    },
    {
      "epoch": 2.675925925925926,
      "grad_norm": 1.2926898002624512,
      "learning_rate": 1.2236741407976837e-07,
      "loss": 5.7729,
      "step": 295
    },
    {
      "epoch": 2.685185185185185,
      "grad_norm": 3.4110615253448486,
      "learning_rate": 1.141807012330699e-07,
      "loss": 5.7517,
      "step": 296
    },
    {
      "epoch": 2.6944444444444446,
      "grad_norm": 1.9456055164337158,
      "learning_rate": 1.0626657416553281e-07,
      "loss": 5.2345,
      "step": 297
    },
    {
      "epoch": 2.6944444444444446,
      "eval_loss": 4.896117210388184,
      "eval_runtime": 10.4256,
      "eval_samples_per_second": 10.071,
      "eval_steps_per_second": 10.071,
      "step": 297
    },
    {
      "epoch": 2.7037037037037037,
      "grad_norm": 1.2618635892868042,
      "learning_rate": 9.862658955810045e-08,
      "loss": 5.7653,
      "step": 298
    },
    {
      "epoch": 2.712962962962963,
      "grad_norm": 1.0603694915771484,
      "learning_rate": 9.126225016885858e-08,
      "loss": 5.7454,
      "step": 299
    },
    {
      "epoch": 2.7222222222222223,
      "grad_norm": 0.9327253699302673,
      "learning_rate": 8.417500453744864e-08,
      "loss": 5.6063,
      "step": 300
    },
    {
      "epoch": 2.7314814814814814,
      "grad_norm": 1.8524595499038696,
      "learning_rate": 7.736624670014602e-08,
      "loss": 6.2174,
      "step": 301
    },
    {
      "epoch": 2.7407407407407405,
      "grad_norm": 1.1083251237869263,
      "learning_rate": 7.083731591565551e-08,
      "loss": 5.5776,
      "step": 302
    },
    {
      "epoch": 2.75,
      "grad_norm": 1.899492621421814,
      "learning_rate": 6.458949640168676e-08,
      "loss": 6.3714,
      "step": 303
    },
    {
      "epoch": 2.7592592592592595,
      "grad_norm": 2.46284556388855,
      "learning_rate": 5.8624017082350765e-08,
      "loss": 6.619,
      "step": 304
    },
    {
      "epoch": 2.7685185185185186,
      "grad_norm": 2.8916397094726562,
      "learning_rate": 5.2942051346436315e-08,
      "loss": 6.5435,
      "step": 305
    },
    {
      "epoch": 2.7777777777777777,
      "grad_norm": 1.0509790182113647,
      "learning_rate": 4.754471681660799e-08,
      "loss": 5.4202,
      "step": 306
    },
    {
      "epoch": 2.787037037037037,
      "grad_norm": 0.9211875200271606,
      "learning_rate": 4.243307512957351e-08,
      "loss": 5.4689,
      "step": 307
    },
    {
      "epoch": 2.7962962962962963,
      "grad_norm": 0.8755972385406494,
      "learning_rate": 3.7608131727264573e-08,
      "loss": 5.2441,
      "step": 308
    },
    {
      "epoch": 2.8055555555555554,
      "grad_norm": 1.1118243932724,
      "learning_rate": 3.3070835659068596e-08,
      "loss": 5.7712,
      "step": 309
    },
    {
      "epoch": 2.814814814814815,
      "grad_norm": 1.214991569519043,
      "learning_rate": 2.8822079395154353e-08,
      "loss": 5.6607,
      "step": 310
    },
    {
      "epoch": 2.824074074074074,
      "grad_norm": 0.8643071055412292,
      "learning_rate": 2.4862698650927385e-08,
      "loss": 5.4248,
      "step": 311
    },
    {
      "epoch": 2.8333333333333335,
      "grad_norm": 1.9691087007522583,
      "learning_rate": 2.1193472222646172e-08,
      "loss": 6.127,
      "step": 312
    },
    {
      "epoch": 2.8425925925925926,
      "grad_norm": 0.7883575558662415,
      "learning_rate": 1.7815121834236837e-08,
      "loss": 5.4039,
      "step": 313
    },
    {
      "epoch": 2.851851851851852,
      "grad_norm": 1.6514503955841064,
      "learning_rate": 1.4728311995331533e-08,
      "loss": 5.8646,
      "step": 314
    },
    {
      "epoch": 2.861111111111111,
      "grad_norm": 2.683927297592163,
      "learning_rate": 1.1933649870563002e-08,
      "loss": 6.0561,
      "step": 315
    },
    {
      "epoch": 2.8703703703703702,
      "grad_norm": 1.158765435218811,
      "learning_rate": 9.431685160136094e-09,
      "loss": 5.594,
      "step": 316
    },
    {
      "epoch": 2.8796296296296298,
      "grad_norm": 1.821207880973816,
      "learning_rate": 7.222909991704774e-09,
      "loss": 6.2595,
      "step": 317
    },
    {
      "epoch": 2.888888888888889,
      "grad_norm": 1.1909257173538208,
      "learning_rate": 5.307758823571374e-09,
      "loss": 5.8222,
      "step": 318
    },
    {
      "epoch": 2.898148148148148,
      "grad_norm": 1.9552392959594727,
      "learning_rate": 3.6866083592309587e-09,
      "loss": 6.2407,
      "step": 319
    },
    {
      "epoch": 2.9074074074074074,
      "grad_norm": 1.438214898109436,
      "learning_rate": 2.359777473275093e-09,
      "loss": 5.8622,
      "step": 320
    },
    {
      "epoch": 2.9166666666666665,
      "grad_norm": 5.569436073303223,
      "learning_rate": 1.3275271486696826e-09,
      "loss": 8.2884,
      "step": 321
    },
    {
      "epoch": 2.925925925925926,
      "grad_norm": 0.8624269366264343,
      "learning_rate": 5.900604254207021e-10,
      "loss": 5.4493,
      "step": 322
    },
    {
      "epoch": 2.935185185185185,
      "grad_norm": 1.1584603786468506,
      "learning_rate": 1.4752236063747536e-10,
      "loss": 5.7549,
      "step": 323
    },
    {
      "epoch": 2.9444444444444446,
      "grad_norm": 1.3483120203018188,
      "learning_rate": 0.0,
      "loss": 5.4896,
      "step": 324
    },
    {
      "epoch": 2.9444444444444446,
      "eval_loss": 4.894683837890625,
      "eval_runtime": 10.7727,
      "eval_samples_per_second": 9.747,
      "eval_steps_per_second": 9.747,
      "step": 324
    }
  ],
  "logging_steps": 1,
  "max_steps": 324,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 108,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.044304041803776e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}