{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9722222222222223,
  "eval_steps": 27,
  "global_step": 216,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.009259259259259259, "grad_norm": 3.136354684829712, "learning_rate": 3.0000000000000004e-08, "loss": 7.126, "step": 1 },
    { "epoch": 0.009259259259259259, "eval_loss": 5.272334098815918, "eval_runtime": 10.9491, "eval_samples_per_second": 9.59, "eval_steps_per_second": 9.59, "step": 1 },
    { "epoch": 0.018518518518518517, "grad_norm": 1.6424509286880493, "learning_rate": 6.000000000000001e-08, "loss": 6.2434, "step": 2 },
    { "epoch": 0.027777777777777776, "grad_norm": 1.3273869752883911, "learning_rate": 9e-08, "loss": 5.851, "step": 3 },
    { "epoch": 0.037037037037037035, "grad_norm": 1.9482982158660889, "learning_rate": 1.2000000000000002e-07, "loss": 7.0582, "step": 4 },
    { "epoch": 0.046296296296296294, "grad_norm": 2.214090347290039, "learning_rate": 1.5000000000000002e-07, "loss": 6.2122, "step": 5 },
    { "epoch": 0.05555555555555555, "grad_norm": 1.6521217823028564, "learning_rate": 1.8e-07, "loss": 6.0318, "step": 6 },
    { "epoch": 0.06481481481481481, "grad_norm": 1.2911734580993652, "learning_rate": 2.1000000000000003e-07, "loss": 6.1264, "step": 7 },
    { "epoch": 0.07407407407407407, "grad_norm": 1.2651853561401367, "learning_rate": 2.4000000000000003e-07, "loss": 5.6821, "step": 8 },
    { "epoch": 0.08333333333333333, "grad_norm": 1.2058266401290894, "learning_rate": 2.7e-07, "loss": 5.7295, "step": 9 },
    { "epoch": 0.09259259259259259, "grad_norm": 2.896589994430542, "learning_rate": 3.0000000000000004e-07, "loss": 7.4781, "step": 10 },
    { "epoch": 0.10185185185185185, "grad_norm": 1.313854694366455, "learning_rate": 3.3e-07, "loss": 6.1254, "step": 11 },
    { "epoch": 0.1111111111111111, "grad_norm": 2.281994104385376, "learning_rate": 3.6e-07, "loss": 6.683, "step": 12 },
    { "epoch": 0.12037037037037036, "grad_norm": 1.2343209981918335, "learning_rate": 3.9e-07, "loss": 6.2304, "step": 13 },
    { "epoch": 0.12962962962962962, "grad_norm": 1.3297009468078613, "learning_rate": 4.2000000000000006e-07, "loss": 6.2013, "step": 14 },
    { "epoch": 0.1388888888888889, "grad_norm": 1.9952207803726196, "learning_rate": 4.5e-07, "loss": 6.9842, "step": 15 },
    { "epoch": 0.14814814814814814, "grad_norm": 1.3968197107315063, "learning_rate": 4.800000000000001e-07, "loss": 6.2524, "step": 16 },
    { "epoch": 0.1574074074074074, "grad_norm": 4.7724127769470215, "learning_rate": 5.100000000000001e-07, "loss": 8.551, "step": 17 },
    { "epoch": 0.16666666666666666, "grad_norm": 0.9988604187965393, "learning_rate": 5.4e-07, "loss": 5.5391, "step": 18 },
    { "epoch": 0.17592592592592593, "grad_norm": 1.2599544525146484, "learning_rate": 5.7e-07, "loss": 6.4559, "step": 19 },
    { "epoch": 0.18518518518518517, "grad_norm": 1.984928846359253, "learning_rate": 6.000000000000001e-07, "loss": 6.954, "step": 20 },
    { "epoch": 0.19444444444444445, "grad_norm": 1.2283947467803955, "learning_rate": 6.3e-07, "loss": 6.1821, "step": 21 },
    { "epoch": 0.2037037037037037, "grad_norm": 2.2317938804626465, "learning_rate": 6.6e-07, "loss": 6.9164, "step": 22 },
    { "epoch": 0.21296296296296297, "grad_norm": 1.5473928451538086, "learning_rate": 6.900000000000001e-07, "loss": 6.5494, "step": 23 },
    { "epoch": 0.2222222222222222, "grad_norm": 1.508975625038147, "learning_rate": 7.2e-07, "loss": 6.4025, "step": 24 },
    { "epoch": 0.23148148148148148, "grad_norm": 2.4396748542785645, "learning_rate": 7.5e-07, "loss": 7.2659, "step": 25 },
    { "epoch": 0.24074074074074073, "grad_norm": 1.1266586780548096, "learning_rate": 7.8e-07, "loss": 6.0911, "step": 26 },
    { "epoch": 0.25, "grad_norm": 1.515765905380249, "learning_rate": 8.100000000000001e-07, "loss": 6.503, "step": 27 },
    { "epoch": 0.25, "eval_loss": 5.270267963409424, "eval_runtime": 10.4888, "eval_samples_per_second": 10.011, "eval_steps_per_second": 10.011, "step": 27 },
    { "epoch": 0.25925925925925924, "grad_norm": 1.2845295667648315, "learning_rate": 8.400000000000001e-07, "loss": 6.3435, "step": 28 },
    { "epoch": 0.26851851851851855, "grad_norm": 1.528168797492981, "learning_rate": 8.699999999999999e-07, "loss": 6.8436, "step": 29 },
    { "epoch": 0.2777777777777778, "grad_norm": 1.555953860282898, "learning_rate": 9e-07, "loss": 6.404, "step": 30 },
    { "epoch": 0.28703703703703703, "grad_norm": 1.293718934059143, "learning_rate": 9.3e-07, "loss": 6.0386, "step": 31 },
    { "epoch": 0.2962962962962963, "grad_norm": 1.0446598529815674, "learning_rate": 9.600000000000001e-07, "loss": 5.677, "step": 32 },
    { "epoch": 0.3055555555555556, "grad_norm": 1.3030468225479126, "learning_rate": 9.9e-07, "loss": 5.8789, "step": 33 },
    { "epoch": 0.3148148148148148, "grad_norm": 1.108596682548523, "learning_rate": 1.0200000000000002e-06, "loss": 5.8663, "step": 34 },
    { "epoch": 0.32407407407407407, "grad_norm": 1.833085536956787, "learning_rate": 1.05e-06, "loss": 5.7367, "step": 35 },
    { "epoch": 0.3333333333333333, "grad_norm": 1.561715006828308, "learning_rate": 1.08e-06, "loss": 6.2266, "step": 36 },
    { "epoch": 0.3425925925925926, "grad_norm": 1.6098933219909668, "learning_rate": 1.11e-06, "loss": 6.3335, "step": 37 },
    { "epoch": 0.35185185185185186, "grad_norm": 1.6193208694458008, "learning_rate": 1.14e-06, "loss": 6.3869, "step": 38 },
    { "epoch": 0.3611111111111111, "grad_norm": 1.5229904651641846, "learning_rate": 1.17e-06, "loss": 6.0453, "step": 39 },
    { "epoch": 0.37037037037037035, "grad_norm": 1.511690616607666, "learning_rate": 1.2000000000000002e-06, "loss": 6.7203, "step": 40 },
    { "epoch": 0.37962962962962965, "grad_norm": 0.9691258668899536, "learning_rate": 1.2299999999999999e-06, "loss": 5.8176, "step": 41 },
    { "epoch": 0.3888888888888889, "grad_norm": 1.1707255840301514, "learning_rate": 1.26e-06, "loss": 6.2779, "step": 42 },
    { "epoch": 0.39814814814814814, "grad_norm": 1.1281574964523315, "learning_rate": 1.29e-06, "loss": 5.8402, "step": 43 },
    { "epoch": 0.4074074074074074, "grad_norm": 3.945136308670044, "learning_rate": 1.32e-06, "loss": 7.5325, "step": 44 },
    { "epoch": 0.4166666666666667, "grad_norm": 1.8681273460388184, "learning_rate": 1.35e-06, "loss": 6.3374, "step": 45 },
    { "epoch": 0.42592592592592593, "grad_norm": 1.300478458404541, "learning_rate": 1.3800000000000001e-06, "loss": 5.9715, "step": 46 },
    { "epoch": 0.4351851851851852, "grad_norm": 1.2660181522369385, "learning_rate": 1.41e-06, "loss": 6.0421, "step": 47 },
    { "epoch": 0.4444444444444444, "grad_norm": 1.817872405052185, "learning_rate": 1.44e-06, "loss": 7.0875, "step": 48 },
    { "epoch": 0.4537037037037037, "grad_norm": 1.6538351774215698, "learning_rate": 1.4700000000000001e-06, "loss": 6.1825, "step": 49 },
    { "epoch": 0.46296296296296297, "grad_norm": 1.5821293592453003, "learning_rate": 1.5e-06, "loss": 6.0065, "step": 50 },
    { "epoch": 0.4722222222222222, "grad_norm": 1.5061155557632446, "learning_rate": 1.53e-06, "loss": 5.8156, "step": 51 },
    { "epoch": 0.48148148148148145, "grad_norm": 1.495235562324524, "learning_rate": 1.56e-06, "loss": 5.792, "step": 52 },
    { "epoch": 0.49074074074074076, "grad_norm": 1.2974352836608887, "learning_rate": 1.59e-06, "loss": 6.3621, "step": 53 },
    { "epoch": 0.5, "grad_norm": 1.2943974733352661, "learning_rate": 1.6200000000000002e-06, "loss": 5.9853, "step": 54 },
    { "epoch": 0.5, "eval_loss": 5.2575860023498535, "eval_runtime": 10.6855, "eval_samples_per_second": 9.826, "eval_steps_per_second": 9.826, "step": 54 },
    { "epoch": 0.5092592592592593, "grad_norm": 1.0820438861846924, "learning_rate": 1.65e-06, "loss": 5.7026, "step": 55 },
    { "epoch": 0.5185185185185185, "grad_norm": 1.9304187297821045, "learning_rate": 1.6800000000000002e-06, "loss": 6.5805, "step": 56 },
    { "epoch": 0.5277777777777778, "grad_norm": 1.238295555114746, "learning_rate": 1.71e-06, "loss": 5.9397, "step": 57 },
    { "epoch": 0.5370370370370371, "grad_norm": 1.7372101545333862, "learning_rate": 1.7399999999999999e-06, "loss": 5.8761, "step": 58 },
    { "epoch": 0.5462962962962963, "grad_norm": 1.6708195209503174, "learning_rate": 1.77e-06, "loss": 6.0646, "step": 59 },
    { "epoch": 0.5555555555555556, "grad_norm": 1.5075998306274414, "learning_rate": 1.8e-06, "loss": 6.276, "step": 60 },
    { "epoch": 0.5648148148148148, "grad_norm": 1.4556330442428589, "learning_rate": 1.83e-06, "loss": 6.4906, "step": 61 },
    { "epoch": 0.5740740740740741, "grad_norm": 1.2507421970367432, "learning_rate": 1.86e-06, "loss": 6.2285, "step": 62 },
    { "epoch": 0.5833333333333334, "grad_norm": 1.1143450736999512, "learning_rate": 1.8900000000000001e-06, "loss": 5.7344, "step": 63 },
    { "epoch": 0.5925925925925926, "grad_norm": 2.05438232421875, "learning_rate": 1.9200000000000003e-06, "loss": 6.85, "step": 64 },
    { "epoch": 0.6018518518518519, "grad_norm": 1.104891300201416, "learning_rate": 1.95e-06, "loss": 6.1067, "step": 65 },
    { "epoch": 0.6111111111111112, "grad_norm": 1.34933602809906, "learning_rate": 1.98e-06, "loss": 6.1659, "step": 66 },
    { "epoch": 0.6203703703703703, "grad_norm": 1.5707837343215942, "learning_rate": 2.0100000000000002e-06, "loss": 6.1741, "step": 67 },
    { "epoch": 0.6296296296296297, "grad_norm": 0.9905802607536316, "learning_rate": 2.0400000000000004e-06, "loss": 5.9072, "step": 68 },
    { "epoch": 0.6388888888888888, "grad_norm": 1.134399652481079, "learning_rate": 2.07e-06, "loss": 5.8005, "step": 69 },
    { "epoch": 0.6481481481481481, "grad_norm": 1.7359548807144165, "learning_rate": 2.1e-06, "loss": 6.7931, "step": 70 },
    { "epoch": 0.6574074074074074, "grad_norm": 1.9637112617492676, "learning_rate": 2.13e-06, "loss": 6.0, "step": 71 },
    { "epoch": 0.6666666666666666, "grad_norm": 1.908047080039978, "learning_rate": 2.16e-06, "loss": 6.9322, "step": 72 },
    { "epoch": 0.6759259259259259, "grad_norm": 1.6848489046096802, "learning_rate": 2.19e-06, "loss": 6.3743, "step": 73 },
    { "epoch": 0.6851851851851852, "grad_norm": 1.158486247062683, "learning_rate": 2.22e-06, "loss": 5.7395, "step": 74 },
    { "epoch": 0.6944444444444444, "grad_norm": 2.9409470558166504, "learning_rate": 2.25e-06, "loss": 7.1889, "step": 75 },
    { "epoch": 0.7037037037037037, "grad_norm": 1.5408318042755127, "learning_rate": 2.28e-06, "loss": 6.5631, "step": 76 },
    { "epoch": 0.7129629629629629, "grad_norm": 1.3003695011138916, "learning_rate": 2.31e-06, "loss": 5.8228, "step": 77 },
    { "epoch": 0.7222222222222222, "grad_norm": 1.6015976667404175, "learning_rate": 2.34e-06, "loss": 6.0852, "step": 78 },
    { "epoch": 0.7314814814814815, "grad_norm": 1.282989501953125, "learning_rate": 2.37e-06, "loss": 6.0135, "step": 79 },
    { "epoch": 0.7407407407407407, "grad_norm": 1.3845688104629517, "learning_rate": 2.4000000000000003e-06, "loss": 5.9315, "step": 80 },
    { "epoch": 0.75, "grad_norm": 1.431152105331421, "learning_rate": 2.43e-06, "loss": 5.7324, "step": 81 },
    { "epoch": 0.75, "eval_loss": 5.23203182220459, "eval_runtime": 10.3071, "eval_samples_per_second": 10.187, "eval_steps_per_second": 10.187, "step": 81 },
    { "epoch": 0.7592592592592593, "grad_norm": 1.7849000692367554, "learning_rate": 2.4599999999999997e-06, "loss": 6.3697, "step": 82 },
    { "epoch": 0.7685185185185185, "grad_norm": 1.2546958923339844, "learning_rate": 2.49e-06, "loss": 6.4323, "step": 83 },
    { "epoch": 0.7777777777777778, "grad_norm": 1.3903110027313232, "learning_rate": 2.52e-06, "loss": 6.1479, "step": 84 },
    { "epoch": 0.7870370370370371, "grad_norm": 1.8783302307128906, "learning_rate": 2.55e-06, "loss": 5.7606, "step": 85 },
    { "epoch": 0.7962962962962963, "grad_norm": 1.2064532041549683, "learning_rate": 2.58e-06, "loss": 6.0706, "step": 86 },
    { "epoch": 0.8055555555555556, "grad_norm": 1.9301965236663818, "learning_rate": 2.61e-06, "loss": 6.2393, "step": 87 },
    { "epoch": 0.8148148148148148, "grad_norm": 1.259318232536316, "learning_rate": 2.64e-06, "loss": 5.8821, "step": 88 },
    { "epoch": 0.8240740740740741, "grad_norm": 1.88141667842865, "learning_rate": 2.6700000000000003e-06, "loss": 6.8863, "step": 89 },
    { "epoch": 0.8333333333333334, "grad_norm": 1.1480059623718262, "learning_rate": 2.7e-06, "loss": 5.7941, "step": 90 },
    { "epoch": 0.8425925925925926, "grad_norm": 1.1357027292251587, "learning_rate": 2.73e-06, "loss": 5.7938, "step": 91 },
    { "epoch": 0.8518518518518519, "grad_norm": 1.2564626932144165, "learning_rate": 2.7600000000000003e-06, "loss": 5.8955, "step": 92 },
    { "epoch": 0.8611111111111112, "grad_norm": 1.5674831867218018, "learning_rate": 2.7900000000000004e-06, "loss": 6.0711, "step": 93 },
    { "epoch": 0.8703703703703703, "grad_norm": 2.784726142883301, "learning_rate": 2.82e-06, "loss": 6.9214, "step": 94 },
    { "epoch": 0.8796296296296297, "grad_norm": 1.2815853357315063, "learning_rate": 2.85e-06, "loss": 5.9669, "step": 95 },
    { "epoch": 0.8888888888888888, "grad_norm": 3.059992551803589, "learning_rate": 2.88e-06, "loss": 7.6911, "step": 96 },
    { "epoch": 0.8981481481481481, "grad_norm": 1.2001030445098877, "learning_rate": 2.91e-06, "loss": 6.1174, "step": 97 },
    { "epoch": 0.9074074074074074, "grad_norm": 1.685596227645874, "learning_rate": 2.9400000000000002e-06, "loss": 6.4517, "step": 98 },
    { "epoch": 0.9166666666666666, "grad_norm": 1.5668795108795166, "learning_rate": 2.97e-06, "loss": 6.3579, "step": 99 },
    { "epoch": 0.9259259259259259, "grad_norm": 1.2336190938949585, "learning_rate": 3e-06, "loss": 6.2871, "step": 100 },
    { "epoch": 0.9351851851851852, "grad_norm": 1.331573247909546, "learning_rate": 2.9998524776393626e-06, "loss": 5.9677, "step": 101 },
    { "epoch": 0.9444444444444444, "grad_norm": 1.5391007661819458, "learning_rate": 2.9994099395745795e-06, "loss": 6.6068, "step": 102 },
    { "epoch": 0.9537037037037037, "grad_norm": 4.519767761230469, "learning_rate": 2.9986724728513302e-06, "loss": 8.1238, "step": 103 },
    { "epoch": 0.9629629629629629, "grad_norm": 1.5254385471343994, "learning_rate": 2.997640222526725e-06, "loss": 6.4137, "step": 104 },
    { "epoch": 0.9722222222222222, "grad_norm": 1.28110671043396, "learning_rate": 2.996313391640769e-06, "loss": 5.887, "step": 105 },
    { "epoch": 0.9814814814814815, "grad_norm": 3.4707837104797363, "learning_rate": 2.994692241176429e-06, "loss": 7.9841, "step": 106 },
    { "epoch": 0.9907407407407407, "grad_norm": 1.5300663709640503, "learning_rate": 2.9927770900082955e-06, "loss": 6.2007, "step": 107 },
    { "epoch": 1.0, "grad_norm": 1.747676134109497, "learning_rate": 2.9905683148398643e-06, "loss": 6.5292, "step": 108 },
    { "epoch": 1.0, "eval_loss": 5.185390949249268, "eval_runtime": 10.6897, "eval_samples_per_second": 9.823, "eval_steps_per_second": 9.823, "step": 108 },
    { "epoch": 1.0092592592592593, "grad_norm": 1.57361900806427, "learning_rate": 2.988066350129437e-06, "loss": 6.1537, "step": 109 },
    { "epoch": 1.0185185185185186, "grad_norm": 5.155310153961182, "learning_rate": 2.9852716880046687e-06, "loss": 7.9648, "step": 110 },
    { "epoch": 1.0277777777777777, "grad_norm": 1.0643362998962402, "learning_rate": 2.982184878165763e-06, "loss": 5.1748, "step": 111 },
    { "epoch": 1.0092592592592593, "grad_norm": 1.0376684665679932, "learning_rate": 2.9788065277773537e-06, "loss": 5.6556, "step": 112 },
    { "epoch": 1.0185185185185186, "grad_norm": 1.228864073753357, "learning_rate": 2.9751373013490727e-06, "loss": 5.7668, "step": 113 },
    { "epoch": 1.0277777777777777, "grad_norm": 1.7278510332107544, "learning_rate": 2.971177920604846e-06, "loss": 6.3775, "step": 114 },
    { "epoch": 1.037037037037037, "grad_norm": 1.1904284954071045, "learning_rate": 2.9669291643409314e-06, "loss": 5.7487, "step": 115 },
    { "epoch": 1.0462962962962963, "grad_norm": 2.941854238510132, "learning_rate": 2.9623918682727352e-06, "loss": 7.2423, "step": 116 },
    { "epoch": 1.0555555555555556, "grad_norm": 1.5035470724105835, "learning_rate": 2.9575669248704265e-06, "loss": 6.1212, "step": 117 },
    { "epoch": 1.0648148148148149, "grad_norm": 1.4733242988586426, "learning_rate": 2.9524552831833926e-06, "loss": 6.3059, "step": 118 },
    { "epoch": 1.074074074074074, "grad_norm": 1.3305742740631104, "learning_rate": 2.947057948653564e-06, "loss": 6.1691, "step": 119 },
    { "epoch": 1.0833333333333333, "grad_norm": 1.2160289287567139, "learning_rate": 2.9413759829176495e-06, "loss": 5.9701, "step": 120 },
    { "epoch": 1.0925925925925926, "grad_norm": 1.9012755155563354, "learning_rate": 2.935410503598313e-06, "loss": 6.6399, "step": 121 },
    { "epoch": 1.1018518518518519, "grad_norm": 1.3863564729690552, "learning_rate": 2.9291626840843446e-06, "loss": 6.0122, "step": 122 },
    { "epoch": 1.1111111111111112, "grad_norm": 1.1440134048461914, "learning_rate": 2.9226337532998544e-06, "loss": 5.9895, "step": 123 },
    { "epoch": 1.1203703703703705, "grad_norm": 1.4871304035186768, "learning_rate": 2.9158249954625514e-06, "loss": 6.3919, "step": 124 },
    { "epoch": 1.1296296296296295, "grad_norm": 1.574532389640808, "learning_rate": 2.908737749831142e-06, "loss": 6.489, "step": 125 },
    { "epoch": 1.1388888888888888, "grad_norm": 2.674992322921753, "learning_rate": 2.9013734104419e-06, "loss": 6.965, "step": 126 },
    { "epoch": 1.1481481481481481, "grad_norm": 1.4959555864334106, "learning_rate": 2.8937334258344676e-06, "loss": 5.8652, "step": 127 },
    { "epoch": 1.1574074074074074, "grad_norm": 3.064631223678589, "learning_rate": 2.88581929876693e-06, "loss": 7.1246, "step": 128 },
    { "epoch": 1.1666666666666667, "grad_norm": 1.7988754510879517, "learning_rate": 2.8776325859202315e-06, "loss": 6.1511, "step": 129 },
    { "epoch": 1.175925925925926, "grad_norm": 1.4231789112091064, "learning_rate": 2.869174897591978e-06, "loss": 6.0368, "step": 130 },
    { "epoch": 1.1851851851851851, "grad_norm": 2.6319122314453125, "learning_rate": 2.8604478973797005e-06, "loss": 7.1228, "step": 131 },
    { "epoch": 1.1944444444444444, "grad_norm": 1.4405936002731323, "learning_rate": 2.851453301853629e-06, "loss": 5.9765, "step": 132 },
    { "epoch": 1.2037037037037037, "grad_norm": 1.669622540473938, "learning_rate": 2.842192880219049e-06, "loss": 6.4033, "step": 133 },
    { "epoch": 1.212962962962963, "grad_norm": 1.1244466304779053, "learning_rate": 2.832668453968309e-06, "loss": 5.5845, "step": 134 },
    { "epoch": 1.2222222222222223, "grad_norm": 1.0342087745666504, "learning_rate": 2.8228818965225326e-06, "loss": 5.6106, "step": 135 },
    { "epoch": 1.2222222222222223, "eval_loss": 5.1238203048706055, "eval_runtime": 10.8032, "eval_samples_per_second": 9.719, "eval_steps_per_second": 9.719, "step": 135 },
    { "epoch": 1.2314814814814814, "grad_norm": 2.5150041580200195, "learning_rate": 2.8128351328631308e-06, "loss": 6.5318, "step": 136 },
    { "epoch": 1.2407407407407407, "grad_norm": 1.7575809955596924, "learning_rate": 2.8025301391531584e-06, "loss": 6.225, "step": 137 },
    { "epoch": 1.25, "grad_norm": 1.5736333131790161, "learning_rate": 2.7919689423486137e-06, "loss": 5.6247, "step": 138 },
    { "epoch": 1.2592592592592593, "grad_norm": 0.9995443820953369, "learning_rate": 2.781153619799741e-06, "loss": 5.4866, "step": 139 },
    { "epoch": 1.2685185185185186, "grad_norm": 1.7936508655548096, "learning_rate": 2.7700862988424264e-06, "loss": 6.4679, "step": 140 },
    { "epoch": 1.2777777777777777, "grad_norm": 1.4072571992874146, "learning_rate": 2.7587691563797533e-06, "loss": 6.0924, "step": 141 },
    { "epoch": 1.287037037037037, "grad_norm": 1.9437594413757324, "learning_rate": 2.747204418453818e-06, "loss": 6.3988, "step": 142 },
    { "epoch": 1.2962962962962963, "grad_norm": 1.1792351007461548, "learning_rate": 2.735394359807872e-06, "loss": 6.0245, "step": 143 },
    { "epoch": 1.3055555555555556, "grad_norm": 1.2097288370132446, "learning_rate": 2.7233413034388904e-06, "loss": 6.0232, "step": 144 },
    { "epoch": 1.3148148148148149, "grad_norm": 1.5997315645217896, "learning_rate": 2.711047620140644e-06, "loss": 5.9724, "step": 145 },
    { "epoch": 1.324074074074074, "grad_norm": 1.9537619352340698, "learning_rate": 2.6985157280373736e-06, "loss": 6.336, "step": 146 },
    { "epoch": 1.3333333333333333, "grad_norm": 1.4596198797225952, "learning_rate": 2.685748092108155e-06, "loss": 6.2286, "step": 147 },
    { "epoch": 1.3425925925925926, "grad_norm": 1.2177029848098755, "learning_rate": 2.6727472237020448e-06, "loss": 5.8753, "step": 148 },
    { "epoch": 1.3518518518518519, "grad_norm": 1.5970029830932617, "learning_rate": 2.6595156800441053e-06, "loss": 6.2522, "step": 149 },
    { "epoch": 1.3611111111111112, "grad_norm": 1.1032902002334595, "learning_rate": 2.646056063732411e-06, "loss": 5.795, "step": 150 },
    { "epoch": 1.3703703703703702, "grad_norm": 1.10838782787323, "learning_rate": 2.632371022226124e-06, "loss": 5.4648, "step": 151 },
    { "epoch": 1.3796296296296298, "grad_norm": 1.3732211589813232, "learning_rate": 2.6184632473247484e-06, "loss": 5.568, "step": 152 },
    { "epoch": 1.3888888888888888, "grad_norm": 1.188904881477356, "learning_rate": 2.6043354746386645e-06, "loss": 5.8007, "step": 153 },
    { "epoch": 1.3981481481481481, "grad_norm": 1.3125028610229492, "learning_rate": 2.5899904830510406e-06, "loss": 5.7965, "step": 154 },
    { "epoch": 1.4074074074074074, "grad_norm": 1.2234677076339722, "learning_rate": 2.575431094171241e-06, "loss": 5.9759, "step": 155 },
    { "epoch": 1.4166666666666667, "grad_norm": 2.0235507488250732, "learning_rate": 2.5606601717798212e-06, "loss": 6.5055, "step": 156 },
    { "epoch": 1.425925925925926, "grad_norm": 1.3678350448608398, "learning_rate": 2.5456806212652348e-06, "loss": 5.8768, "step": 157 },
    { "epoch": 1.4351851851851851, "grad_norm": 1.309804081916809, "learning_rate": 2.5304953890523524e-06, "loss": 6.0516, "step": 158 },
    { "epoch": 1.4444444444444444, "grad_norm": 1.4316556453704834, "learning_rate": 2.5151074620229114e-06, "loss": 6.2186, "step": 159 },
    { "epoch": 1.4537037037037037, "grad_norm": 1.308398723602295, "learning_rate": 2.499519866928006e-06, "loss": 5.9185, "step": 160 },
    { "epoch": 1.462962962962963, "grad_norm": 1.7772057056427002, "learning_rate": 2.4837356697927366e-06, "loss": 6.4643, "step": 161 },
    { "epoch": 1.4722222222222223, "grad_norm": 1.4515641927719116, "learning_rate": 2.4677579753131316e-06, "loss": 6.3981, "step": 162 },
    { "epoch": 1.4722222222222223, "eval_loss": 5.054426193237305, "eval_runtime": 10.7718, "eval_samples_per_second": 9.748, "eval_steps_per_second": 9.748, "step": 162 },
    { "epoch": 1.4814814814814814, "grad_norm": 2.9240331649780273, "learning_rate": 2.4515899262454685e-06, "loss": 6.8221, "step": 163 },
    { "epoch": 1.4907407407407407, "grad_norm": 3.2064929008483887, "learning_rate": 2.4352347027881005e-06, "loss": 7.6826, "step": 164 },
    { "epoch": 1.5, "grad_norm": 1.6117157936096191, "learning_rate": 2.4186955219559254e-06, "loss": 5.5413, "step": 165 },
    { "epoch": 1.5092592592592593, "grad_norm": 1.5916537046432495, "learning_rate": 2.401975636947609e-06, "loss": 6.1638, "step": 166 },
    { "epoch": 1.5185185185185186, "grad_norm": 1.6315852403640747, "learning_rate": 2.3850783365056922e-06, "loss": 6.3381, "step": 167 },
    { "epoch": 1.5277777777777777, "grad_norm": 1.1859897375106812, "learning_rate": 2.368006944269709e-06, "loss": 5.7726, "step": 168 },
    { "epoch": 1.5370370370370372, "grad_norm": 1.2986359596252441, "learning_rate": 2.3507648181224336e-06, "loss": 5.5423, "step": 169 },
    { "epoch": 1.5462962962962963, "grad_norm": 1.3099842071533203, "learning_rate": 2.3333553495294033e-06, "loss": 6.2081, "step": 170 },
    { "epoch": 1.5555555555555556, "grad_norm": 1.8448351621627808, "learning_rate": 2.315781962871823e-06, "loss": 6.4411, "step": 171 },
    { "epoch": 1.5648148148148149, "grad_norm": 1.263071894645691, "learning_rate": 2.298048114773005e-06, "loss": 5.769, "step": 172 },
    { "epoch": 1.574074074074074, "grad_norm": 1.4397268295288086, "learning_rate": 2.280157293418462e-06, "loss": 6.1988, "step": 173 },
    { "epoch": 1.5833333333333335, "grad_norm": 1.3311412334442139, "learning_rate": 2.262113017869794e-06, "loss": 5.9678, "step": 174 },
    { "epoch": 1.5925925925925926, "grad_norm": 1.5137203931808472, "learning_rate": 2.243918837372502e-06, "loss": 6.4093, "step": 175 },
    { "epoch": 1.6018518518518519, "grad_norm": 2.532318115234375, "learning_rate": 2.22557833065786e-06, "loss": 6.573, "step": 176 },
    { "epoch": 1.6111111111111112, "grad_norm": 1.7535362243652344, "learning_rate": 2.207095105238997e-06, "loss": 6.1462, "step": 177 },
    { "epoch": 1.6203703703703702, "grad_norm": 1.3252042531967163, "learning_rate": 2.1884727967013063e-06, "loss": 5.7489, "step": 178 },
    { "epoch": 1.6296296296296298, "grad_norm": 2.172076463699341, "learning_rate": 2.1697150679873442e-06, "loss": 6.3797, "step": 179 },
    { "epoch": 1.6388888888888888, "grad_norm": 0.8812606334686279, "learning_rate": 2.150825608676337e-06, "loss": 5.4227, "step": 180 },
    { "epoch": 1.6481481481481481, "grad_norm": 1.4979366064071655, "learning_rate": 2.1318081342584566e-06, "loss": 5.811, "step": 181 },
    { "epoch": 1.6574074074074074, "grad_norm": 1.277028203010559, "learning_rate": 2.1126663854039945e-06, "loss": 5.9072, "step": 182 },
    { "epoch": 1.6666666666666665, "grad_norm": 1.668641448020935, "learning_rate": 2.0934041272275884e-06, "loss": 6.1502, "step": 183 },
    { "epoch": 1.675925925925926, "grad_norm": 0.8518059849739075, "learning_rate": 2.074025148547635e-06, "loss": 5.2586, "step": 184 },
    { "epoch": 1.6851851851851851, "grad_norm": 1.7262235879898071, "learning_rate": 2.0545332611410427e-06, "loss": 5.9965, "step": 185 },
    { "epoch": 1.6944444444444444, "grad_norm": 4.665543079376221, "learning_rate": 2.0349322989934695e-06, "loss": 7.4106, "step": 186 },
    { "epoch": 1.7037037037037037, "grad_norm": 1.1285862922668457, "learning_rate": 2.0152261175451915e-06, "loss": 5.6628, "step": 187 },
    { "epoch": 1.7129629629629628, "grad_norm": 1.108992338180542, "learning_rate": 1.9954185929327507e-06, "loss": 5.4876, "step": 188 },
    { "epoch": 1.7222222222222223, "grad_norm": 1.1134352684020996, "learning_rate": 1.9755136212265323e-06, "loss": 5.602, "step": 189 },
    { "epoch": 1.7222222222222223, "eval_loss": 4.992879867553711, "eval_runtime": 10.5803, "eval_samples_per_second": 9.924, "eval_steps_per_second": 9.924, "step": 189 },
    { "epoch": 1.7314814814814814, "grad_norm": 1.215661883354187, "learning_rate": 1.9555151176644222e-06, "loss": 5.7391, "step": 190 },
    { "epoch": 1.7407407407407407, "grad_norm": 1.2140839099884033, "learning_rate": 1.9354270158816935e-06, "loss": 5.4932, "step": 191 },
    { "epoch": 1.75, "grad_norm": 1.3728704452514648, "learning_rate": 1.915253267137274e-06, "loss": 5.7001, "step": 192 },
    { "epoch": 1.7592592592592593, "grad_norm": 5.121810436248779, "learning_rate": 1.894997839536547e-06, "loss": 7.9136, "step": 193 },
    { "epoch": 1.7685185185185186, "grad_norm": 1.3116612434387207, "learning_rate": 1.8746647172508404e-06, "loss": 5.7987, "step": 194 },
    { "epoch": 1.7777777777777777, "grad_norm": 1.1180241107940674, "learning_rate": 1.8542578997337519e-06, "loss": 5.9064, "step": 195 },
    { "epoch": 1.7870370370370372, "grad_norm": 5.294866561889648, "learning_rate": 1.8337814009344715e-06, "loss": 8.2684, "step": 196 },
    { "epoch": 1.7962962962962963, "grad_norm": 1.0381337404251099, "learning_rate": 1.813239248508255e-06, "loss": 5.487, "step": 197 },
    { "epoch": 1.8055555555555556, "grad_norm": 1.2923859357833862, "learning_rate": 1.7926354830241926e-06, "loss": 6.0108, "step": 198 },
    { "epoch": 1.8148148148148149, "grad_norm": 1.3527953624725342, "learning_rate": 1.7719741571704477e-06, "loss": 6.3468, "step": 199 },
    { "epoch": 1.824074074074074, "grad_norm": 1.4213413000106812, "learning_rate": 1.7512593349571046e-06, "loss": 6.0251, "step": 200 },
    { "epoch": 1.8333333333333335, "grad_norm": 1.3854726552963257, "learning_rate": 1.730495090916795e-06, "loss": 5.8197, "step": 201 },
    { "epoch": 1.8425925925925926, "grad_norm": 0.9397538304328918, "learning_rate": 1.7096855093032494e-06, "loss": 5.4813, "step": 202 },
    { "epoch": 1.8518518518518519, "grad_norm": 0.7849190831184387, "learning_rate": 1.6888346832879382e-06, "loss": 5.3213, "step": 203 },
    { "epoch": 1.8611111111111112, "grad_norm": 1.316622018814087, "learning_rate": 1.6679467141549617e-06, "loss": 5.358, "step": 204 },
    { "epoch": 1.8703703703703702, "grad_norm": 1.1983349323272705, "learning_rate": 1.6470257104943413e-06, "loss": 5.8328, "step": 205 },
    { "epoch": 1.8796296296296298, "grad_norm": 1.1115864515304565, "learning_rate": 1.6260757873938715e-06, "loss": 5.6616, "step": 206 },
    { "epoch": 1.8888888888888888, "grad_norm": 1.6418949365615845, "learning_rate": 1.6051010656297009e-06, "loss": 5.8618, "step": 207 },
    { "epoch": 1.8981481481481481, "grad_norm": 1.2098896503448486, "learning_rate": 1.5841056708557877e-06, "loss": 5.7551, "step": 208 },
    { "epoch": 1.9074074074074074, "grad_norm": 2.523620843887329, "learning_rate": 1.5630937327923979e-06, "loss": 6.0457, "step": 209 },
    { "epoch": 1.9166666666666665, "grad_norm": 1.2053682804107666, "learning_rate": 1.5420693844138035e-06, "loss": 5.7501, "step": 210 },
    { "epoch": 1.925925925925926, "grad_norm": 1.0068210363388062, "learning_rate": 1.5210367611353436e-06, "loss": 5.2868, "step": 211 },
    { "epoch": 1.9351851851851851, "grad_norm": 1.0384793281555176, "learning_rate": 1.5e-06, "loss": 5.5413, "step": 212 },
    { "epoch": 1.9444444444444444, "grad_norm": 0.9215264916419983, "learning_rate": 1.4789632388646567e-06, "loss": 5.3834, "step": 213 },
    { "epoch": 1.9537037037037037, "grad_norm": 1.250044584274292, "learning_rate": 1.4579306155861968e-06, "loss": 5.9769, "step": 214 },
    { "epoch": 1.9629629629629628, "grad_norm": 0.9464189410209656, "learning_rate": 1.4369062672076026e-06, "loss": 5.3573, "step": 215 },
    { "epoch": 1.9722222222222223, "grad_norm": 0.9586074352264404, "learning_rate": 1.4158943291442122e-06, "loss": 5.3998, "step": 216 },
    { "epoch": 1.9722222222222223, "eval_loss": 4.946777820587158, "eval_runtime": 10.6655, "eval_samples_per_second": 9.845, "eval_steps_per_second": 9.845, "step": 216 }
  ],
  "logging_steps": 1,
  "max_steps": 324,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 108,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.029536027869184e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}