{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 259, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.2000000000000002e-07, |
|
"loss": 1.7394, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 1.7697, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6e-07, |
|
"loss": 1.7627, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 1.735, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.7082, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.2e-07, |
|
"loss": 1.6211, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.400000000000001e-07, |
|
"loss": 1.6883, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 1.5584, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.08e-06, |
|
"loss": 1.5924, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.5434, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.32e-06, |
|
"loss": 1.5874, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.44e-06, |
|
"loss": 1.5128, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.56e-06, |
|
"loss": 1.476, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6800000000000002e-06, |
|
"loss": 1.4491, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.8e-06, |
|
"loss": 1.4457, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 1.5084, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.0400000000000004e-06, |
|
"loss": 1.4479, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.16e-06, |
|
"loss": 1.4411, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.28e-06, |
|
"loss": 1.4247, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.4575, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.52e-06, |
|
"loss": 1.4149, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.64e-06, |
|
"loss": 1.3653, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.7600000000000003e-06, |
|
"loss": 1.38, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.88e-06, |
|
"loss": 1.3726, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3e-06, |
|
"loss": 1.3391, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.12e-06, |
|
"loss": 1.3389, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.2400000000000003e-06, |
|
"loss": 1.3534, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.3600000000000004e-06, |
|
"loss": 1.3254, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.4799999999999997e-06, |
|
"loss": 1.3376, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 3.6e-06, |
|
"loss": 1.3301, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 3.72e-06, |
|
"loss": 1.2873, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 3.8400000000000005e-06, |
|
"loss": 1.3017, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.96e-06, |
|
"loss": 1.2682, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.080000000000001e-06, |
|
"loss": 1.2898, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.2e-06, |
|
"loss": 1.3131, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.32e-06, |
|
"loss": 1.2749, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.44e-06, |
|
"loss": 1.2548, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.56e-06, |
|
"loss": 1.3048, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.68e-06, |
|
"loss": 1.2622, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.2622, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9199999999999995e-06, |
|
"loss": 1.2856, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.04e-06, |
|
"loss": 1.2536, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.16e-06, |
|
"loss": 1.2843, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.28e-06, |
|
"loss": 1.2241, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.4e-06, |
|
"loss": 1.2465, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 5.5200000000000005e-06, |
|
"loss": 1.2621, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 5.64e-06, |
|
"loss": 1.2394, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 5.76e-06, |
|
"loss": 1.2315, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 5.8800000000000005e-06, |
|
"loss": 1.1924, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6e-06, |
|
"loss": 1.2384, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 5.9999719894583576e-06, |
|
"loss": 1.2289, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 5.9998879583564896e-06, |
|
"loss": 1.241, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 5.999747908263568e-06, |
|
"loss": 1.1919, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.999551841794844e-06, |
|
"loss": 1.2528, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.999299762611606e-06, |
|
"loss": 1.2073, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.998991675421101e-06, |
|
"loss": 1.256, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.998627585976455e-06, |
|
"loss": 1.256, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.998207501076565e-06, |
|
"loss": 1.2178, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.997731428565966e-06, |
|
"loss": 1.226, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.99719937733469e-06, |
|
"loss": 1.2135, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.996611357318102e-06, |
|
"loss": 1.2189, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.995967379496706e-06, |
|
"loss": 1.2547, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.9952674558959485e-06, |
|
"loss": 1.1776, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.994511599585988e-06, |
|
"loss": 1.1828, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.993699824681454e-06, |
|
"loss": 1.2118, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.992832146341183e-06, |
|
"loss": 1.2142, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 5.991908580767936e-06, |
|
"loss": 1.1621, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 5.990929145208094e-06, |
|
"loss": 1.1942, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.989893857951337e-06, |
|
"loss": 1.22, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.988802738330303e-06, |
|
"loss": 1.1666, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 5.9876558067202265e-06, |
|
"loss": 1.2408, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.986453084538559e-06, |
|
"loss": 1.1819, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.985194594244564e-06, |
|
"loss": 1.183, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.983880359338908e-06, |
|
"loss": 1.2063, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.982510404363209e-06, |
|
"loss": 1.168, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.981084754899591e-06, |
|
"loss": 1.2035, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.979603437570195e-06, |
|
"loss": 1.1778, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.978066480036686e-06, |
|
"loss": 1.2116, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.976473910999742e-06, |
|
"loss": 1.1549, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.974825760198511e-06, |
|
"loss": 1.1782, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.973122058410054e-06, |
|
"loss": 1.2074, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.971362837448781e-06, |
|
"loss": 1.208, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.969548130165845e-06, |
|
"loss": 1.1907, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.967677970448536e-06, |
|
"loss": 1.1988, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.965752393219645e-06, |
|
"loss": 1.17, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.963771434436812e-06, |
|
"loss": 1.1714, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.961735131091857e-06, |
|
"loss": 1.1543, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.959643521210085e-06, |
|
"loss": 1.226, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.957496643849582e-06, |
|
"loss": 1.1877, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 5.955294539100477e-06, |
|
"loss": 1.1982, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 5.953037248084204e-06, |
|
"loss": 1.1428, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.950724812952725e-06, |
|
"loss": 1.1791, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.948357276887745e-06, |
|
"loss": 1.1913, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 5.945934684099912e-06, |
|
"loss": 1.1774, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.94345707982798e-06, |
|
"loss": 1.1502, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.9409245103379775e-06, |
|
"loss": 1.1519, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 5.938337022922332e-06, |
|
"loss": 1.1538, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 5.935694665898992e-06, |
|
"loss": 1.1838, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 5.932997488610526e-06, |
|
"loss": 1.1512, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.9302455414231975e-06, |
|
"loss": 1.1567, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.927438875726029e-06, |
|
"loss": 1.1236, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 5.924577543929837e-06, |
|
"loss": 1.1731, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.921661599466257e-06, |
|
"loss": 1.1685, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 5.918691096786745e-06, |
|
"loss": 1.1885, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.915666091361561e-06, |
|
"loss": 1.2044, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.912586639678732e-06, |
|
"loss": 1.1825, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 5.909452799242996e-06, |
|
"loss": 1.1341, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.906264628574734e-06, |
|
"loss": 1.1393, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.90302218720887e-06, |
|
"loss": 1.1612, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5.899725535693762e-06, |
|
"loss": 1.1081, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.896374735590074e-06, |
|
"loss": 1.1474, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.892969849469624e-06, |
|
"loss": 1.2151, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.889510940914214e-06, |
|
"loss": 1.1313, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.885998074514447e-06, |
|
"loss": 1.1518, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.882431315868514e-06, |
|
"loss": 1.1827, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.878810731580978e-06, |
|
"loss": 1.1554, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.875136389261522e-06, |
|
"loss": 1.1353, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.871408357523693e-06, |
|
"loss": 1.2146, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.867626705983617e-06, |
|
"loss": 1.1441, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.863791505258699e-06, |
|
"loss": 1.1537, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.859902826966304e-06, |
|
"loss": 1.1486, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.855960743722423e-06, |
|
"loss": 1.1022, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.851965329140315e-06, |
|
"loss": 1.1632, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.847916657829129e-06, |
|
"loss": 1.1447, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.8438148053925185e-06, |
|
"loss": 1.1589, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.839659848427221e-06, |
|
"loss": 1.1597, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.835451864521632e-06, |
|
"loss": 1.1661, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.831190932254359e-06, |
|
"loss": 1.1426, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.826877131192748e-06, |
|
"loss": 1.1405, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.822510541891403e-06, |
|
"loss": 1.1292, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.818091245890677e-06, |
|
"loss": 1.1677, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.8136193257151535e-06, |
|
"loss": 1.1283, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.809094864872104e-06, |
|
"loss": 1.1596, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.804517947849927e-06, |
|
"loss": 1.1647, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.799888660116573e-06, |
|
"loss": 1.1448, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.795207088117946e-06, |
|
"loss": 1.1514, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.790473319276292e-06, |
|
"loss": 1.1466, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.785687441988563e-06, |
|
"loss": 1.15, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.780849545624768e-06, |
|
"loss": 1.1518, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.775959720526308e-06, |
|
"loss": 1.0718, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.7710180580042804e-06, |
|
"loss": 1.1932, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.766024650337782e-06, |
|
"loss": 1.1303, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.760979590772182e-06, |
|
"loss": 1.141, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.75588297351738e-06, |
|
"loss": 1.1325, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.750734893746051e-06, |
|
"loss": 1.1484, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.7455354475918615e-06, |
|
"loss": 1.1534, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.740284732147681e-06, |
|
"loss": 1.15, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.7349828454637654e-06, |
|
"loss": 1.0994, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.7296298865459275e-06, |
|
"loss": 1.1786, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.724225955353685e-06, |
|
"loss": 1.1118, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.718771152798398e-06, |
|
"loss": 1.1282, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.713265580741383e-06, |
|
"loss": 1.0907, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.70770934199201e-06, |
|
"loss": 1.1562, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7021025403057835e-06, |
|
"loss": 1.182, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.696445280382404e-06, |
|
"loss": 1.1148, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.690737667863816e-06, |
|
"loss": 1.1172, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.684979809332232e-06, |
|
"loss": 1.1513, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.6791718123081425e-06, |
|
"loss": 1.1334, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.673313785248308e-06, |
|
"loss": 1.118, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.667405837543736e-06, |
|
"loss": 1.1427, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.661448079517636e-06, |
|
"loss": 1.1403, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.6554406224233634e-06, |
|
"loss": 1.1438, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.649383578442334e-06, |
|
"loss": 1.1738, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.643277060681938e-06, |
|
"loss": 1.1444, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.63712118317342e-06, |
|
"loss": 1.0963, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.630916060869758e-06, |
|
"loss": 1.1497, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.624661809643507e-06, |
|
"loss": 1.1658, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.618358546284646e-06, |
|
"loss": 1.1281, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.612006388498386e-06, |
|
"loss": 1.124, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.605605454902983e-06, |
|
"loss": 1.0731, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.599155865027513e-06, |
|
"loss": 1.1296, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.592657739309649e-06, |
|
"loss": 1.1144, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.5861111990934034e-06, |
|
"loss": 1.1359, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.579516366626867e-06, |
|
"loss": 1.1321, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.572873365059928e-06, |
|
"loss": 1.1087, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.5661823184419655e-06, |
|
"loss": 1.0883, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.559443351719542e-06, |
|
"loss": 1.1151, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 5.5526565907340604e-06, |
|
"loss": 1.1163, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 5.545822162219423e-06, |
|
"loss": 1.1344, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 5.538940193799659e-06, |
|
"loss": 1.1188, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 5.532010813986544e-06, |
|
"loss": 1.084, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 5.5250341521772e-06, |
|
"loss": 1.1209, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 5.518010338651676e-06, |
|
"loss": 1.1116, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 5.51093950457052e-06, |
|
"loss": 1.1396, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 5.5038217819723285e-06, |
|
"loss": 1.1254, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 5.496657303771276e-06, |
|
"loss": 1.1158, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 5.489446203754641e-06, |
|
"loss": 1.162, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 5.482188616580301e-06, |
|
"loss": 1.1284, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 5.4748846777742195e-06, |
|
"loss": 1.1187, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 5.467534523727921e-06, |
|
"loss": 1.1254, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 5.4601382916959355e-06, |
|
"loss": 1.1071, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 5.452696119793238e-06, |
|
"loss": 1.1149, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 5.445208146992674e-06, |
|
"loss": 1.1392, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 5.437674513122359e-06, |
|
"loss": 1.1233, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 5.43009535886307e-06, |
|
"loss": 1.0951, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.422470825745617e-06, |
|
"loss": 1.0691, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.414801056148203e-06, |
|
"loss": 1.0983, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 5.407086193293761e-06, |
|
"loss": 1.1608, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.399326381247282e-06, |
|
"loss": 1.1355, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.391521764913127e-06, |
|
"loss": 1.1247, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.383672490032313e-06, |
|
"loss": 1.0917, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.375778703179803e-06, |
|
"loss": 1.1515, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.3678405517617595e-06, |
|
"loss": 1.1357, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.359858184012799e-06, |
|
"loss": 1.1481, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.3518317489932144e-06, |
|
"loss": 1.1765, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 5.343761396586203e-06, |
|
"loss": 1.1124, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 5.335647277495057e-06, |
|
"loss": 1.1391, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 5.327489543240359e-06, |
|
"loss": 1.113, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 5.3192883461571465e-06, |
|
"loss": 1.1074, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 5.311043839392064e-06, |
|
"loss": 1.1039, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 5.302756176900516e-06, |
|
"loss": 1.1237, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 5.294425513443777e-06, |
|
"loss": 1.1101, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 5.2860520045861104e-06, |
|
"loss": 1.0947, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 5.277635806691863e-06, |
|
"loss": 1.1155, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 5.2691770769225414e-06, |
|
"loss": 1.1326, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 5.260675973233882e-06, |
|
"loss": 1.1411, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 5.252132654372897e-06, |
|
"loss": 1.112, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 5.243547279874911e-06, |
|
"loss": 1.1079, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 5.234920010060586e-06, |
|
"loss": 1.0891, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 5.22625100603292e-06, |
|
"loss": 1.1221, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 5.217540429674246e-06, |
|
"loss": 1.1151, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.208788443643207e-06, |
|
"loss": 1.1116, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.1999952113717135e-06, |
|
"loss": 1.0972, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.191160897061898e-06, |
|
"loss": 1.124, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.1822856656830485e-06, |
|
"loss": 1.1053, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.173369682968522e-06, |
|
"loss": 1.1473, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.1644131154126544e-06, |
|
"loss": 1.1131, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.1554161302676544e-06, |
|
"loss": 1.0935, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.146378895540472e-06, |
|
"loss": 1.0677, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.137301579989665e-06, |
|
"loss": 1.1269, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.128184353122252e-06, |
|
"loss": 1.0783, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.119027385190542e-06, |
|
"loss": 1.1059, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.109830847188955e-06, |
|
"loss": 1.0468, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.100594910850832e-06, |
|
"loss": 1.0851, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.091319748645226e-06, |
|
"loss": 1.0387, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.082005533773682e-06, |
|
"loss": 1.1361, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.072652440167001e-06, |
|
"loss": 1.1285, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.063260642481997e-06, |
|
"loss": 1.0934, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.053830316098228e-06, |
|
"loss": 1.1059, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.044361637114729e-06, |
|
"loss": 1.1031, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.034854782346716e-06, |
|
"loss": 1.0757, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.0253099293222925e-06, |
|
"loss": 1.1218, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.015727256279126e-06, |
|
"loss": 1.12, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.0061069421611245e-06, |
|
"loss": 1.0985, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.996449166615094e-06, |
|
"loss": 1.1192, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.986754109987384e-06, |
|
"loss": 1.0662, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.97702195332052e-06, |
|
"loss": 1.0671, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.967252878349821e-06, |
|
"loss": 1.1043, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.9574470675000085e-06, |
|
"loss": 1.1036, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.947604703881797e-06, |
|
"loss": 1.0729, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.937725971288479e-06, |
|
"loss": 1.1265, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.927811054192486e-06, |
|
"loss": 1.0854, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.9178601377419506e-06, |
|
"loss": 1.1334, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.907873407757248e-06, |
|
"loss": 1.1184, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.89785105072752e-06, |
|
"loss": 1.105, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.8877932538072e-06, |
|
"loss": 1.1124, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.877700204812516e-06, |
|
"loss": 1.0862, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.867572092217977e-06, |
|
"loss": 1.0856, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.857409105152865e-06, |
|
"loss": 1.0905, |
|
"step": 259 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 777, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 5.793364463679701e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |