{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 160,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 2.6536,
      "step": 1
    },
    {
      "epoch": 0.25,
      "learning_rate": 4e-05,
      "loss": 3.3995,
      "step": 2
    },
    {
      "epoch": 0.38,
      "learning_rate": 6e-05,
      "loss": 4.281,
      "step": 3
    },
    {
      "epoch": 0.5,
      "learning_rate": 8e-05,
      "loss": 2.4605,
      "step": 4
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001,
      "loss": 2.538,
      "step": 5
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.935483870967742e-05,
      "loss": 2.629,
      "step": 6
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.870967741935484e-05,
      "loss": 2.1875,
      "step": 7
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.806451612903226e-05,
      "loss": 1.9768,
      "step": 8
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.741935483870968e-05,
      "loss": 2.0199,
      "step": 9
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.677419354838711e-05,
      "loss": 2.0266,
      "step": 10
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.612903225806452e-05,
      "loss": 1.7383,
      "step": 11
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.548387096774195e-05,
      "loss": 1.7494,
      "step": 12
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.483870967741936e-05,
      "loss": 1.8509,
      "step": 13
    },
    {
      "epoch": 1.75,
      "learning_rate": 9.419354838709677e-05,
      "loss": 1.8364,
      "step": 14
    },
    {
      "epoch": 1.88,
      "learning_rate": 9.35483870967742e-05,
      "loss": 1.6808,
      "step": 15
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.290322580645162e-05,
      "loss": 1.5642,
      "step": 16
    },
    {
      "epoch": 2.12,
      "learning_rate": 9.225806451612904e-05,
      "loss": 1.6336,
      "step": 17
    },
    {
      "epoch": 2.25,
      "learning_rate": 9.161290322580646e-05,
      "loss": 1.6452,
      "step": 18
    },
    {
      "epoch": 2.38,
      "learning_rate": 9.096774193548387e-05,
      "loss": 1.1714,
      "step": 19
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.096774193548387e-05,
      "loss": 1.9018,
      "step": 20
    },
    {
      "epoch": 2.62,
      "learning_rate": 9.032258064516129e-05,
      "loss": 1.6711,
      "step": 21
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.967741935483871e-05,
      "loss": 1.1646,
      "step": 22
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.903225806451614e-05,
      "loss": 1.0608,
      "step": 23
    },
    {
      "epoch": 3.0,
      "learning_rate": 8.838709677419355e-05,
      "loss": 1.2916,
      "step": 24
    },
    {
      "epoch": 3.12,
      "learning_rate": 8.774193548387098e-05,
      "loss": 0.9801,
      "step": 25
    },
    {
      "epoch": 3.25,
      "learning_rate": 8.709677419354839e-05,
      "loss": 1.0964,
      "step": 26
    },
    {
      "epoch": 3.38,
      "learning_rate": 8.645161290322581e-05,
      "loss": 1.1112,
      "step": 27
    },
    {
      "epoch": 3.5,
      "learning_rate": 8.580645161290323e-05,
      "loss": 1.2538,
      "step": 28
    },
    {
      "epoch": 3.62,
      "learning_rate": 8.516129032258064e-05,
      "loss": 1.0811,
      "step": 29
    },
    {
      "epoch": 3.75,
      "learning_rate": 8.451612903225808e-05,
      "loss": 0.9926,
      "step": 30
    },
    {
      "epoch": 3.88,
      "learning_rate": 8.387096774193549e-05,
      "loss": 1.3067,
      "step": 31
    },
    {
      "epoch": 4.0,
      "learning_rate": 8.32258064516129e-05,
      "loss": 0.9473,
      "step": 32
    },
    {
      "epoch": 4.12,
      "learning_rate": 8.258064516129033e-05,
      "loss": 0.8545,
      "step": 33
    },
    {
      "epoch": 4.25,
      "learning_rate": 8.193548387096774e-05,
      "loss": 0.704,
      "step": 34
    },
    {
      "epoch": 4.38,
      "learning_rate": 8.129032258064517e-05,
      "loss": 0.8111,
      "step": 35
    },
    {
      "epoch": 4.5,
      "learning_rate": 8.064516129032258e-05,
      "loss": 0.9651,
      "step": 36
    },
    {
      "epoch": 4.62,
      "learning_rate": 8e-05,
      "loss": 0.6063,
      "step": 37
    },
    {
      "epoch": 4.75,
      "learning_rate": 7.935483870967743e-05,
      "loss": 0.5452,
      "step": 38
    },
    {
      "epoch": 4.88,
      "learning_rate": 7.870967741935484e-05,
      "loss": 0.8213,
      "step": 39
    },
    {
      "epoch": 5.0,
      "learning_rate": 7.806451612903226e-05,
      "loss": 0.7832,
      "step": 40
    },
    {
      "epoch": 5.12,
      "learning_rate": 7.741935483870968e-05,
      "loss": 0.3778,
      "step": 41
    },
    {
      "epoch": 5.25,
      "learning_rate": 7.67741935483871e-05,
      "loss": 0.3637,
      "step": 42
    },
    {
      "epoch": 5.38,
      "learning_rate": 7.612903225806451e-05,
      "loss": 0.6275,
      "step": 43
    },
    {
      "epoch": 5.5,
      "learning_rate": 7.548387096774195e-05,
      "loss": 0.6516,
      "step": 44
    },
    {
      "epoch": 5.62,
      "learning_rate": 7.483870967741936e-05,
      "loss": 0.3812,
      "step": 45
    },
    {
      "epoch": 5.75,
      "learning_rate": 7.419354838709677e-05,
      "loss": 0.3557,
      "step": 46
    },
    {
      "epoch": 5.88,
      "learning_rate": 7.35483870967742e-05,
      "loss": 0.2963,
      "step": 47
    },
    {
      "epoch": 6.0,
      "learning_rate": 7.290322580645161e-05,
      "loss": 0.2674,
      "step": 48
    },
    {
      "epoch": 6.12,
      "learning_rate": 7.225806451612904e-05,
      "loss": 0.1111,
      "step": 49
    },
    {
      "epoch": 6.25,
      "learning_rate": 7.161290322580646e-05,
      "loss": 0.1296,
      "step": 50
    },
    {
      "epoch": 6.38,
      "learning_rate": 7.096774193548388e-05,
      "loss": 0.1281,
      "step": 51
    },
    {
      "epoch": 6.5,
      "learning_rate": 7.03225806451613e-05,
      "loss": 0.238,
      "step": 52
    },
    {
      "epoch": 6.62,
      "learning_rate": 6.967741935483871e-05,
      "loss": 0.1803,
      "step": 53
    },
    {
      "epoch": 6.75,
      "learning_rate": 6.903225806451613e-05,
      "loss": 0.1911,
      "step": 54
    },
    {
      "epoch": 6.88,
      "learning_rate": 6.838709677419355e-05,
      "loss": 0.2073,
      "step": 55
    },
    {
      "epoch": 7.0,
      "learning_rate": 6.774193548387096e-05,
      "loss": 0.2415,
      "step": 56
    },
    {
      "epoch": 7.12,
      "learning_rate": 6.709677419354839e-05,
      "loss": 0.1104,
      "step": 57
    },
    {
      "epoch": 7.25,
      "learning_rate": 6.645161290322582e-05,
      "loss": 0.1154,
      "step": 58
    },
    {
      "epoch": 7.38,
      "learning_rate": 6.580645161290323e-05,
      "loss": 0.0931,
      "step": 59
    },
    {
      "epoch": 7.5,
      "learning_rate": 6.516129032258065e-05,
      "loss": 0.0671,
      "step": 60
    },
    {
      "epoch": 7.62,
      "learning_rate": 6.451612903225807e-05,
      "loss": 0.1003,
      "step": 61
    },
    {
      "epoch": 7.75,
      "learning_rate": 6.387096774193548e-05,
      "loss": 0.1646,
      "step": 62
    },
    {
      "epoch": 7.88,
      "learning_rate": 6.32258064516129e-05,
      "loss": 0.1203,
      "step": 63
    },
    {
      "epoch": 8.0,
      "learning_rate": 6.258064516129033e-05,
      "loss": 0.1888,
      "step": 64
    },
    {
      "epoch": 8.12,
      "learning_rate": 6.193548387096774e-05,
      "loss": 0.0798,
      "step": 65
    },
    {
      "epoch": 8.25,
      "learning_rate": 6.129032258064517e-05,
      "loss": 0.0921,
      "step": 66
    },
    {
      "epoch": 8.38,
      "learning_rate": 6.064516129032258e-05,
      "loss": 0.1284,
      "step": 67
    },
    {
      "epoch": 8.5,
      "learning_rate": 6e-05,
      "loss": 0.1008,
      "step": 68
    },
    {
      "epoch": 8.62,
      "learning_rate": 5.935483870967742e-05,
      "loss": 0.0732,
      "step": 69
    },
    {
      "epoch": 8.75,
      "learning_rate": 5.870967741935483e-05,
      "loss": 0.0628,
      "step": 70
    },
    {
      "epoch": 8.88,
      "learning_rate": 5.8064516129032266e-05,
      "loss": 0.0694,
      "step": 71
    },
    {
      "epoch": 9.0,
      "learning_rate": 5.7419354838709685e-05,
      "loss": 0.0856,
      "step": 72
    },
    {
      "epoch": 9.12,
      "learning_rate": 5.67741935483871e-05,
      "loss": 0.0698,
      "step": 73
    },
    {
      "epoch": 9.25,
      "learning_rate": 5.612903225806452e-05,
      "loss": 0.0619,
      "step": 74
    },
    {
      "epoch": 9.38,
      "learning_rate": 5.5483870967741936e-05,
      "loss": 0.1077,
      "step": 75
    },
    {
      "epoch": 9.5,
      "learning_rate": 5.4838709677419355e-05,
      "loss": 0.0524,
      "step": 76
    },
    {
      "epoch": 9.62,
      "learning_rate": 5.419354838709678e-05,
      "loss": 0.0747,
      "step": 77
    },
    {
      "epoch": 9.75,
      "learning_rate": 5.35483870967742e-05,
      "loss": 0.0805,
      "step": 78
    },
    {
      "epoch": 9.88,
      "learning_rate": 5.290322580645162e-05,
      "loss": 0.0665,
      "step": 79
    },
    {
      "epoch": 10.0,
      "learning_rate": 5.225806451612903e-05,
      "loss": 0.0688,
      "step": 80
    },
    {
      "epoch": 10.12,
      "learning_rate": 5.161290322580645e-05,
      "loss": 0.0421,
      "step": 81
    },
    {
      "epoch": 10.25,
      "learning_rate": 5.096774193548387e-05,
      "loss": 0.0548,
      "step": 82
    },
    {
      "epoch": 10.38,
      "learning_rate": 5.032258064516129e-05,
      "loss": 0.0552,
      "step": 83
    },
    {
      "epoch": 10.5,
      "learning_rate": 4.967741935483871e-05,
      "loss": 0.0777,
      "step": 84
    },
    {
      "epoch": 10.62,
      "learning_rate": 4.903225806451613e-05,
      "loss": 0.0574,
      "step": 85
    },
    {
      "epoch": 10.75,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 0.1117,
      "step": 86
    },
    {
      "epoch": 10.88,
      "learning_rate": 4.774193548387097e-05,
      "loss": 0.0512,
      "step": 87
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.7096774193548385e-05,
      "loss": 0.07,
      "step": 88
    },
    {
      "epoch": 11.12,
      "learning_rate": 4.645161290322581e-05,
      "loss": 0.0381,
      "step": 89
    },
    {
      "epoch": 11.25,
      "learning_rate": 4.580645161290323e-05,
      "loss": 0.0589,
      "step": 90
    },
    {
      "epoch": 11.38,
      "learning_rate": 4.516129032258064e-05,
      "loss": 0.0407,
      "step": 91
    },
    {
      "epoch": 11.5,
      "learning_rate": 4.451612903225807e-05,
      "loss": 0.0484,
      "step": 92
    },
    {
      "epoch": 11.62,
      "learning_rate": 4.387096774193549e-05,
      "loss": 0.0487,
      "step": 93
    },
    {
      "epoch": 11.75,
      "learning_rate": 4.322580645161291e-05,
      "loss": 0.0616,
      "step": 94
    },
    {
      "epoch": 11.88,
      "learning_rate": 4.258064516129032e-05,
      "loss": 0.0739,
      "step": 95
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.1935483870967746e-05,
      "loss": 0.0951,
      "step": 96
    },
    {
      "epoch": 12.12,
      "learning_rate": 4.1290322580645165e-05,
      "loss": 0.0978,
      "step": 97
    },
    {
      "epoch": 12.25,
      "learning_rate": 4.0645161290322584e-05,
      "loss": 0.0436,
      "step": 98
    },
    {
      "epoch": 12.38,
      "learning_rate": 4e-05,
      "loss": 0.0393,
      "step": 99
    },
    {
      "epoch": 12.5,
      "learning_rate": 3.935483870967742e-05,
      "loss": 0.0601,
      "step": 100
    },
    {
      "epoch": 12.62,
      "learning_rate": 3.870967741935484e-05,
      "loss": 0.0518,
      "step": 101
    },
    {
      "epoch": 12.75,
      "learning_rate": 3.8064516129032254e-05,
      "loss": 0.0464,
      "step": 102
    },
    {
      "epoch": 12.88,
      "learning_rate": 3.741935483870968e-05,
      "loss": 0.0483,
      "step": 103
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.67741935483871e-05,
      "loss": 0.0659,
      "step": 104
    },
    {
      "epoch": 13.12,
      "learning_rate": 3.612903225806452e-05,
      "loss": 0.033,
      "step": 105
    },
    {
      "epoch": 13.25,
      "learning_rate": 3.548387096774194e-05,
      "loss": 0.0395,
      "step": 106
    },
    {
      "epoch": 13.38,
      "learning_rate": 3.483870967741936e-05,
      "loss": 0.0711,
      "step": 107
    },
    {
      "epoch": 13.5,
      "learning_rate": 3.4193548387096776e-05,
      "loss": 0.051,
      "step": 108
    },
    {
      "epoch": 13.62,
      "learning_rate": 3.3548387096774195e-05,
      "loss": 0.0633,
      "step": 109
    },
    {
      "epoch": 13.75,
      "learning_rate": 3.2903225806451614e-05,
      "loss": 0.0406,
      "step": 110
    },
    {
      "epoch": 13.88,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.0562,
      "step": 111
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.161290322580645e-05,
      "loss": 0.0406,
      "step": 112
    },
    {
      "epoch": 14.12,
      "learning_rate": 3.096774193548387e-05,
      "loss": 0.0346,
      "step": 113
    },
    {
      "epoch": 14.25,
      "learning_rate": 3.032258064516129e-05,
      "loss": 0.0555,
      "step": 114
    },
    {
      "epoch": 14.38,
      "learning_rate": 2.967741935483871e-05,
      "loss": 0.0435,
      "step": 115
    },
    {
      "epoch": 14.5,
      "learning_rate": 2.9032258064516133e-05,
      "loss": 0.0486,
      "step": 116
    },
    {
      "epoch": 14.62,
      "learning_rate": 2.838709677419355e-05,
      "loss": 0.0334,
      "step": 117
    },
    {
      "epoch": 14.75,
      "learning_rate": 2.7741935483870968e-05,
      "loss": 0.0571,
      "step": 118
    },
    {
      "epoch": 14.88,
      "learning_rate": 2.709677419354839e-05,
      "loss": 0.0437,
      "step": 119
    },
    {
      "epoch": 15.0,
      "learning_rate": 2.645161290322581e-05,
      "loss": 0.0578,
      "step": 120
    },
    {
      "epoch": 15.12,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 0.0373,
      "step": 121
    },
    {
      "epoch": 15.25,
      "learning_rate": 2.5161290322580645e-05,
      "loss": 0.0642,
      "step": 122
    },
    {
      "epoch": 15.38,
      "learning_rate": 2.4516129032258064e-05,
      "loss": 0.0428,
      "step": 123
    },
    {
      "epoch": 15.5,
      "learning_rate": 2.3870967741935486e-05,
      "loss": 0.0523,
      "step": 124
    },
    {
      "epoch": 15.62,
      "learning_rate": 2.3225806451612906e-05,
      "loss": 0.0311,
      "step": 125
    },
    {
      "epoch": 15.75,
      "learning_rate": 2.258064516129032e-05,
      "loss": 0.038,
      "step": 126
    },
    {
      "epoch": 15.88,
      "learning_rate": 2.1935483870967744e-05,
      "loss": 0.0385,
      "step": 127
    },
    {
      "epoch": 16.0,
      "learning_rate": 2.129032258064516e-05,
      "loss": 0.0699,
      "step": 128
    },
    {
      "epoch": 16.12,
      "learning_rate": 2.0645161290322582e-05,
      "loss": 0.061,
      "step": 129
    },
    {
      "epoch": 16.25,
      "learning_rate": 2e-05,
      "loss": 0.0431,
      "step": 130
    },
    {
      "epoch": 16.38,
      "learning_rate": 1.935483870967742e-05,
      "loss": 0.0665,
      "step": 131
    },
    {
      "epoch": 16.5,
      "learning_rate": 1.870967741935484e-05,
      "loss": 0.032,
      "step": 132
    },
    {
      "epoch": 16.62,
      "learning_rate": 1.806451612903226e-05,
      "loss": 0.045,
      "step": 133
    },
    {
      "epoch": 16.75,
      "learning_rate": 1.741935483870968e-05,
      "loss": 0.0533,
      "step": 134
    },
    {
      "epoch": 16.88,
      "learning_rate": 1.6774193548387098e-05,
      "loss": 0.0393,
      "step": 135
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 0.0462,
      "step": 136
    },
    {
      "epoch": 17.12,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 0.042,
      "step": 137
    },
    {
      "epoch": 17.25,
      "learning_rate": 1.4838709677419355e-05,
      "loss": 0.0694,
      "step": 138
    },
    {
      "epoch": 17.38,
      "learning_rate": 1.4193548387096774e-05,
      "loss": 0.0253,
      "step": 139
    },
    {
      "epoch": 17.5,
      "learning_rate": 1.3548387096774195e-05,
      "loss": 0.0367,
      "step": 140
    },
    {
      "epoch": 17.62,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 0.0391,
      "step": 141
    },
    {
      "epoch": 17.75,
      "learning_rate": 1.2258064516129032e-05,
      "loss": 0.0508,
      "step": 142
    },
    {
      "epoch": 17.88,
      "learning_rate": 1.1612903225806453e-05,
      "loss": 0.0516,
      "step": 143
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.0967741935483872e-05,
      "loss": 0.0562,
      "step": 144
    },
    {
      "epoch": 18.12,
      "learning_rate": 1.0322580645161291e-05,
      "loss": 0.0404,
      "step": 145
    },
    {
      "epoch": 18.25,
      "learning_rate": 9.67741935483871e-06,
      "loss": 0.0427,
      "step": 146
    },
    {
      "epoch": 18.38,
      "learning_rate": 9.03225806451613e-06,
      "loss": 0.0318,
      "step": 147
    },
    {
      "epoch": 18.5,
      "learning_rate": 8.387096774193549e-06,
      "loss": 0.052,
      "step": 148
    },
    {
      "epoch": 18.62,
      "learning_rate": 7.741935483870968e-06,
      "loss": 0.0278,
      "step": 149
    },
    {
      "epoch": 18.75,
      "learning_rate": 7.096774193548387e-06,
      "loss": 0.095,
      "step": 150
    },
    {
      "epoch": 18.88,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.0408,
      "step": 151
    },
    {
      "epoch": 19.0,
      "learning_rate": 5.806451612903226e-06,
      "loss": 0.0509,
      "step": 152
    },
    {
      "epoch": 19.12,
      "learning_rate": 5.161290322580646e-06,
      "loss": 0.0297,
      "step": 153
    },
    {
      "epoch": 19.25,
      "learning_rate": 4.516129032258065e-06,
      "loss": 0.0493,
      "step": 154
    },
    {
      "epoch": 19.38,
      "learning_rate": 3.870967741935484e-06,
      "loss": 0.1001,
      "step": 155
    },
    {
      "epoch": 19.5,
      "learning_rate": 3.225806451612903e-06,
      "loss": 0.0401,
      "step": 156
    },
    {
      "epoch": 19.62,
      "learning_rate": 2.580645161290323e-06,
      "loss": 0.0434,
      "step": 157
    },
    {
      "epoch": 19.75,
      "learning_rate": 1.935483870967742e-06,
      "loss": 0.0363,
      "step": 158
    },
    {
      "epoch": 19.88,
      "learning_rate": 1.2903225806451614e-06,
      "loss": 0.0432,
      "step": 159
    },
    {
      "epoch": 20.0,
      "learning_rate": 6.451612903225807e-07,
      "loss": 0.0328,
      "step": 160
    },
    {
      "epoch": 20.0,
      "step": 160,
      "total_flos": 7453507615457280.0,
      "train_loss": 0.4629385274136439,
      "train_runtime": 260.0227,
      "train_samples_per_second": 2.461,
      "train_steps_per_second": 0.615
    }
  ],
  "logging_steps": 1,
  "max_steps": 160,
  "num_train_epochs": 20,
  "save_steps": 500,
  "total_flos": 7453507615457280.0,
  "trial_name": null,
  "trial_params": null
}