{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9982222222222221,
  "global_step": 1686,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
    {
      "epoch": 0.0,
      "learning_rate": 7.352941176470589e-07,
      "loss": 1.4934,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 1.4308,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 1.5656,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.4176,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.6764705882352942e-06,
      "loss": 1.3197,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.411764705882353e-06,
      "loss": 1.1572,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.147058823529412e-06,
      "loss": 0.9889,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.7601,
      "step": 8
    },
|
    {
      "epoch": 0.01,
      "learning_rate": 6.61764705882353e-06,
      "loss": 0.5436,
      "step": 9
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.3529411764705884e-06,
      "loss": 0.5049,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.088235294117648e-06,
      "loss": 0.4444,
      "step": 11
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.4241,
      "step": 12
    },
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.558823529411764e-06, |
|
"loss": 0.406, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0294117647058824e-05, |
|
"loss": 0.3248, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1029411764705883e-05, |
|
"loss": 0.3274, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1764705882352942e-05, |
|
"loss": 0.3797, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.2718, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.323529411764706e-05, |
|
"loss": 0.26, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3970588235294118e-05, |
|
"loss": 0.4324, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4705882352941177e-05, |
|
"loss": 0.2265, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5441176470588237e-05, |
|
"loss": 0.3209, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6176470588235296e-05, |
|
"loss": 0.3329, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6911764705882355e-05, |
|
"loss": 0.2335, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7647058823529414e-05, |
|
"loss": 0.2215, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8382352941176472e-05, |
|
"loss": 0.1997, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9117647058823528e-05, |
|
"loss": 0.2286, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9852941176470586e-05, |
|
"loss": 0.2931, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.058823529411765e-05, |
|
"loss": 0.2361, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.1323529411764707e-05, |
|
"loss": 0.3129, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.2058823529411766e-05, |
|
"loss": 0.2784, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.2794117647058825e-05, |
|
"loss": 0.2612, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.3529411764705884e-05, |
|
"loss": 0.3002, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.4264705882352942e-05, |
|
"loss": 0.1918, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.2737, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.5735294117647057e-05, |
|
"loss": 0.3001, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.647058823529412e-05, |
|
"loss": 0.2636, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.7205882352941174e-05, |
|
"loss": 0.2934, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.7941176470588236e-05, |
|
"loss": 0.3193, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.8676470588235295e-05, |
|
"loss": 0.3171, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.9411764705882354e-05, |
|
"loss": 0.2971, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.0147058823529413e-05, |
|
"loss": 0.2394, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.0882352941176475e-05, |
|
"loss": 0.294, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.161764705882353e-05, |
|
"loss": 0.2625, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.235294117647059e-05, |
|
"loss": 0.3554, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.308823529411765e-05, |
|
"loss": 0.3236, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.382352941176471e-05, |
|
"loss": 0.2163, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.455882352941177e-05, |
|
"loss": 0.3002, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.529411764705883e-05, |
|
"loss": 0.3036, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.6029411764705886e-05, |
|
"loss": 0.2182, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.6764705882352945e-05, |
|
"loss": 0.2003, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.3377, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.8235294117647055e-05, |
|
"loss": 0.1783, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.897058823529412e-05, |
|
"loss": 0.2838, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.970588235294117e-05, |
|
"loss": 0.2711, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.044117647058824e-05, |
|
"loss": 0.2823, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.11764705882353e-05, |
|
"loss": 0.3191, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.1911764705882356e-05, |
|
"loss": 0.329, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.2647058823529415e-05, |
|
"loss": 0.1626, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.3382352941176474e-05, |
|
"loss": 0.3719, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.411764705882353e-05, |
|
"loss": 0.2546, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.485294117647059e-05, |
|
"loss": 0.2373, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.558823529411765e-05, |
|
"loss": 0.2828, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.632352941176471e-05, |
|
"loss": 0.2987, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.705882352941177e-05, |
|
"loss": 0.3749, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.7794117647058826e-05, |
|
"loss": 0.2856, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8529411764705885e-05, |
|
"loss": 0.367, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9264705882352944e-05, |
|
"loss": 0.3635, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5e-05, |
|
"loss": 0.2624, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9999952874867166e-05, |
|
"loss": 0.3697, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.999981149964632e-05, |
|
"loss": 0.2175, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9999575874870454e-05, |
|
"loss": 0.2936, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999924600142787e-05, |
|
"loss": 0.225, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999882188056219e-05, |
|
"loss": 0.3736, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999830351387237e-05, |
|
"loss": 0.2763, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999769090331264e-05, |
|
"loss": 0.3704, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999698405119256e-05, |
|
"loss": 0.3845, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999618296017697e-05, |
|
"loss": 0.275, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999528763328598e-05, |
|
"loss": 0.331, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9994298073895e-05, |
|
"loss": 0.2806, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.999321428573467e-05, |
|
"loss": 0.2779, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.999203627289087e-05, |
|
"loss": 0.1724, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.999076403980475e-05, |
|
"loss": 0.3406, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.998939759127262e-05, |
|
"loss": 0.3398, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.998793693244601e-05, |
|
"loss": 0.3071, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.998638206883162e-05, |
|
"loss": 0.321, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.9984733006291306e-05, |
|
"loss": 0.2683, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.9982989751042046e-05, |
|
"loss": 0.3641, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.998115230965593e-05, |
|
"loss": 0.2668, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.997922068906015e-05, |
|
"loss": 0.3709, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9977194896536904e-05, |
|
"loss": 0.2223, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9975074939723476e-05, |
|
"loss": 0.3262, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.997286082661211e-05, |
|
"loss": 0.3202, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9970552565550056e-05, |
|
"loss": 0.5339, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9968150165239455e-05, |
|
"loss": 0.3457, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9965653634737414e-05, |
|
"loss": 0.2291, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9963062983455856e-05, |
|
"loss": 0.3465, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9960378221161566e-05, |
|
"loss": 0.2793, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9957599357976135e-05, |
|
"loss": 0.3772, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.995472640437591e-05, |
|
"loss": 0.286, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.995175937119194e-05, |
|
"loss": 0.236, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9948698269609984e-05, |
|
"loss": 0.2889, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.994554311117042e-05, |
|
"loss": 0.3081, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9942293907768244e-05, |
|
"loss": 0.2765, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.993895067165297e-05, |
|
"loss": 0.2452, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9935513415428636e-05, |
|
"loss": 0.2907, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.9931982152053744e-05, |
|
"loss": 0.3427, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.9928356894841195e-05, |
|
"loss": 0.4537, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.992463765745824e-05, |
|
"loss": 0.2331, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.992082445392644e-05, |
|
"loss": 0.248, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.991691729862161e-05, |
|
"loss": 0.3122, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.991291620627379e-05, |
|
"loss": 0.3075, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.990882119196712e-05, |
|
"loss": 0.2334, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.990463227113985e-05, |
|
"loss": 0.3013, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.990034945958427e-05, |
|
"loss": 0.3308, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.98959727734466e-05, |
|
"loss": 0.2707, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.989150222922702e-05, |
|
"loss": 0.1658, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.988693784377951e-05, |
|
"loss": 0.339, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9882279634311855e-05, |
|
"loss": 0.2905, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.987752761838557e-05, |
|
"loss": 0.2974, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.987268181391578e-05, |
|
"loss": 0.3136, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.986774223917123e-05, |
|
"loss": 0.299, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.986270891277418e-05, |
|
"loss": 0.1877, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.985758185370032e-05, |
|
"loss": 0.4754, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9852361081278704e-05, |
|
"loss": 0.2564, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.984704661519171e-05, |
|
"loss": 0.294, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9841638475474936e-05, |
|
"loss": 0.2843, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9836136682517106e-05, |
|
"loss": 0.3276, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.983054125706007e-05, |
|
"loss": 0.3818, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9824852220198615e-05, |
|
"loss": 0.2484, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.9819069593380475e-05, |
|
"loss": 0.3457, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.981319339840622e-05, |
|
"loss": 0.2792, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.980722365742918e-05, |
|
"loss": 0.2812, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.980116039295531e-05, |
|
"loss": 0.3248, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9795003627843204e-05, |
|
"loss": 0.3296, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9788753385303936e-05, |
|
"loss": 0.3651, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.978240968890097e-05, |
|
"loss": 0.2841, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.977597256255012e-05, |
|
"loss": 0.321, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.976944203051942e-05, |
|
"loss": 0.4024, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.9762818117429036e-05, |
|
"loss": 0.3881, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9756100848251194e-05, |
|
"loss": 0.3207, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.974929024831009e-05, |
|
"loss": 0.292, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.974238634328173e-05, |
|
"loss": 0.2295, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.973538915919392e-05, |
|
"loss": 0.2864, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9728298722426124e-05, |
|
"loss": 0.274, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.972111505970936e-05, |
|
"loss": 0.3188, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.971383819812612e-05, |
|
"loss": 0.2161, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.970646816511023e-05, |
|
"loss": 0.3466, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.969900498844681e-05, |
|
"loss": 0.3352, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.96914486962721e-05, |
|
"loss": 0.2267, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.9683799317073425e-05, |
|
"loss": 0.329, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.967605687968901e-05, |
|
"loss": 0.2721, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.966822141330793e-05, |
|
"loss": 0.3088, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.966029294746997e-05, |
|
"loss": 0.2308, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.965227151206554e-05, |
|
"loss": 0.3343, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.964415713733553e-05, |
|
"loss": 0.3477, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.9635949853871225e-05, |
|
"loss": 0.2654, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9627649692614165e-05, |
|
"loss": 0.2521, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.961925668485604e-05, |
|
"loss": 0.3123, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.96107708622386e-05, |
|
"loss": 0.3097, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.960219225675347e-05, |
|
"loss": 0.4019, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.959352090074209e-05, |
|
"loss": 0.3019, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.958475682689556e-05, |
|
"loss": 0.2934, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9575900068254526e-05, |
|
"loss": 0.2897, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9566950658209074e-05, |
|
"loss": 0.2571, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9557908630498573e-05, |
|
"loss": 0.362, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9548774019211566e-05, |
|
"loss": 0.2667, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.953954685878562e-05, |
|
"loss": 0.2559, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.953022718400724e-05, |
|
"loss": 0.2787, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.952081503001169e-05, |
|
"loss": 0.2514, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.951131043228291e-05, |
|
"loss": 0.3691, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.950171342665332e-05, |
|
"loss": 0.3312, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.949202404930372e-05, |
|
"loss": 0.4162, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.94822423367632e-05, |
|
"loss": 0.3788, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.947236832590888e-05, |
|
"loss": 0.3016, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9462402053965914e-05, |
|
"loss": 0.3429, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.945234355850725e-05, |
|
"loss": 0.2481, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9442192877453516e-05, |
|
"loss": 0.2757, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9431950049072895e-05, |
|
"loss": 0.2579, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.942161511198094e-05, |
|
"loss": 0.3994, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9411188105140505e-05, |
|
"loss": 0.248, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9400669067861486e-05, |
|
"loss": 0.3515, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.939005803980078e-05, |
|
"loss": 0.3575, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.937935506096207e-05, |
|
"loss": 0.2387, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.936856017169572e-05, |
|
"loss": 0.3204, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.9357673412698544e-05, |
|
"loss": 0.3025, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.934669482501376e-05, |
|
"loss": 0.248, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.933562445003076e-05, |
|
"loss": 0.2191, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.932446232948497e-05, |
|
"loss": 0.3097, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.93132085054577e-05, |
|
"loss": 0.4397, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.9301863020375996e-05, |
|
"loss": 0.2615, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.929042591701245e-05, |
|
"loss": 0.3, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.9278897238485066e-05, |
|
"loss": 0.3201, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.926727702825708e-05, |
|
"loss": 0.2454, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.925556533013682e-05, |
|
"loss": 0.328, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.9243762188277495e-05, |
|
"loss": 0.2571, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.923186764717709e-05, |
|
"loss": 0.3189, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.9219881751678146e-05, |
|
"loss": 0.3703, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.9207804546967613e-05, |
|
"loss": 0.442, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.9195636078576684e-05, |
|
"loss": 0.3707, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.918337639238062e-05, |
|
"loss": 0.2851, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.917102553459856e-05, |
|
"loss": 0.323, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.9158583551793367e-05, |
|
"loss": 0.2701, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.914605049087146e-05, |
|
"loss": 0.3427, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.91334263990826e-05, |
|
"loss": 0.329, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.912071132401975e-05, |
|
"loss": 0.3434, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.9107905313618894e-05, |
|
"loss": 0.3047, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.9095008416158805e-05, |
|
"loss": 0.2534, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.908202068026093e-05, |
|
"loss": 0.2302, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.906894215488917e-05, |
|
"loss": 0.3817, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.905577288934971e-05, |
|
"loss": 0.2872, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.904251293329083e-05, |
|
"loss": 0.2572, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.902916233670269e-05, |
|
"loss": 0.2797, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.9015721149917184e-05, |
|
"loss": 0.2026, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.900218942360772e-05, |
|
"loss": 0.2158, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.898856720878908e-05, |
|
"loss": 0.442, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.897485455681713e-05, |
|
"loss": 0.2516, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.896105151938872e-05, |
|
"loss": 0.2612, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.894715814854145e-05, |
|
"loss": 0.377, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.8933174496653476e-05, |
|
"loss": 0.2989, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.891910061644332e-05, |
|
"loss": 0.307, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.890493656096965e-05, |
|
"loss": 0.3227, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.8890682383631115e-05, |
|
"loss": 0.3266, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.8876338138166106e-05, |
|
"loss": 0.2701, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.886190387865258e-05, |
|
"loss": 0.1774, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.884737965950786e-05, |
|
"loss": 0.3749, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.8832765535488404e-05, |
|
"loss": 0.3485, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.881806156168961e-05, |
|
"loss": 0.3136, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.880326779354561e-05, |
|
"loss": 0.3249, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.878838428682908e-05, |
|
"loss": 0.2764, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.877341109765099e-05, |
|
"loss": 0.2733, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.875834828246042e-05, |
|
"loss": 0.3222, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.874319589804436e-05, |
|
"loss": 0.2733, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.872795400152743e-05, |
|
"loss": 0.2473, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.871262265037178e-05, |
|
"loss": 0.2949, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.869720190237673e-05, |
|
"loss": 0.1997, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.868169181567869e-05, |
|
"loss": 0.3084, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.866609244875084e-05, |
|
"loss": 0.3271, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.865040386040296e-05, |
|
"loss": 0.3074, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.86346261097812e-05, |
|
"loss": 0.2595, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.861875925636784e-05, |
|
"loss": 0.2023, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.8602803359981084e-05, |
|
"loss": 0.2991, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.858675848077485e-05, |
|
"loss": 0.2447, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.857062467923848e-05, |
|
"loss": 0.33, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.855440201619659e-05, |
|
"loss": 0.3225, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.853809055280879e-05, |
|
"loss": 0.2931, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.852169035056947e-05, |
|
"loss": 0.3839, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.850520147130756e-05, |
|
"loss": 0.3014, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.848862397718632e-05, |
|
"loss": 0.3673, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.8471957930703074e-05, |
|
"loss": 0.2698, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.8455203394689e-05, |
|
"loss": 0.3481, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.843836043230886e-05, |
|
"loss": 0.3211, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.842142910706083e-05, |
|
"loss": 0.3669, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.840440948277616e-05, |
|
"loss": 0.3269, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.838730162361903e-05, |
|
"loss": 0.2302, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.8370105594086236e-05, |
|
"loss": 0.3316, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.8352821459007005e-05, |
|
"loss": 0.2298, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.833544928354271e-05, |
|
"loss": 0.2148, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.8317989133186624e-05, |
|
"loss": 0.2909, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.8300441073763717e-05, |
|
"loss": 0.2634, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.8282805171430346e-05, |
|
"loss": 0.3172, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.826508149267406e-05, |
|
"loss": 0.2554, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.82472701043133e-05, |
|
"loss": 0.3023, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.822937107349721e-05, |
|
"loss": 0.3491, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.8211384467705315e-05, |
|
"loss": 0.259, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.819331035474731e-05, |
|
"loss": 0.3185, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.81751488027628e-05, |
|
"loss": 0.3491, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.8156899880221016e-05, |
|
"loss": 0.4103, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.813856365592061e-05, |
|
"loss": 0.2375, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.812014019898932e-05, |
|
"loss": 0.3101, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.810162957888379e-05, |
|
"loss": 0.2408, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.808303186538925e-05, |
|
"loss": 0.3545, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.806434712861927e-05, |
|
"loss": 0.2684, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.8045575439015514e-05, |
|
"loss": 0.2132, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.8026716867347456e-05, |
|
"loss": 0.2239, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.80077714847121e-05, |
|
"loss": 0.2421, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.798873936253375e-05, |
|
"loss": 0.3354, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.79696205725637e-05, |
|
"loss": 0.2413, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.795041518688001e-05, |
|
"loss": 0.2617, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.793112327788716e-05, |
|
"loss": 0.3021, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.791174491831586e-05, |
|
"loss": 0.3067, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.789228018122275e-05, |
|
"loss": 0.386, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.787272913999008e-05, |
|
"loss": 0.3342, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.7853091868325485e-05, |
|
"loss": 0.3078, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.783336844026169e-05, |
|
"loss": 0.2851, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.781355893015623e-05, |
|
"loss": 0.2788, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.7793663412691164e-05, |
|
"loss": 0.3291, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.777368196287281e-05, |
|
"loss": 0.2228, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.775361465603143e-05, |
|
"loss": 0.1885, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.773346156782101e-05, |
|
"loss": 0.3855, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.771322277421889e-05, |
|
"loss": 0.2679, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.769289835152554e-05, |
|
"loss": 0.285, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.767248837636425e-05, |
|
"loss": 0.2531, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.7651992925680846e-05, |
|
"loss": 0.2426, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.7631412076743386e-05, |
|
"loss": 0.2714, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.761074590714189e-05, |
|
"loss": 0.2188, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.7589994494788056e-05, |
|
"loss": 0.2659, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.756915791791491e-05, |
|
"loss": 0.3369, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.754823625507657e-05, |
|
"loss": 0.2152, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.752722958514794e-05, |
|
"loss": 0.3117, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.7506137987324375e-05, |
|
"loss": 0.2674, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.748496154112142e-05, |
|
"loss": 0.2886, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.746370032637452e-05, |
|
"loss": 0.2769, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.744235442323866e-05, |
|
"loss": 0.2641, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.742092391218814e-05, |
|
"loss": 0.2774, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.7399408874016206e-05, |
|
"loss": 0.3404, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.737780938983478e-05, |
|
"loss": 0.2063, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.735612554107415e-05, |
|
"loss": 0.2887, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.733435740948265e-05, |
|
"loss": 0.4375, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.731250507712637e-05, |
|
"loss": 0.2218, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.7290568626388844e-05, |
|
"loss": 0.3174, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.726854813997071e-05, |
|
"loss": 0.3613, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.7246443700889453e-05, |
|
"loss": 0.2768, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.722425539247902e-05, |
|
"loss": 0.29, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.7201983298389595e-05, |
|
"loss": 0.2188, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.717962750258719e-05, |
|
"loss": 0.2425, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.71571880893534e-05, |
|
"loss": 0.2412, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.713466514328505e-05, |
|
"loss": 0.2124, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.7112058749293894e-05, |
|
"loss": 0.3265, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.708936899260626e-05, |
|
"loss": 0.1941, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.7066595958762794e-05, |
|
"loss": 0.2355, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.7043739733618066e-05, |
|
"loss": 0.304, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.702080040334027e-05, |
|
"loss": 0.3028, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.6997778054410956e-05, |
|
"loss": 0.2999, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.6974672773624606e-05, |
|
"loss": 0.3101, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.695148464808837e-05, |
|
"loss": 0.379, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.692821376522174e-05, |
|
"loss": 0.2935, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.690486021275619e-05, |
|
"loss": 0.2777, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.688142407873485e-05, |
|
"loss": 0.2829, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.6857905451512205e-05, |
|
"loss": 0.2161, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.683430441975373e-05, |
|
"loss": 0.286, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.6810621072435553e-05, |
|
"loss": 0.298, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.6786855498844165e-05, |
|
"loss": 0.2971, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.676300778857601e-05, |
|
"loss": 0.4267, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.6739078031537226e-05, |
|
"loss": 0.4661, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.6715066317943246e-05, |
|
"loss": 0.2975, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.669097273831847e-05, |
|
"loss": 0.3425, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.666679738349597e-05, |
|
"loss": 0.2916, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.664254034461709e-05, |
|
"loss": 0.2832, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.6618201713131116e-05, |
|
"loss": 0.3136, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.659378158079495e-05, |
|
"loss": 0.3325, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.6569280039672747e-05, |
|
"loss": 0.3296, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.654469718213558e-05, |
|
"loss": 0.2936, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.6520033100861084e-05, |
|
"loss": 0.2644, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.649528788883311e-05, |
|
"loss": 0.2932, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.6470461639341365e-05, |
|
"loss": 0.376, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.644555444598107e-05, |
|
"loss": 0.3354, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.6420566402652623e-05, |
|
"loss": 0.2584, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.63954976035612e-05, |
|
"loss": 0.2196, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.6370348143216436e-05, |
|
"loss": 0.3061, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.6345118116432085e-05, |
|
"loss": 0.3528, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.63198076183256e-05, |
|
"loss": 0.2717, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.629441674431783e-05, |
|
"loss": 0.4058, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.626894559013263e-05, |
|
"loss": 0.3199, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.6243394251796544e-05, |
|
"loss": 0.362, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.621776282563838e-05, |
|
"loss": 0.2788, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.6192051408288875e-05, |
|
"loss": 0.2861, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.616626009668036e-05, |
|
"loss": 0.2636, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.6140388988046345e-05, |
|
"loss": 0.2872, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.611443817992119e-05, |
|
"loss": 0.2773, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.6088407770139726e-05, |
|
"loss": 0.2935, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.606229785683686e-05, |
|
"loss": 0.3182, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.603610853844724e-05, |
|
"loss": 0.2632, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.600983991370489e-05, |
|
"loss": 0.2177, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.5983492081642784e-05, |
|
"loss": 0.3008, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.595706514159255e-05, |
|
"loss": 0.2536, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.593055919318402e-05, |
|
"loss": 0.2911, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.590397433634491e-05, |
|
"loss": 0.2796, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.5877310671300414e-05, |
|
"loss": 0.2053, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.585056829857281e-05, |
|
"loss": 0.3824, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.582374731898115e-05, |
|
"loss": 0.3492, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.579684783364081e-05, |
|
"loss": 0.2639, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.576986994396313e-05, |
|
"loss": 0.2315, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.5742813751655046e-05, |
|
"loss": 0.2623, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.571567935871868e-05, |
|
"loss": 0.2882, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.568846686745098e-05, |
|
"loss": 0.1957, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.5661176380443354e-05, |
|
"loss": 0.2981, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.56338080005812e-05, |
|
"loss": 0.3681, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.560636183104361e-05, |
|
"loss": 0.2598, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.557883797530295e-05, |
|
"loss": 0.3944, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.555123653712441e-05, |
|
"loss": 0.3041, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.552355762056575e-05, |
|
"loss": 0.2686, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.549580132997675e-05, |
|
"loss": 0.3159, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.546796776999893e-05, |
|
"loss": 0.1752, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.54400570455651e-05, |
|
"loss": 0.2823, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.5412069261899e-05, |
|
"loss": 0.2172, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.5384004524514875e-05, |
|
"loss": 0.3612, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.535586293921707e-05, |
|
"loss": 0.3427, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.532764461209965e-05, |
|
"loss": 0.3054, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.529934964954603e-05, |
|
"loss": 0.2617, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.527097815822852e-05, |
|
"loss": 0.2361, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.524253024510793e-05, |
|
"loss": 0.2966, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.5214006017433206e-05, |
|
"loss": 0.207, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.518540558274098e-05, |
|
"loss": 0.307, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.51567290488552e-05, |
|
"loss": 0.2536, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.5127976523886707e-05, |
|
"loss": 0.1883, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.509914811623282e-05, |
|
"loss": 0.7126, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.507024393457693e-05, |
|
"loss": 0.2616, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.504126408788814e-05, |
|
"loss": 0.234, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.501220868542075e-05, |
|
"loss": 0.3386, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.498307783671395e-05, |
|
"loss": 0.2801, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.4953871651591354e-05, |
|
"loss": 0.3349, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.492459024016058e-05, |
|
"loss": 0.4669, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.4895233712812866e-05, |
|
"loss": 0.1998, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.486580218022263e-05, |
|
"loss": 0.4003, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.483629575334707e-05, |
|
"loss": 0.3852, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.480671454342571e-05, |
|
"loss": 0.2936, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.477705866198004e-05, |
|
"loss": 0.2771, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.4747328220813054e-05, |
|
"loss": 0.2904, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.471752333200882e-05, |
|
"loss": 0.2923, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.468764410793208e-05, |
|
"loss": 0.3194, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.465769066122784e-05, |
|
"loss": 0.248, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.46276631048209e-05, |
|
"loss": 0.4047, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.459756155191549e-05, |
|
"loss": 0.2548, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.4567386115994756e-05, |
|
"loss": 0.3067, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.4537136910820426e-05, |
|
"loss": 0.2111, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.450681405043232e-05, |
|
"loss": 0.221, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.447641764914796e-05, |
|
"loss": 0.2154, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.4445947821562087e-05, |
|
"loss": 0.4225, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.441540468254626e-05, |
|
"loss": 0.2937, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.438478834724847e-05, |
|
"loss": 0.2467, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.43540989310926e-05, |
|
"loss": 0.2943, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.432333654977809e-05, |
|
"loss": 0.331, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.429250131927945e-05, |
|
"loss": 0.2773, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.426159335584581e-05, |
|
"loss": 0.262, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.423061277600053e-05, |
|
"loss": 0.3498, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.4199559696540716e-05, |
|
"loss": 0.4043, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.4168434234536825e-05, |
|
"loss": 0.2111, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.413723650733216e-05, |
|
"loss": 0.3302, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.4105966632542495e-05, |
|
"loss": 0.3073, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.407462472805559e-05, |
|
"loss": 0.2049, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.404321091203076e-05, |
|
"loss": 0.2015, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.401172530289842e-05, |
|
"loss": 0.262, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.3980168019359645e-05, |
|
"loss": 0.3501, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.3948539180385744e-05, |
|
"loss": 0.2372, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.391683890521777e-05, |
|
"loss": 0.307, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.388506731336609e-05, |
|
"loss": 0.2429, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.385322452460995e-05, |
|
"loss": 0.2586, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.382131065899701e-05, |
|
"loss": 0.278, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.378932583684286e-05, |
|
"loss": 0.4167, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.375727017873065e-05, |
|
"loss": 0.3226, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.372514380551052e-05, |
|
"loss": 0.2661, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.369294683829926e-05, |
|
"loss": 0.3259, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.366067939847977e-05, |
|
"loss": 0.2947, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.362834160770064e-05, |
|
"loss": 0.4092, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.359593358787569e-05, |
|
"loss": 0.2933, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.35634554611835e-05, |
|
"loss": 0.3961, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.353090735006695e-05, |
|
"loss": 0.298, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.349828937723276e-05, |
|
"loss": 0.3062, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.346560166565105e-05, |
|
"loss": 0.3218, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.343284433855481e-05, |
|
"loss": 0.3342, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.340001751943954e-05, |
|
"loss": 0.37, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.336712133206269e-05, |
|
"loss": 0.3375, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.333415590044323e-05, |
|
"loss": 0.2818, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.330112134886119e-05, |
|
"loss": 0.3518, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.3268017801857176e-05, |
|
"loss": 0.2463, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.323484538423192e-05, |
|
"loss": 0.2906, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.320160422104579e-05, |
|
"loss": 0.2432, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.316829443761832e-05, |
|
"loss": 0.2806, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.3134916159527735e-05, |
|
"loss": 0.1853, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.3101469512610525e-05, |
|
"loss": 0.2855, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.306795462296088e-05, |
|
"loss": 0.3396, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.303437161693031e-05, |
|
"loss": 0.3534, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.3000720621127096e-05, |
|
"loss": 0.3458, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.296700176241584e-05, |
|
"loss": 0.2457, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.293321516791701e-05, |
|
"loss": 0.2472, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.2899360965006424e-05, |
|
"loss": 0.4089, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.2865439281314785e-05, |
|
"loss": 0.3292, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.28314502447272e-05, |
|
"loss": 0.2968, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.2797393983382706e-05, |
|
"loss": 0.3879, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.276327062567376e-05, |
|
"loss": 0.2874, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.272908030024578e-05, |
|
"loss": 0.3083, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.269482313599666e-05, |
|
"loss": 0.224, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.266049926207629e-05, |
|
"loss": 0.2349, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.262610880788602e-05, |
|
"loss": 0.2517, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.259165190307823e-05, |
|
"loss": 0.3074, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.255712867755583e-05, |
|
"loss": 0.2581, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.252253926147174e-05, |
|
"loss": 0.2062, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.248788378522841e-05, |
|
"loss": 0.205, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.245316237947738e-05, |
|
"loss": 0.2171, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.2418375175118694e-05, |
|
"loss": 0.3009, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.2383522303300515e-05, |
|
"loss": 0.2514, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.23486038954185e-05, |
|
"loss": 0.1759, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.231362008311545e-05, |
|
"loss": 0.1742, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.227857099828069e-05, |
|
"loss": 0.3008, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.224345677304965e-05, |
|
"loss": 0.3944, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.2208277539803334e-05, |
|
"loss": 0.2892, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.217303343116782e-05, |
|
"loss": 0.3013, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.213772458001378e-05, |
|
"loss": 0.3311, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.210235111945594e-05, |
|
"loss": 0.4286, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.206691318285265e-05, |
|
"loss": 0.22, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.203141090380528e-05, |
|
"loss": 0.2426, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.199584441615782e-05, |
|
"loss": 0.3117, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.19602138539963e-05, |
|
"loss": 0.4433, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.19245193516483e-05, |
|
"loss": 0.4025, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.188876104368251e-05, |
|
"loss": 0.2673, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.18529390649081e-05, |
|
"loss": 0.3724, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.181705355037433e-05, |
|
"loss": 0.2437, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.178110463536995e-05, |
|
"loss": 0.2298, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.174509245542277e-05, |
|
"loss": 0.3817, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.1709017146299087e-05, |
|
"loss": 0.2266, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.167287884400319e-05, |
|
"loss": 0.2456, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.163667768477688e-05, |
|
"loss": 0.3525, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.160041380509889e-05, |
|
"loss": 0.2361, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.156408734168445e-05, |
|
"loss": 0.2911, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.152769843148471e-05, |
|
"loss": 0.2309, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.149124721168625e-05, |
|
"loss": 0.2448, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.145473381971054e-05, |
|
"loss": 0.2787, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.141815839321347e-05, |
|
"loss": 0.3903, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.138152107008478e-05, |
|
"loss": 0.2205, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.134482198844758e-05, |
|
"loss": 0.3232, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.130806128665779e-05, |
|
"loss": 0.2219, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.1271239103303636e-05, |
|
"loss": 0.2765, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.1234355577205164e-05, |
|
"loss": 0.2903, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.119741084741363e-05, |
|
"loss": 0.2991, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.1160405053211084e-05, |
|
"loss": 0.2697, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.112333833410975e-05, |
|
"loss": 0.1415, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.108621082985155e-05, |
|
"loss": 0.2174, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.104902268040758e-05, |
|
"loss": 0.394, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.101177402597755e-05, |
|
"loss": 0.2551, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.097446500698929e-05, |
|
"loss": 0.2233, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.09370957640982e-05, |
|
"loss": 0.2172, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.089966643818671e-05, |
|
"loss": 0.2568, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.0862177170363784e-05, |
|
"loss": 0.2724, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.0824628101964354e-05, |
|
"loss": 0.2839, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.078701937454883e-05, |
|
"loss": 0.2476, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.074935112990249e-05, |
|
"loss": 0.2791, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.071162351003502e-05, |
|
"loss": 0.2558, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.0673836657179953e-05, |
|
"loss": 0.2586, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.0635990713794124e-05, |
|
"loss": 0.2541, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.059808582255715e-05, |
|
"loss": 0.2765, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.0560122126370856e-05, |
|
"loss": 0.2609, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.052209976835879e-05, |
|
"loss": 0.236, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.048401889186565e-05, |
|
"loss": 0.1511, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.0445879640456744e-05, |
|
"loss": 0.2454, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.040768215791745e-05, |
|
"loss": 0.2546, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.036942658825267e-05, |
|
"loss": 0.2466, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.0331113075686344e-05, |
|
"loss": 0.2293, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.029274176466079e-05, |
|
"loss": 0.2017, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.025431279983627e-05, |
|
"loss": 0.3737, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.021582632609039e-05, |
|
"loss": 0.1394, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.017728248851756e-05, |
|
"loss": 0.244, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.013868143242847e-05, |
|
"loss": 0.3704, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.01000233033495e-05, |
|
"loss": 0.3224, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.00613082470222e-05, |
|
"loss": 0.252, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.002253640940277e-05, |
|
"loss": 0.2751, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.998370793666142e-05, |
|
"loss": 0.2885, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.994482297518192e-05, |
|
"loss": 0.2324, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9905881671560994e-05, |
|
"loss": 0.2592, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.986688417260776e-05, |
|
"loss": 0.4748, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.982783062534321e-05, |
|
"loss": 0.4343, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.978872117699962e-05, |
|
"loss": 0.2425, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.974955597502004e-05, |
|
"loss": 0.3459, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.971033516705769e-05, |
|
"loss": 0.324, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9671058900975446e-05, |
|
"loss": 0.1953, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.963172732484522e-05, |
|
"loss": 0.2776, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.95923405869475e-05, |
|
"loss": 0.2916, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.95528988357707e-05, |
|
"loss": 0.2366, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9513402220010634e-05, |
|
"loss": 0.2539, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.947385088856996e-05, |
|
"loss": 0.3312, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.943424499055763e-05, |
|
"loss": 0.2937, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9394584675288296e-05, |
|
"loss": 0.2306, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.935487009228176e-05, |
|
"loss": 0.2333, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.931510139126243e-05, |
|
"loss": 0.2186, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.9275278722158735e-05, |
|
"loss": 0.2813, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.9235402235102545e-05, |
|
"loss": 0.2282, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.919547208042866e-05, |
|
"loss": 0.3022, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.915548840867418e-05, |
|
"loss": 0.2804, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.911545137057796e-05, |
|
"loss": 0.306, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.907536111708008e-05, |
|
"loss": 0.2364, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.903521779932121e-05, |
|
"loss": 0.2579, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.899502156864209e-05, |
|
"loss": 0.3072, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.895477257658292e-05, |
|
"loss": 0.2572, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8914470974882846e-05, |
|
"loss": 0.2716, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.887411691547933e-05, |
|
"loss": 0.2007, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8833710550507605e-05, |
|
"loss": 0.2523, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.87932520323001e-05, |
|
"loss": 0.1348, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.875274151338584e-05, |
|
"loss": 0.4596, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.871217914648994e-05, |
|
"loss": 0.3232, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.867156508453293e-05, |
|
"loss": 0.332, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.863089948063027e-05, |
|
"loss": 0.3265, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.859018248809172e-05, |
|
"loss": 0.2594, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.854941426042076e-05, |
|
"loss": 0.233, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.850859495131405e-05, |
|
"loss": 0.2867, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.846772471466081e-05, |
|
"loss": 0.2477, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.84268037045423e-05, |
|
"loss": 0.2583, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8385832075231124e-05, |
|
"loss": 0.2449, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.834480998119078e-05, |
|
"loss": 0.2729, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.830373757707498e-05, |
|
"loss": 0.3224, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.826261501772715e-05, |
|
"loss": 0.2189, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.822144245817976e-05, |
|
"loss": 0.3783, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.818022005365379e-05, |
|
"loss": 0.2048, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.813894795955817e-05, |
|
"loss": 0.2587, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8097626331489106e-05, |
|
"loss": 0.3626, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.805625532522959e-05, |
|
"loss": 0.1753, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.801483509674875e-05, |
|
"loss": 0.268, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.797336580220129e-05, |
|
"loss": 0.2268, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.793184759792688e-05, |
|
"loss": 0.2217, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.7890280640449605e-05, |
|
"loss": 0.3057, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.7848665086477334e-05, |
|
"loss": 0.1427, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.780700109290115e-05, |
|
"loss": 0.3914, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.776528881679474e-05, |
|
"loss": 0.2081, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.772352841541384e-05, |
|
"loss": 0.1958, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.76817200461956e-05, |
|
"loss": 0.2173, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.763986386675802e-05, |
|
"loss": 0.3855, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7597960034899335e-05, |
|
"loss": 0.3793, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7556008708597446e-05, |
|
"loss": 0.3456, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.751401004600929e-05, |
|
"loss": 0.2731, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.747196420547029e-05, |
|
"loss": 0.2725, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.742987134549369e-05, |
|
"loss": 0.3186, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.738773162477001e-05, |
|
"loss": 0.2341, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7345545202166476e-05, |
|
"loss": 0.4402, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.730331223672633e-05, |
|
"loss": 0.2449, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.726103288766832e-05, |
|
"loss": 0.2957, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.721870731438601e-05, |
|
"loss": 0.2161, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.717633567644729e-05, |
|
"loss": 0.2128, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.713391813359366e-05, |
|
"loss": 0.2113, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7091454845739734e-05, |
|
"loss": 0.2892, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.704894597297254e-05, |
|
"loss": 0.2417, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.700639167555098e-05, |
|
"loss": 0.3562, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.696379211390522e-05, |
|
"loss": 0.1681, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6921147448636045e-05, |
|
"loss": 0.3111, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6878457840514316e-05, |
|
"loss": 0.3029, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6835723450480283e-05, |
|
"loss": 0.1842, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.679294443964307e-05, |
|
"loss": 0.2543, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.675012096928e-05, |
|
"loss": 0.3299, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.670725320083601e-05, |
|
"loss": 0.2311, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6664341295923035e-05, |
|
"loss": 0.2829, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6621385416319425e-05, |
|
"loss": 0.2554, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.657838572396929e-05, |
|
"loss": 0.3379, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.653534238098194e-05, |
|
"loss": 0.1747, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.649225554963123e-05, |
|
"loss": 0.3012, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.644912539235496e-05, |
|
"loss": 0.2631, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.64059520717543e-05, |
|
"loss": 0.3087, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.636273575059312e-05, |
|
"loss": 0.2936, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.631947659179741e-05, |
|
"loss": 0.3079, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.627617475845466e-05, |
|
"loss": 0.1422, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.6232830413813235e-05, |
|
"loss": 0.2727, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.6189443721281786e-05, |
|
"loss": 0.2239, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.614601484442859e-05, |
|
"loss": 0.2528, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.6102543946980985e-05, |
|
"loss": 0.2446, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.605903119282472e-05, |
|
"loss": 0.2392, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.601547674600333e-05, |
|
"loss": 0.2591, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.5971880770717554e-05, |
|
"loss": 0.28, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.592824343132467e-05, |
|
"loss": 0.4283, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.588456489233792e-05, |
|
"loss": 0.2747, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.5840845318425855e-05, |
|
"loss": 0.2312, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.5797084874411735e-05, |
|
"loss": 0.2521, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.57532837252729e-05, |
|
"loss": 0.262, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.570944203614014e-05, |
|
"loss": 0.1989, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.56655599722971e-05, |
|
"loss": 0.2692, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.562163769917963e-05, |
|
"loss": 0.2294, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.5577675382375157e-05, |
|
"loss": 0.2673, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.5533673187622075e-05, |
|
"loss": 0.3452, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.548963128080915e-05, |
|
"loss": 0.2076, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.544554982797481e-05, |
|
"loss": 0.2218, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.540142899530662e-05, |
|
"loss": 0.254, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.535726894914058e-05, |
|
"loss": 0.1941, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.531306985596055e-05, |
|
"loss": 0.2272, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.526883188239755e-05, |
|
"loss": 0.3485, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.5224555195229234e-05, |
|
"loss": 0.1742, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.518023996137918e-05, |
|
"loss": 0.2912, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.513588634791628e-05, |
|
"loss": 0.1661, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.509149452205414e-05, |
|
"loss": 0.2866, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.504706465115042e-05, |
|
"loss": 0.3944, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.500259690270618e-05, |
|
"loss": 0.2728, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.495809144436533e-05, |
|
"loss": 0.2458, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.491354844391391e-05, |
|
"loss": 0.152, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.486896806927951e-05, |
|
"loss": 0.2026, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.48243504885306e-05, |
|
"loss": 0.2478, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.477969586987596e-05, |
|
"loss": 0.2907, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.4735004381663955e-05, |
|
"loss": 0.2569, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.4690276192381975e-05, |
|
"loss": 0.3092, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.464551147065578e-05, |
|
"loss": 0.1909, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.460071038524883e-05, |
|
"loss": 0.2399, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.455587310506171e-05, |
|
"loss": 0.2615, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.451099979913143e-05, |
|
"loss": 0.3121, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.446609063663084e-05, |
|
"loss": 0.2779, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.442114578686795e-05, |
|
"loss": 0.2808, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.4376165419285336e-05, |
|
"loss": 0.271, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.433114970345944e-05, |
|
"loss": 0.2158, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.4286098809100005e-05, |
|
"loss": 0.2295, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.424101290604938e-05, |
|
"loss": 0.3262, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.41958921642819e-05, |
|
"loss": 0.2281, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.415073675390322e-05, |
|
"loss": 0.3043, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.410554684514975e-05, |
|
"loss": 0.2987, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.4060322608387896e-05, |
|
"loss": 0.2203, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.401506421411354e-05, |
|
"loss": 0.252, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.3969771832951284e-05, |
|
"loss": 0.2814, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.39244456356539e-05, |
|
"loss": 0.2556, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.387908579310164e-05, |
|
"loss": 0.2388, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.3833692476301574e-05, |
|
"loss": 0.3186, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.3788265856387e-05, |
|
"loss": 0.2689, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.374280610461675e-05, |
|
"loss": 0.3176, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.3697313392374585e-05, |
|
"loss": 0.2756, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.365178789116849e-05, |
|
"loss": 0.2494, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.3606229772630125e-05, |
|
"loss": 0.2898, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.356063920851405e-05, |
|
"loss": 0.2514, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.351501637069719e-05, |
|
"loss": 0.3513, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.346936143117811e-05, |
|
"loss": 0.2898, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.3423674562076446e-05, |
|
"loss": 0.1813, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.337795593563217e-05, |
|
"loss": 0.1925, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.333220572420497e-05, |
|
"loss": 0.268, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.328642410027366e-05, |
|
"loss": 0.2425, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.3240611236435415e-05, |
|
"loss": 0.273, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.319476730540525e-05, |
|
"loss": 0.2221, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.314889248001525e-05, |
|
"loss": 0.1541, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.3102986933214e-05, |
|
"loss": 0.2146, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.305705083806592e-05, |
|
"loss": 0.2487, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.3011084367750544e-05, |
|
"loss": 0.2341, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.296508769556198e-05, |
|
"loss": 0.1824, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.291906099490815e-05, |
|
"loss": 0.2224, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.2873004439310216e-05, |
|
"loss": 0.252, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.282691820240188e-05, |
|
"loss": 0.3569, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.278080245792875e-05, |
|
"loss": 0.2553, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.273465737974767e-05, |
|
"loss": 0.397, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.268848314182606e-05, |
|
"loss": 0.2171, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.264227991824131e-05, |
|
"loss": 0.3236, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.259604788318005e-05, |
|
"loss": 0.2252, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.254978721093755e-05, |
|
"loss": 0.2994, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.250349807591704e-05, |
|
"loss": 0.2406, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.245718065262904e-05, |
|
"loss": 0.2831, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.2410835115690735e-05, |
|
"loss": 0.175, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.236446163982528e-05, |
|
"loss": 0.2545, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.23180603998612e-05, |
|
"loss": 0.2398, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.227163157073163e-05, |
|
"loss": 0.2577, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.222517532747377e-05, |
|
"loss": 0.2555, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.2178691845228136e-05, |
|
"loss": 0.2365, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.2132181299237975e-05, |
|
"loss": 0.3234, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.2085643864848504e-05, |
|
"loss": 0.223, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.203907971750637e-05, |
|
"loss": 0.321, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.1992489032758897e-05, |
|
"loss": 0.2785, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.194587198625347e-05, |
|
"loss": 0.2555, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.189922875373684e-05, |
|
"loss": 0.2354, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.1852559511054484e-05, |
|
"loss": 0.2835, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.180586443414996e-05, |
|
"loss": 0.2872, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.175914369906418e-05, |
|
"loss": 0.2141, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.171239748193483e-05, |
|
"loss": 0.228, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.166562595899565e-05, |
|
"loss": 0.143, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.161882930657576e-05, |
|
"loss": 0.2461, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.157200770109905e-05, |
|
"loss": 0.2437, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.152516131908347e-05, |
|
"loss": 0.2225, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.147829033714038e-05, |
|
"loss": 0.3087, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.143139493197387e-05, |
|
"loss": 0.1815, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.138447528038011e-05, |
|
"loss": 0.3681, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.133753155924671e-05, |
|
"loss": 0.1193, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.129056394555197e-05, |
|
"loss": 0.2512, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.1243572616364324e-05, |
|
"loss": 0.4096, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.1196557748841555e-05, |
|
"loss": 0.1937, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.1149519520230224e-05, |
|
"loss": 0.2525, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.1102458107864944e-05, |
|
"loss": 0.2583, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.105537368916775e-05, |
|
"loss": 0.2784, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.10082664416474e-05, |
|
"loss": 0.2542, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.096113654289873e-05, |
|
"loss": 0.3522, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.091398417060193e-05, |
|
"loss": 0.3038, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.086680950252196e-05, |
|
"loss": 0.2119, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.0819612716507825e-05, |
|
"loss": 0.2564, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.077239399049189e-05, |
|
"loss": 0.121, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.0725153502489285e-05, |
|
"loss": 0.3507, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.067789143059713e-05, |
|
"loss": 0.1827, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.063060795299394e-05, |
|
"loss": 0.2606, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.058330324793894e-05, |
|
"loss": 0.2924, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.053597749377135e-05, |
|
"loss": 0.2594, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.0488630868909786e-05, |
|
"loss": 0.2064, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.044126355185151e-05, |
|
"loss": 0.3151, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.0393875721171832e-05, |
|
"loss": 0.3077, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.034646755552336e-05, |
|
"loss": 0.2053, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.029903923363539e-05, |
|
"loss": 0.1773, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.0251590934313197e-05, |
|
"loss": 0.1823, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.0204122836437366e-05, |
|
"loss": 0.2068, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.0156635118963146e-05, |
|
"loss": 0.185, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.0109127960919725e-05, |
|
"loss": 0.2399, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.0061601541409595e-05, |
|
"loss": 0.2785, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0014056039607875e-05, |
|
"loss": 0.3422, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.996649163476159e-05, |
|
"loss": 0.2575, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.991890850618907e-05, |
|
"loss": 0.2477, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.9871306833279205e-05, |
|
"loss": 0.2519, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.982368679549081e-05, |
|
"loss": 0.2906, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.977604857235194e-05, |
|
"loss": 0.2857, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.9728392343459205e-05, |
|
"loss": 0.2739, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.9680718288477078e-05, |
|
"loss": 0.18, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.963302658713726e-05, |
|
"loss": 0.361, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9585317419237977e-05, |
|
"loss": 0.273, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.953759096464329e-05, |
|
"loss": 0.2964, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9489847403282454e-05, |
|
"loss": 0.1882, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9442086915149198e-05, |
|
"loss": 0.2735, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9394309680301063e-05, |
|
"loss": 0.1811, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.9346515878858728e-05, |
|
"loss": 0.1866, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.929870569100534e-05, |
|
"loss": 0.3179, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9250879296985822e-05, |
|
"loss": 0.2438, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9203036877106178e-05, |
|
"loss": 0.2603, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.915517861173284e-05, |
|
"loss": 0.2509, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9107304681291987e-05, |
|
"loss": 0.1607, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9059415266268835e-05, |
|
"loss": 0.2151, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9011510547206983e-05, |
|
"loss": 0.1825, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.8963590704707744e-05, |
|
"loss": 0.2732, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.8915655919429424e-05, |
|
"loss": 0.2262, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.8867706372086684e-05, |
|
"loss": 0.2756, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.8819742243449815e-05, |
|
"loss": 0.244, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.877176371434409e-05, |
|
"loss": 0.2626, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.872377096564909e-05, |
|
"loss": 0.2115, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.867576417829797e-05, |
|
"loss": 0.2856, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.862774353327684e-05, |
|
"loss": 0.1677, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.8579709211624034e-05, |
|
"loss": 0.3426, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.8531661394429464e-05, |
|
"loss": 0.3588, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.8483600262833903e-05, |
|
"loss": 0.2081, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8435525998028334e-05, |
|
"loss": 0.1989, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.838743878125324e-05, |
|
"loss": 0.1828, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8339338793797942e-05, |
|
"loss": 0.1784, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.829122621699991e-05, |
|
"loss": 0.2827, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8243101232244064e-05, |
|
"loss": 0.2907, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8194964020962106e-05, |
|
"loss": 0.1445, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8146814764631835e-05, |
|
"loss": 0.2718, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8098653644776462e-05, |
|
"loss": 0.2541, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8050480842963923e-05, |
|
"loss": 0.2368, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.8002296540806176e-05, |
|
"loss": 0.388, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.795410091995858e-05, |
|
"loss": 0.2741, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.7905894162119118e-05, |
|
"loss": 0.2027, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.785767644902778e-05, |
|
"loss": 0.2919, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.780944796246587e-05, |
|
"loss": 0.2176, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.7761208884255274e-05, |
|
"loss": 0.2243, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.7712959396257847e-05, |
|
"loss": 0.2093, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.7664699680374668e-05, |
|
"loss": 0.2598, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7616429918545372e-05, |
|
"loss": 0.2837, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7568150292747487e-05, |
|
"loss": 0.3447, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7519860984995703e-05, |
|
"loss": 0.1362, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.747156217734122e-05, |
|
"loss": 0.2897, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7423254051871067e-05, |
|
"loss": 0.3078, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.737493679070739e-05, |
|
"loss": 0.3228, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.732661057600678e-05, |
|
"loss": 0.2625, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7278275589959567e-05, |
|
"loss": 0.3225, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.722993201478916e-05, |
|
"loss": 0.347, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.7181580032751375e-05, |
|
"loss": 0.1953, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.713321982613367e-05, |
|
"loss": 0.2488, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.7084851577254554e-05, |
|
"loss": 0.1825, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.7036475468462847e-05, |
|
"loss": 0.2255, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.6988091682136973e-05, |
|
"loss": 0.2152, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.6939700400684337e-05, |
|
"loss": 0.3146, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.6891301806540568e-05, |
|
"loss": 0.3407, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.6842896082168893e-05, |
|
"loss": 0.2713, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.6794483410059406e-05, |
|
"loss": 0.1729, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.674606397272839e-05, |
|
"loss": 0.2376, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.6697637952717646e-05, |
|
"loss": 0.2077, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.664920553259378e-05, |
|
"loss": 0.2652, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.2752176523208618, |
|
"eval_runtime": 119.1657, |
|
"eval_samples_per_second": 2.518, |
|
"eval_steps_per_second": 0.629, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.6600766894947532e-05, |
|
"loss": 0.2246, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.655232222239307e-05, |
|
"loss": 0.1462, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.650387169756734e-05, |
|
"loss": 0.2393, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.6455415503129326e-05, |
|
"loss": 0.1314, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.64069538217594e-05, |
|
"loss": 0.192, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.635848683615862e-05, |
|
"loss": 0.0874, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.6310014729048023e-05, |
|
"loss": 0.0998, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.6261537683167976e-05, |
|
"loss": 0.1193, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.621305588127746e-05, |
|
"loss": 0.1411, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.616456950615338e-05, |
|
"loss": 0.1109, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.6116078740589883e-05, |
|
"loss": 0.143, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.606758376739768e-05, |
|
"loss": 0.1552, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.6019084769403322e-05, |
|
"loss": 0.1401, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.5970581929448555e-05, |
|
"loss": 0.1238, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.592207543038961e-05, |
|
"loss": 0.157, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.587356545509648e-05, |
|
"loss": 0.1354, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.5825052186452307e-05, |
|
"loss": 0.0772, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.5776535807352624e-05, |
|
"loss": 0.1249, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.5728016500704698e-05, |
|
"loss": 0.1722, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.5679494449426822e-05, |
|
"loss": 0.1109, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.563096983644765e-05, |
|
"loss": 0.1392, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5582442844705496e-05, |
|
"loss": 0.2738, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5533913657147617e-05, |
|
"loss": 0.1032, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.548538245672959e-05, |
|
"loss": 0.1362, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.543684942641454e-05, |
|
"loss": 0.1145, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5388314749172504e-05, |
|
"loss": 0.1221, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5339778607979735e-05, |
|
"loss": 0.1397, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5291241185818003e-05, |
|
"loss": 0.1286, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5242702665673917e-05, |
|
"loss": 0.1395, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.5194163230538193e-05, |
|
"loss": 0.1255, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.5145623063405032e-05, |
|
"loss": 0.1144, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.5097082347271374e-05, |
|
"loss": 0.1663, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.5048541265136232e-05, |
|
"loss": 0.0835, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.1649, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.4951458734863774e-05, |
|
"loss": 0.1575, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.490291765272863e-05, |
|
"loss": 0.1065, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.4854376936594977e-05, |
|
"loss": 0.1426, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.4805836769461813e-05, |
|
"loss": 0.1179, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.475729733432609e-05, |
|
"loss": 0.0955, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.4708758814181992e-05, |
|
"loss": 0.1272, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.4660221392020274e-05, |
|
"loss": 0.1693, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.461168525082751e-05, |
|
"loss": 0.1303, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.456315057358547e-05, |
|
"loss": 0.1074, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.451461754327042e-05, |
|
"loss": 0.1135, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.4466086342852382e-05, |
|
"loss": 0.1152, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.441755715529452e-05, |
|
"loss": 0.1625, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.4369030163552357e-05, |
|
"loss": 0.1822, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.4320505550573183e-05, |
|
"loss": 0.1428, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.4271983499295305e-05, |
|
"loss": 0.1132, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.422346419264738e-05, |
|
"loss": 0.1193, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.4174947813547702e-05, |
|
"loss": 0.1895, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.4126434544903525e-05, |
|
"loss": 0.1162, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.4077924569610396e-05, |
|
"loss": 0.1293, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.402941807055144e-05, |
|
"loss": 0.0926, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.3980915230596684e-05, |
|
"loss": 0.1238, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3932416232602327e-05, |
|
"loss": 0.1547, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3883921259410123e-05, |
|
"loss": 0.1359, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3835430493846626e-05, |
|
"loss": 0.1336, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.378694411872254e-05, |
|
"loss": 0.1205, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3738462316832033e-05, |
|
"loss": 0.1257, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3689985270951987e-05, |
|
"loss": 0.149, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.364151316384139e-05, |
|
"loss": 0.0938, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3593046178240606e-05, |
|
"loss": 0.15, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3544584496870677e-05, |
|
"loss": 0.2233, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.3496128302432667e-05, |
|
"loss": 0.1576, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.3447677777606935e-05, |
|
"loss": 0.0962, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.3399233105052477e-05, |
|
"loss": 0.1173, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.335079446740622e-05, |
|
"loss": 0.0992, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.330236204728236e-05, |
|
"loss": 0.135, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.3253936027271618e-05, |
|
"loss": 0.18, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.32055165899406e-05, |
|
"loss": 0.1581, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.315710391783111e-05, |
|
"loss": 0.1085, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.3108698193459434e-05, |
|
"loss": 0.1132, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.3060299599315676e-05, |
|
"loss": 0.1664, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.3011908317863033e-05, |
|
"loss": 0.2236, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.2963524531537163e-05, |
|
"loss": 0.1851, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.2915148422745445e-05, |
|
"loss": 0.1282, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.286678017386634e-05, |
|
"loss": 0.1503, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.2818419967248637e-05, |
|
"loss": 0.1979, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.2770067985210842e-05, |
|
"loss": 0.1662, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.272172441004044e-05, |
|
"loss": 0.0973, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.267338942399323e-05, |
|
"loss": 0.1029, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.262506320929262e-05, |
|
"loss": 0.1139, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.257674594812894e-05, |
|
"loss": 0.1147, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.2528437822658782e-05, |
|
"loss": 0.1123, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.2480139015004303e-05, |
|
"loss": 0.1194, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.243184970725252e-05, |
|
"loss": 0.1294, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.238357008145463e-05, |
|
"loss": 0.1303, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.233530031962534e-05, |
|
"loss": 0.1026, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.2287040603742155e-05, |
|
"loss": 0.1535, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.2238791115744725e-05, |
|
"loss": 0.085, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.2190552037534143e-05, |
|
"loss": 0.1252, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.2142323550972224e-05, |
|
"loss": 0.1267, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.2094105837880884e-05, |
|
"loss": 0.1736, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.2045899080041428e-05, |
|
"loss": 0.0939, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.1997703459193823e-05, |
|
"loss": 0.1203, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.1949519157036093e-05, |
|
"loss": 0.1051, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1901346355223547e-05, |
|
"loss": 0.1457, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1853185235368167e-05, |
|
"loss": 0.0554, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1805035979037897e-05, |
|
"loss": 0.1718, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1756898767755938e-05, |
|
"loss": 0.1159, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.17087737830001e-05, |
|
"loss": 0.1209, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1660661206202064e-05, |
|
"loss": 0.1403, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1612561218746764e-05, |
|
"loss": 0.1215, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1564474001971668e-05, |
|
"loss": 0.1591, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.1516399737166103e-05, |
|
"loss": 0.1329, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.1468338605570542e-05, |
|
"loss": 0.1541, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.142029078837597e-05, |
|
"loss": 0.1101, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.1372256466723165e-05, |
|
"loss": 0.1102, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.132423582170203e-05, |
|
"loss": 0.1187, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.1276229034350924e-05, |
|
"loss": 0.0774, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.1228236285655915e-05, |
|
"loss": 0.1375, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.118025775655019e-05, |
|
"loss": 0.1574, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.1132293627913325e-05, |
|
"loss": 0.2015, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.1084344080570588e-05, |
|
"loss": 0.1211, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.1036409295292266e-05, |
|
"loss": 0.1638, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.0988489452793023e-05, |
|
"loss": 0.1607, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.094058473373117e-05, |
|
"loss": 0.1598, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.089269531870802e-05, |
|
"loss": 0.1297, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.0844821388267162e-05, |
|
"loss": 0.0894, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.079696312289383e-05, |
|
"loss": 0.0989, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.0749120703014184e-05, |
|
"loss": 0.117, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.070129430899466e-05, |
|
"loss": 0.1257, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.0653484121141274e-05, |
|
"loss": 0.0908, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.060569031969895e-05, |
|
"loss": 0.0963, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.055791308485081e-05, |
|
"loss": 0.1518, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.0510152596717548e-05, |
|
"loss": 0.1295, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.0462409035356707e-05, |
|
"loss": 0.1009, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.0414682580762035e-05, |
|
"loss": 0.1177, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.0366973412862747e-05, |
|
"loss": 0.1528, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.031928171152293e-05, |
|
"loss": 0.1452, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.02716076565408e-05, |
|
"loss": 0.1188, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.0223951427648057e-05, |
|
"loss": 0.1005, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.0176313204509194e-05, |
|
"loss": 0.1281, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.0128693166720804e-05, |
|
"loss": 0.1171, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.0081091493810937e-05, |
|
"loss": 0.0953, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.0033508365238414e-05, |
|
"loss": 0.1419, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.9985943960392134e-05, |
|
"loss": 0.1677, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.9938398458590407e-05, |
|
"loss": 0.1232, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9890872039080284e-05, |
|
"loss": 0.1195, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.984336488103686e-05, |
|
"loss": 0.1141, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9795877163562633e-05, |
|
"loss": 0.1568, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9748409065686816e-05, |
|
"loss": 0.2223, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9700960766364616e-05, |
|
"loss": 0.1446, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.965353244447664e-05, |
|
"loss": 0.1296, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9606124278828173e-05, |
|
"loss": 0.1106, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9558736448148485e-05, |
|
"loss": 0.1635, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.9511369131090223e-05, |
|
"loss": 0.0826, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9464022506228657e-05, |
|
"loss": 0.1426, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.941669675206107e-05, |
|
"loss": 0.0822, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.936939204700606e-05, |
|
"loss": 0.1312, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.932210856940288e-05, |
|
"loss": 0.1644, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9274846497510727e-05, |
|
"loss": 0.1393, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9227606009508113e-05, |
|
"loss": 0.1476, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9180387283492185e-05, |
|
"loss": 0.096, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.9133190497478043e-05, |
|
"loss": 0.1272, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.908601582939808e-05, |
|
"loss": 0.0936, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.903886345710128e-05, |
|
"loss": 0.0965, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.89917335583526e-05, |
|
"loss": 0.1546, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.8944626310832248e-05, |
|
"loss": 0.0956, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.8897541892135055e-05, |
|
"loss": 0.0933, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.885048047976979e-05, |
|
"loss": 0.1077, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.8803442251158454e-05, |
|
"loss": 0.1396, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.8756427383635682e-05, |
|
"loss": 0.1161, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.8709436054448026e-05, |
|
"loss": 0.1937, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.86624684407533e-05, |
|
"loss": 0.1633, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.8615524719619894e-05, |
|
"loss": 0.1792, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.8568605068026142e-05, |
|
"loss": 0.1649, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.8521709662859626e-05, |
|
"loss": 0.0818, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.8474838680916527e-05, |
|
"loss": 0.1711, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.842799229890095e-05, |
|
"loss": 0.0896, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.8381170693424245e-05, |
|
"loss": 0.1483, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.8334374041004354e-05, |
|
"loss": 0.1491, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.8287602518065167e-05, |
|
"loss": 0.1216, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.8240856300935827e-05, |
|
"loss": 0.1505, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.8194135565850052e-05, |
|
"loss": 0.1163, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.8147440488945518e-05, |
|
"loss": 0.1217, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.810077124626317e-05, |
|
"loss": 0.1485, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.8054128013746535e-05, |
|
"loss": 0.1189, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.800751096724111e-05, |
|
"loss": 0.1427, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.7960920282493634e-05, |
|
"loss": 0.1004, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.79143561351515e-05, |
|
"loss": 0.1543, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7867818700762034e-05, |
|
"loss": 0.1175, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7821308154771866e-05, |
|
"loss": 0.0988, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7774824672526244e-05, |
|
"loss": 0.1532, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7728368429268377e-05, |
|
"loss": 0.1442, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.768193960013881e-05, |
|
"loss": 0.12, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7635538360174714e-05, |
|
"loss": 0.1821, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7589164884309274e-05, |
|
"loss": 0.0993, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.7542819347370966e-05, |
|
"loss": 0.1641, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.7496501924082968e-05, |
|
"loss": 0.1128, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.7450212789062447e-05, |
|
"loss": 0.1039, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.740395211681995e-05, |
|
"loss": 0.0791, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.73577200817587e-05, |
|
"loss": 0.1634, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.7311516858173943e-05, |
|
"loss": 0.1786, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.726534262025234e-05, |
|
"loss": 0.1224, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.7219197542071255e-05, |
|
"loss": 0.1128, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.7173081797598117e-05, |
|
"loss": 0.1182, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.712699556068979e-05, |
|
"loss": 0.1226, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.7080939005091855e-05, |
|
"loss": 0.1392, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.703491230443803e-05, |
|
"loss": 0.1443, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.6988915632249458e-05, |
|
"loss": 0.1276, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.694294916193409e-05, |
|
"loss": 0.1953, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.6897013066786e-05, |
|
"loss": 0.1054, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.6851107519984756e-05, |
|
"loss": 0.1029, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.680523269459476e-05, |
|
"loss": 0.1613, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.6759388763564584e-05, |
|
"loss": 0.1227, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6713575899726356e-05, |
|
"loss": 0.125, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6667794275795036e-05, |
|
"loss": 0.1452, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.662204406436784e-05, |
|
"loss": 0.1336, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6576325437923556e-05, |
|
"loss": 0.2167, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6530638568821894e-05, |
|
"loss": 0.1426, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6484983629302822e-05, |
|
"loss": 0.1325, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6439360791485954e-05, |
|
"loss": 0.1278, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.6393770227369874e-05, |
|
"loss": 0.0895, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6348212108831502e-05, |
|
"loss": 0.1309, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6302686607625428e-05, |
|
"loss": 0.1519, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6257193895383253e-05, |
|
"loss": 0.1153, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.621173414361301e-05, |
|
"loss": 0.1797, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.616630752369843e-05, |
|
"loss": 0.0967, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.612091420689837e-05, |
|
"loss": 0.1474, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6075554364346103e-05, |
|
"loss": 0.2184, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.6030228167048722e-05, |
|
"loss": 0.1102, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.5984935785886468e-05, |
|
"loss": 0.1438, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.59396773916121e-05, |
|
"loss": 0.0791, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.5894453154850264e-05, |
|
"loss": 0.1601, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.5849263246096786e-05, |
|
"loss": 0.1176, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.5804107835718114e-05, |
|
"loss": 0.1305, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.575898709395063e-05, |
|
"loss": 0.1591, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.5713901190899998e-05, |
|
"loss": 0.1057, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.566885029654057e-05, |
|
"loss": 0.1391, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.5623834580714676e-05, |
|
"loss": 0.1242, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5578854213132048e-05, |
|
"loss": 0.1166, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5533909363369164e-05, |
|
"loss": 0.1637, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5489000200868568e-05, |
|
"loss": 0.1469, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.54441268949383e-05, |
|
"loss": 0.0992, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5399289614751173e-05, |
|
"loss": 0.1564, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.535448852934423e-05, |
|
"loss": 0.1034, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5309723807618027e-05, |
|
"loss": 0.151, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5264995618336057e-05, |
|
"loss": 0.0875, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.5220304130124052e-05, |
|
"loss": 0.1088, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5175649511469405e-05, |
|
"loss": 0.1347, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5131031930720502e-05, |
|
"loss": 0.2023, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5086451556086093e-05, |
|
"loss": 0.1685, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.5041908555634678e-05, |
|
"loss": 0.1629, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4997403097293822e-05, |
|
"loss": 0.1254, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4952935348849587e-05, |
|
"loss": 0.1185, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.490850547794586e-05, |
|
"loss": 0.0834, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4864113652083728e-05, |
|
"loss": 0.1567, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.4819760038620829e-05, |
|
"loss": 0.0952, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.4775444804770772e-05, |
|
"loss": 0.1094, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.473116811760245e-05, |
|
"loss": 0.1304, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.4686930144039462e-05, |
|
"loss": 0.1338, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.464273105085942e-05, |
|
"loss": 0.0972, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.459857100469339e-05, |
|
"loss": 0.1665, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.4554450172025191e-05, |
|
"loss": 0.1335, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.4510368719190859e-05, |
|
"loss": 0.1649, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.4466326812377928e-05, |
|
"loss": 0.1451, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4422324617624854e-05, |
|
"loss": 0.1221, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.437836230082038e-05, |
|
"loss": 0.122, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4334440027702894e-05, |
|
"loss": 0.1342, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4290557963859857e-05, |
|
"loss": 0.0989, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4246716274727107e-05, |
|
"loss": 0.08, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4202915125588273e-05, |
|
"loss": 0.1212, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4159154681574155e-05, |
|
"loss": 0.1019, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.4115435107662082e-05, |
|
"loss": 0.0637, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.4071756568675331e-05, |
|
"loss": 0.1287, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.402811922928245e-05, |
|
"loss": 0.1413, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3984523253996672e-05, |
|
"loss": 0.2187, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3940968807175287e-05, |
|
"loss": 0.1599, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3897456053019013e-05, |
|
"loss": 0.0722, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3853985155571411e-05, |
|
"loss": 0.1208, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3810556278718223e-05, |
|
"loss": 0.17, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.376716958618677e-05, |
|
"loss": 0.1113, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.372382524154534e-05, |
|
"loss": 0.1129, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3680523408202589e-05, |
|
"loss": 0.097, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3637264249406882e-05, |
|
"loss": 0.1424, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3594047928245701e-05, |
|
"loss": 0.0904, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3550874607645043e-05, |
|
"loss": 0.0723, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3507744450368773e-05, |
|
"loss": 0.0766, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3464657619018062e-05, |
|
"loss": 0.0977, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.342161427603071e-05, |
|
"loss": 0.1093, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3378614583680583e-05, |
|
"loss": 0.1345, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.333565870407697e-05, |
|
"loss": 0.0753, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3292746799163991e-05, |
|
"loss": 0.1454, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.324987903072001e-05, |
|
"loss": 0.1266, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3207055560356931e-05, |
|
"loss": 0.1338, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3164276549519722e-05, |
|
"loss": 0.1396, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3121542159485697e-05, |
|
"loss": 0.1833, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3078852551363954e-05, |
|
"loss": 0.194, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3036207886094795e-05, |
|
"loss": 0.2313, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2993608324449025e-05, |
|
"loss": 0.0586, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.295105402702747e-05, |
|
"loss": 0.1231, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2908545154260268e-05, |
|
"loss": 0.1453, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2866081866406337e-05, |
|
"loss": 0.1503, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2823664323552717e-05, |
|
"loss": 0.1043, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2781292685613994e-05, |
|
"loss": 0.1002, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.273896711233169e-05, |
|
"loss": 0.1011, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2696687763273663e-05, |
|
"loss": 0.1465, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2654454797833531e-05, |
|
"loss": 0.1675, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.261226837522999e-05, |
|
"loss": 0.1005, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2570128654506324e-05, |
|
"loss": 0.168, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2528035794529724e-05, |
|
"loss": 0.1272, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2485989953990703e-05, |
|
"loss": 0.122, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2443991291402563e-05, |
|
"loss": 0.1225, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2402039965100667e-05, |
|
"loss": 0.126, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2360136133241986e-05, |
|
"loss": 0.113, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2318279953804408e-05, |
|
"loss": 0.1045, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.227647158458616e-05, |
|
"loss": 0.1646, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.223471118320527e-05, |
|
"loss": 0.1719, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2192998907098855e-05, |
|
"loss": 0.1184, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2151334913522668e-05, |
|
"loss": 0.1288, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.210971935955039e-05, |
|
"loss": 0.125, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2068152402073128e-05, |
|
"loss": 0.12, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.2026634197798725e-05, |
|
"loss": 0.0967, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1985164903251252e-05, |
|
"loss": 0.1315, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1943744674770416e-05, |
|
"loss": 0.1556, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1902373668510888e-05, |
|
"loss": 0.0964, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.186105204044184e-05, |
|
"loss": 0.1493, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1819779946346204e-05, |
|
"loss": 0.1061, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1778557541820246e-05, |
|
"loss": 0.0629, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1737384982272855e-05, |
|
"loss": 0.1333, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1696262422925026e-05, |
|
"loss": 0.1346, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1655190018809236e-05, |
|
"loss": 0.1959, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1614167924768882e-05, |
|
"loss": 0.1594, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1573196295457708e-05, |
|
"loss": 0.1052, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1532275285339189e-05, |
|
"loss": 0.0575, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.149140504868596e-05, |
|
"loss": 0.1455, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1450585739579256e-05, |
|
"loss": 0.1342, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1409817511908289e-05, |
|
"loss": 0.1609, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1369100519369735e-05, |
|
"loss": 0.1078, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1328434915467068e-05, |
|
"loss": 0.1862, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1287820853510073e-05, |
|
"loss": 0.0874, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1247258486614169e-05, |
|
"loss": 0.1342, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.120674796769991e-05, |
|
"loss": 0.0909, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1166289449492401e-05, |
|
"loss": 0.1023, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1125883084520674e-05, |
|
"loss": 0.1757, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1085529025117161e-05, |
|
"loss": 0.1451, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1045227423417082e-05, |
|
"loss": 0.1336, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.100497843135792e-05, |
|
"loss": 0.1091, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.0964782200678795e-05, |
|
"loss": 0.1478, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.0924638882919925e-05, |
|
"loss": 0.1093, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0884548629422042e-05, |
|
"loss": 0.1458, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0844511591325823e-05, |
|
"loss": 0.1238, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0804527919571341e-05, |
|
"loss": 0.0978, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0764597764897455e-05, |
|
"loss": 0.1192, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0724721277841276e-05, |
|
"loss": 0.0812, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.068489860873758e-05, |
|
"loss": 0.0981, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0645129907718243e-05, |
|
"loss": 0.1202, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.0605415324711713e-05, |
|
"loss": 0.1237, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0565755009442378e-05, |
|
"loss": 0.1078, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0526149111430047e-05, |
|
"loss": 0.177, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0486597779989379e-05, |
|
"loss": 0.1333, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0447101164229304e-05, |
|
"loss": 0.1389, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0407659413052501e-05, |
|
"loss": 0.1018, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.036827267515478e-05, |
|
"loss": 0.119, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0328941099024566e-05, |
|
"loss": 0.1026, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.0289664832942306e-05, |
|
"loss": 0.113, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.025044402497996e-05, |
|
"loss": 0.118, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0211278823000381e-05, |
|
"loss": 0.0616, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0172169374656799e-05, |
|
"loss": 0.1393, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0133115827392244e-05, |
|
"loss": 0.0941, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0094118328439004e-05, |
|
"loss": 0.1501, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0055177024818078e-05, |
|
"loss": 0.1502, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.0016292063338583e-05, |
|
"loss": 0.0988, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 9.977463590597242e-06, |
|
"loss": 0.1966, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 9.938691752977803e-06, |
|
"loss": 0.1601, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.899976696650504e-06, |
|
"loss": 0.0721, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.861318567571534e-06, |
|
"loss": 0.0712, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.82271751148244e-06, |
|
"loss": 0.1241, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.784173673909616e-06, |
|
"loss": 0.1512, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.745687200163736e-06, |
|
"loss": 0.1194, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.707258235339212e-06, |
|
"loss": 0.1073, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.668886924313668e-06, |
|
"loss": 0.1113, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.630573411747329e-06, |
|
"loss": 0.1102, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.592317842082563e-06, |
|
"loss": 0.1276, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.554120359543259e-06, |
|
"loss": 0.1438, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.51598110813435e-06, |
|
"loss": 0.1074, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.47790023164122e-06, |
|
"loss": 0.0751, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.43987787362915e-06, |
|
"loss": 0.1376, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.40191417744286e-06, |
|
"loss": 0.0956, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.364009286205874e-06, |
|
"loss": 0.1432, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.326163342820046e-06, |
|
"loss": 0.1, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.288376489964982e-06, |
|
"loss": 0.1296, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.250648870097518e-06, |
|
"loss": 0.0851, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.212980625451176e-06, |
|
"loss": 0.2346, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.175371898035636e-06, |
|
"loss": 0.1305, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.137822829636225e-06, |
|
"loss": 0.165, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.100333561813296e-06, |
|
"loss": 0.1135, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.062904235901808e-06, |
|
"loss": 0.1517, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.025534993010715e-06, |
|
"loss": 0.1258, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 8.988225974022446e-06, |
|
"loss": 0.2852, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 8.95097731959243e-06, |
|
"loss": 0.1114, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.913789170148454e-06, |
|
"loss": 0.1386, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.87666166589026e-06, |
|
"loss": 0.1488, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.839594946788915e-06, |
|
"loss": 0.2591, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.802589152586376e-06, |
|
"loss": 0.11, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.76564442279485e-06, |
|
"loss": 0.0713, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.728760896696362e-06, |
|
"loss": 0.1083, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.691938713342215e-06, |
|
"loss": 0.1353, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.655178011552414e-06, |
|
"loss": 0.0968, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.618478929915221e-06, |
|
"loss": 0.0819, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.581841606786531e-06, |
|
"loss": 0.1388, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.545266180289465e-06, |
|
"loss": 0.1, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.508752788313761e-06, |
|
"loss": 0.1425, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.472301568515287e-06, |
|
"loss": 0.1154, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.435912658315556e-06, |
|
"loss": 0.1349, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.39958619490111e-06, |
|
"loss": 0.1487, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 8.363322315223129e-06, |
|
"loss": 0.1043, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.327121155996814e-06, |
|
"loss": 0.1116, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.290982853700924e-06, |
|
"loss": 0.086, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.25490754457724e-06, |
|
"loss": 0.1255, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.218895364630052e-06, |
|
"loss": 0.1263, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.182946449625678e-06, |
|
"loss": 0.1045, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.147060935091894e-06, |
|
"loss": 0.153, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.111238956317496e-06, |
|
"loss": 0.0833, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.0754806483517e-06, |
|
"loss": 0.1391, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.039786146003705e-06, |
|
"loss": 0.1349, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 8.004155583842183e-06, |
|
"loss": 0.1256, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.968589096194715e-06, |
|
"loss": 0.1078, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.933086817147359e-06, |
|
"loss": 0.0909, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.897648880544056e-06, |
|
"loss": 0.0835, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.862275419986226e-06, |
|
"loss": 0.0972, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.826966568832184e-06, |
|
"loss": 0.0991, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.791722460196673e-06, |
|
"loss": 0.1872, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 7.756543226950357e-06, |
|
"loss": 0.1335, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.72142900171931e-06, |
|
"loss": 0.0709, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.686379916884557e-06, |
|
"loss": 0.0947, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.651396104581501e-06, |
|
"loss": 0.0985, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.616477696699498e-06, |
|
"loss": 0.1115, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.581624824881306e-06, |
|
"loss": 0.1039, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.5468376205226236e-06, |
|
"loss": 0.1254, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.512116214771589e-06, |
|
"loss": 0.1149, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.4774607385282695e-06, |
|
"loss": 0.1292, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 7.442871322444175e-06, |
|
"loss": 0.1437, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.408348096921772e-06, |
|
"loss": 0.1445, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.373891192113979e-06, |
|
"loss": 0.1095, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.339500737923713e-06, |
|
"loss": 0.1278, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.305176864003338e-06, |
|
"loss": 0.1386, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.2709196997542275e-06, |
|
"loss": 0.121, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.236729374326248e-06, |
|
"loss": 0.0748, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.2026060166173025e-06, |
|
"loss": 0.1057, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 7.168549755272805e-06, |
|
"loss": 0.1348, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.134560718685221e-06, |
|
"loss": 0.0988, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.100639034993583e-06, |
|
"loss": 0.1222, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.06678483208299e-06, |
|
"loss": 0.2182, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.032998237584163e-06, |
|
"loss": 0.0978, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 6.999279378872911e-06, |
|
"loss": 0.1155, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 6.965628383069695e-06, |
|
"loss": 0.1458, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 6.932045377039123e-06, |
|
"loss": 0.1299, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 6.898530487389476e-06, |
|
"loss": 0.0944, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 6.865083840472264e-06, |
|
"loss": 0.1597, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.831705562381691e-06, |
|
"loss": 0.1299, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.79839577895422e-06, |
|
"loss": 0.1078, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.765154615768088e-06, |
|
"loss": 0.0956, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.731982198142825e-06, |
|
"loss": 0.0884, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.698878651138826e-06, |
|
"loss": 0.1004, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.665844099556778e-06, |
|
"loss": 0.1263, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.632878667937318e-06, |
|
"loss": 0.1453, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 6.59998248056046e-06, |
|
"loss": 0.1626, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.56715566144519e-06, |
|
"loss": 0.11, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.534398334348968e-06, |
|
"loss": 0.1286, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.501710622767243e-06, |
|
"loss": 0.0673, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.469092649933056e-06, |
|
"loss": 0.1513, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.436544538816497e-06, |
|
"loss": 0.0717, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.404066412124307e-06, |
|
"loss": 0.1094, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.371658392299359e-06, |
|
"loss": 0.1099, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.339320601520235e-06, |
|
"loss": 0.1405, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 6.307053161700746e-06, |
|
"loss": 0.1109, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.274856194489478e-06, |
|
"loss": 0.1052, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.242729821269363e-06, |
|
"loss": 0.0979, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.210674163157137e-06, |
|
"loss": 0.1077, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.178689341002997e-06, |
|
"loss": 0.1181, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.146775475390051e-06, |
|
"loss": 0.0948, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.114932686633909e-06, |
|
"loss": 0.0884, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.083161094782242e-06, |
|
"loss": 0.1241, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.0514608196142605e-06, |
|
"loss": 0.1073, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 6.0198319806403585e-06, |
|
"loss": 0.0649, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.988274697101584e-06, |
|
"loss": 0.1385, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.956789087969241e-06, |
|
"loss": 0.1089, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.925375271944417e-06, |
|
"loss": 0.1236, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.894033367457505e-06, |
|
"loss": 0.1025, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.862763492667844e-06, |
|
"loss": 0.1173, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.831565765463176e-06, |
|
"loss": 0.1165, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 5.80044030345929e-06, |
|
"loss": 0.0823, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.769387223999473e-06, |
|
"loss": 0.1191, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.738406644154193e-06, |
|
"loss": 0.0739, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.707498680720555e-06, |
|
"loss": 0.1119, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.6766634502219e-06, |
|
"loss": 0.1317, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.645901068907402e-06, |
|
"loss": 0.1156, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.615211652751534e-06, |
|
"loss": 0.1069, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.584595317453742e-06, |
|
"loss": 0.1262, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.554052178437927e-06, |
|
"loss": 0.134, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 5.5235823508520496e-06, |
|
"loss": 0.1134, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.493185949567683e-06, |
|
"loss": 0.1353, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.462863089179576e-06, |
|
"loss": 0.1129, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.43261388400525e-06, |
|
"loss": 0.0818, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.4024384480845126e-06, |
|
"loss": 0.0989, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.372336895179103e-06, |
|
"loss": 0.11, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.34230933877217e-06, |
|
"loss": 0.1333, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.312355892067924e-06, |
|
"loss": 0.1065, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 5.282476667991193e-06, |
|
"loss": 0.1304, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.2526717791869486e-06, |
|
"loss": 0.116, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.222941338019966e-06, |
|
"loss": 0.1408, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.193285456574293e-06, |
|
"loss": 0.1197, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.163704246652939e-06, |
|
"loss": 0.0932, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.134197819777373e-06, |
|
"loss": 0.1074, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.1047662871871376e-06, |
|
"loss": 0.0862, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.075409759839425e-06, |
|
"loss": 0.1344, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.046128348408647e-06, |
|
"loss": 0.104, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 5.01692216328605e-06, |
|
"loss": 0.0985, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.987791314579254e-06, |
|
"loss": 0.1116, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.95873591211187e-06, |
|
"loss": 0.1118, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.929756065423074e-06, |
|
"loss": 0.1349, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.900851883767191e-06, |
|
"loss": 0.1698, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.872023476113302e-06, |
|
"loss": 0.0819, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.8432709511448046e-06, |
|
"loss": 0.0712, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.814594417259024e-06, |
|
"loss": 0.0786, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.785993982566802e-06, |
|
"loss": 0.1385, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.757469754892069e-06, |
|
"loss": 0.1181, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.729021841771483e-06, |
|
"loss": 0.1203, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.700650350453969e-06, |
|
"loss": 0.096, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.672355387900354e-06, |
|
"loss": 0.1278, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.644137060782938e-06, |
|
"loss": 0.1027, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.61599547548513e-06, |
|
"loss": 0.1045, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.5879307381009986e-06, |
|
"loss": 0.0892, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.559942954434901e-06, |
|
"loss": 0.0985, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.532032230001079e-06, |
|
"loss": 0.0946, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.504198670023255e-06, |
|
"loss": 0.1091, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.4764423794342566e-06, |
|
"loss": 0.092, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.4487634628755884e-06, |
|
"loss": 0.0996, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.421162024697062e-06, |
|
"loss": 0.0878, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.39363816895639e-06, |
|
"loss": 0.0931, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.3661919994188e-06, |
|
"loss": 0.2033, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.338823619556648e-06, |
|
"loss": 0.1361, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.311533132549017e-06, |
|
"loss": 0.1271, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.284320641281328e-06, |
|
"loss": 0.164, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.257186248344966e-06, |
|
"loss": 0.1327, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.2301300560368715e-06, |
|
"loss": 0.0989, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.203152166359192e-06, |
|
"loss": 0.1076, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.176252681018852e-06, |
|
"loss": 0.1741, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.149431701427195e-06, |
|
"loss": 0.1466, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.122689328699595e-06, |
|
"loss": 0.0996, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.09602566365509e-06, |
|
"loss": 0.0916, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.069440806815985e-06, |
|
"loss": 0.1181, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.042934858407454e-06, |
|
"loss": 0.114, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.016507918357218e-06, |
|
"loss": 0.1012, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.990160086295116e-06, |
|
"loss": 0.0982, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.963891461552762e-06, |
|
"loss": 0.1343, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.93770214316315e-06, |
|
"loss": 0.124, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.911592229860281e-06, |
|
"loss": 0.1191, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.885561820078809e-06, |
|
"loss": 0.1061, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.859611011953654e-06, |
|
"loss": 0.0841, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.833739903319644e-06, |
|
"loss": 0.0811, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.80794859171113e-06, |
|
"loss": 0.146, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.7822371743616305e-06, |
|
"loss": 0.1197, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.7566057482034622e-06, |
|
"loss": 0.1273, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.7310544098673684e-06, |
|
"loss": 0.1497, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.705583255682185e-06, |
|
"loss": 0.0806, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.6801923816744078e-06, |
|
"loss": 0.087, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 3.6548818835679203e-06, |
|
"loss": 0.1089, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.6296518567835614e-06, |
|
"loss": 0.0876, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.604502396438805e-06, |
|
"loss": 0.1278, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.5794335973473896e-06, |
|
"loss": 0.0956, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.5544455540189313e-06, |
|
"loss": 0.0795, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.5295383606586436e-06, |
|
"loss": 0.1412, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.5047121111668914e-06, |
|
"loss": 0.1463, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.4799668991389227e-06, |
|
"loss": 0.1356, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.4553028178644227e-06, |
|
"loss": 0.133, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 3.4307199603272554e-06, |
|
"loss": 0.0912, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.406218419205054e-06, |
|
"loss": 0.1031, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.381798286868881e-06, |
|
"loss": 0.1309, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.3574596553829134e-06, |
|
"loss": 0.1876, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.3332026165040247e-06, |
|
"loss": 0.061, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.309027261681527e-06, |
|
"loss": 0.1391, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.2849336820567623e-06, |
|
"loss": 0.0946, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.260921968462774e-06, |
|
"loss": 0.1705, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 3.236992211423995e-06, |
|
"loss": 0.1328, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.2131445011558403e-06, |
|
"loss": 0.0996, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.189378927564446e-06, |
|
"loss": 0.1193, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.165695580246278e-06, |
|
"loss": 0.0792, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.1420945484878004e-06, |
|
"loss": 0.1219, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.1185759212651542e-06, |
|
"loss": 0.0859, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.0951397872438144e-06, |
|
"loss": 0.1128, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.071786234778262e-06, |
|
"loss": 0.119, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.0485153519116264e-06, |
|
"loss": 0.1078, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 3.025327226375402e-06, |
|
"loss": 0.125, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 3.0022219455890455e-06, |
|
"loss": 0.0977, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.9791995966597264e-06, |
|
"loss": 0.1145, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.956260266381941e-06, |
|
"loss": 0.1194, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.9334040412372034e-06, |
|
"loss": 0.1032, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.910631007393741e-06, |
|
"loss": 0.1075, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.887941250706108e-06, |
|
"loss": 0.086, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.865334856714949e-06, |
|
"loss": 0.096, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.8428119106466033e-06, |
|
"loss": 0.0993, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.8203724974128153e-06, |
|
"loss": 0.1105, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.7980167016104148e-06, |
|
"loss": 0.0958, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.7757446075209804e-06, |
|
"loss": 0.0537, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.7535562991105545e-06, |
|
"loss": 0.1117, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.731451860029291e-06, |
|
"loss": 0.0974, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.7094313736111627e-06, |
|
"loss": 0.11, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.6874949228736345e-06, |
|
"loss": 0.0977, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.6656425905173534e-06, |
|
"loss": 0.0965, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.6438744589258575e-06, |
|
"loss": 0.2249, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.622190610165226e-06, |
|
"loss": 0.0798, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.6005911259838004e-06, |
|
"loss": 0.0977, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.57907608781186e-06, |
|
"loss": 0.0972, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.5576455767613416e-06, |
|
"loss": 0.0788, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.536299673625489e-06, |
|
"loss": 0.0978, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.5150384588785854e-06, |
|
"loss": 0.0989, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.4938620126756377e-06, |
|
"loss": 0.0892, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.472770414852066e-06, |
|
"loss": 0.1043, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.4517637449234325e-06, |
|
"loss": 0.128, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.430842082085097e-06, |
|
"loss": 0.0625, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.4100055052119515e-06, |
|
"loss": 0.0961, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.3892540928581118e-06, |
|
"loss": 0.0753, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.3685879232566184e-06, |
|
"loss": 0.0984, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.348007074319164e-06, |
|
"loss": 0.1287, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.327511623635756e-06, |
|
"loss": 0.1022, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.3071016484744663e-06, |
|
"loss": 0.0877, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.286777225781117e-06, |
|
"loss": 0.0959, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.266538432178991e-06, |
|
"loss": 0.1011, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.2463853439685683e-06, |
|
"loss": 0.1171, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.226318037127198e-06, |
|
"loss": 0.1105, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.206336587308841e-06, |
|
"loss": 0.1006, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.1864410698437724e-06, |
|
"loss": 0.1348, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.1666315597383107e-06, |
|
"loss": 0.1102, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.146908131674516e-06, |
|
"loss": 0.1541, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.1272708600099223e-06, |
|
"loss": 0.1348, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.107719818777254e-06, |
|
"loss": 0.0756, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0882550816841375e-06, |
|
"loss": 0.0993, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0688767221128475e-06, |
|
"loss": 0.1071, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0495848131200005e-06, |
|
"loss": 0.1125, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.030379427436299e-06, |
|
"loss": 0.1436, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.011260637466253e-06, |
|
"loss": 0.1354, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.9922285152879e-06, |
|
"loss": 0.108, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.9732831326525453e-06, |
|
"loss": 0.1493, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.9544245609844857e-06, |
|
"loss": 0.1273, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.935652871380733e-06, |
|
"loss": 0.1096, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.916968134610758e-06, |
|
"loss": 0.0969, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8983704211162107e-06, |
|
"loss": 0.0998, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8798598010106861e-06, |
|
"loss": 0.1205, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8614363440793954e-06, |
|
"loss": 0.106, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8431001197789816e-06, |
|
"loss": 0.1222, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8248511972372012e-06, |
|
"loss": 0.1139, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.8066896452526877e-06, |
|
"loss": 0.124, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.7886155322946907e-06, |
|
"loss": 0.1522, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.7706289265027932e-06, |
|
"loss": 0.1053, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.7527298956867e-06, |
|
"loss": 0.1356, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.7349185073259444e-06, |
|
"loss": 0.1165, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.7171948285696544e-06, |
|
"loss": 0.1294, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.699558926236286e-06, |
|
"loss": 0.1029, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.6820108668133771e-06, |
|
"loss": 0.1121, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.664550716457297e-06, |
|
"loss": 0.0673, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.647178540992994e-06, |
|
"loss": 0.1169, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.6298944059137677e-06, |
|
"loss": 0.1314, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.612698376380975e-06, |
|
"loss": 0.088, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.595590517223844e-06, |
|
"loss": 0.1354, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.5785708929391741e-06, |
|
"loss": 0.1067, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.5616395676911339e-06, |
|
"loss": 0.0763, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.544796605311008e-06, |
|
"loss": 0.1398, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.5280420692969283e-06, |
|
"loss": 0.1444, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.5113760228136847e-06, |
|
"loss": 0.1295, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.4947985286924454e-06, |
|
"loss": 0.0716, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.4783096494305342e-06, |
|
"loss": 0.1242, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.4619094471912177e-06, |
|
"loss": 0.0726, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.445597983803415e-06, |
|
"loss": 0.1215, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.429375320761525e-06, |
|
"loss": 0.1018, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.413241519225153e-06, |
|
"loss": 0.1488, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.3971966400189162e-06, |
|
"loss": 0.098, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.3812407436321645e-06, |
|
"loss": 0.1123, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.3653738902188073e-06, |
|
"loss": 0.093, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.3495961395970452e-06, |
|
"loss": 0.1102, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.3339075512491639e-06, |
|
"loss": 0.0882, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.318308184321318e-06, |
|
"loss": 0.107, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.3027980976232702e-06, |
|
"loss": 0.0766, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2873773496282298e-06, |
|
"loss": 0.1422, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2720459984725707e-06, |
|
"loss": 0.1008, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.2568041019556498e-06, |
|
"loss": 0.1396, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.2416517175395826e-06, |
|
"loss": 0.0963, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.2265889023490157e-06, |
|
"loss": 0.0879, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.2116157131709244e-06, |
|
"loss": 0.1432, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1967322064543928e-06, |
|
"loss": 0.0763, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1819384383103981e-06, |
|
"loss": 0.105, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1672344645116019e-06, |
|
"loss": 0.0904, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1526203404921387e-06, |
|
"loss": 0.129, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1380961213474177e-06, |
|
"loss": 0.0667, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.1236618618338967e-06, |
|
"loss": 0.1257, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.1093176163688905e-06, |
|
"loss": 0.1067, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.09506343903035e-06, |
|
"loss": 0.0863, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.0808993835566828e-06, |
|
"loss": 0.1366, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.0668255033465247e-06, |
|
"loss": 0.1116, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.0528418514585525e-06, |
|
"loss": 0.115, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.0389484806112876e-06, |
|
"loss": 0.1097, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.025145443182876e-06, |
|
"loss": 0.1206, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.0114327912109256e-06, |
|
"loss": 0.1492, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.978105763922773e-07, |
|
"loss": 0.1415, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.84278850082823e-07, |
|
"loss": 0.0782, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.708376632973164e-07, |
|
"loss": 0.0994, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.574870667091701e-07, |
|
"loss": 0.1182, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.442271106502837e-07, |
|
"loss": 0.093, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.310578451108304e-07, |
|
"loss": 0.0951, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.179793197390763e-07, |
|
"loss": 0.1345, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.049915838412027e-07, |
|
"loss": 0.1178, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.920946863811119e-07, |
|
"loss": 0.1118, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.792886759802471e-07, |
|
"loss": 0.1547, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.66573600917403e-07, |
|
"loss": 0.151, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.539495091285459e-07, |
|
"loss": 0.1292, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.414164482066333e-07, |
|
"loss": 0.1194, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.28974465401447e-07, |
|
"loss": 0.0824, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.16623607619385e-07, |
|
"loss": 0.1535, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 8.043639214233207e-07, |
|
"loss": 0.1059, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.921954530323933e-07, |
|
"loss": 0.1577, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.801182483218595e-07, |
|
"loss": 0.0878, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.681323528229145e-07, |
|
"loss": 0.1004, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.5623781172251e-07, |
|
"loss": 0.0641, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.444346698631894e-07, |
|
"loss": 0.1381, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.327229717429245e-07, |
|
"loss": 0.1161, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.211027615149385e-07, |
|
"loss": 0.1388, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.095740829875547e-07, |
|
"loss": 0.118, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.981369796240089e-07, |
|
"loss": 0.1204, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.867914945423049e-07, |
|
"loss": 0.1001, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.755376705150363e-07, |
|
"loss": 0.1359, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.643755499692428e-07, |
|
"loss": 0.1169, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.533051749862407e-07, |
|
"loss": 0.0915, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.423265873014589e-07, |
|
"loss": 0.1115, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.314398283042893e-07, |
|
"loss": 0.1241, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.20644939037926e-07, |
|
"loss": 0.115, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.099419601992201e-07, |
|
"loss": 0.1246, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 5.993309321385171e-07, |
|
"loss": 0.0975, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.888118948595006e-07, |
|
"loss": 0.1134, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.783848880190567e-07, |
|
"loss": 0.0923, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.680499509271076e-07, |
|
"loss": 0.1309, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.578071225464803e-07, |
|
"loss": 0.113, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.47656441492747e-07, |
|
"loss": 0.14, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.375979460340824e-07, |
|
"loss": 0.093, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.276316740911197e-07, |
|
"loss": 0.1098, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.177576632368092e-07, |
|
"loss": 0.1014, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 5.079759506962795e-07, |
|
"loss": 0.1092, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.982865733466874e-07, |
|
"loss": 0.1228, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.886895677170933e-07, |
|
"loss": 0.0758, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.791849699883083e-07, |
|
"loss": 0.0999, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.697728159927639e-07, |
|
"loss": 0.0957, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.60453141214387e-07, |
|
"loss": 0.0792, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.512259807884417e-07, |
|
"loss": 0.0915, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.420913695014295e-07, |
|
"loss": 0.1734, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.330493417909254e-07, |
|
"loss": 0.1091, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.240999317454725e-07, |
|
"loss": 0.1025, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.152431731044487e-07, |
|
"loss": 0.15, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.0647909925791426e-07, |
|
"loss": 0.1162, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.97807743246531e-07, |
|
"loss": 0.1279, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.892291377613988e-07, |
|
"loss": 0.1216, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.8074331514395824e-07, |
|
"loss": 0.1044, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.7235030738584084e-07, |
|
"loss": 0.0641, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 3.6405014612878044e-07, |
|
"loss": 0.0878, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.558428626644739e-07, |
|
"loss": 0.1061, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.47728487934465e-07, |
|
"loss": 0.0992, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.3970705253003607e-07, |
|
"loss": 0.1816, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.317785866920775e-07, |
|
"loss": 0.1118, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.239431203109933e-07, |
|
"loss": 0.1187, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.1620068292657625e-07, |
|
"loss": 0.081, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.08551303727897e-07, |
|
"loss": 0.1085, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.009950115531984e-07, |
|
"loss": 0.0814, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.9353183488977364e-07, |
|
"loss": 0.1123, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.861618018738882e-07, |
|
"loss": 0.0972, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.788849402906413e-07, |
|
"loss": 0.0961, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.71701277573877e-07, |
|
"loss": 0.1137, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.6461084080608157e-07, |
|
"loss": 0.128, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.576136567182752e-07, |
|
"loss": 0.104, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.507097516899176e-07, |
|
"loss": 0.1349, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.4389915174880264e-07, |
|
"loss": 0.1089, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.371818825709693e-07, |
|
"loss": 0.1062, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.3055796948058816e-07, |
|
"loss": 0.0802, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.240274374498863e-07, |
|
"loss": 0.1262, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.175903110990335e-07, |
|
"loss": 0.0956, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.112466146960701e-07, |
|
"loss": 0.1017, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.04996372156796e-07, |
|
"loss": 0.0907, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.9883960704469018e-07, |
|
"loss": 0.2302, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.9277634257082734e-07, |
|
"loss": 0.1546, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8680660159377806e-07, |
|
"loss": 0.0741, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8093040661952555e-07, |
|
"loss": 0.0831, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7514777980139062e-07, |
|
"loss": 0.3893, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.694587429399347e-07, |
|
"loss": 0.1135, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.6386331748289018e-07, |
|
"loss": 0.072, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.5836152452506913e-07, |
|
"loss": 0.136, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.529533848082909e-07, |
|
"loss": 0.1248, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.47638918721299e-07, |
|
"loss": 0.2179, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.4241814629968597e-07, |
|
"loss": 0.079, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.3729108722581873e-07, |
|
"loss": 0.1404, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.3225776082876895e-07, |
|
"loss": 0.1148, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.2731818608422707e-07, |
|
"loss": 0.2161, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.224723816144385e-07, |
|
"loss": 0.1393, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.1772036568814537e-07, |
|
"loss": 0.1139, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.1306215622049477e-07, |
|
"loss": 0.1484, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0849777077298617e-07, |
|
"loss": 0.1434, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.0402722655339924e-07, |
|
"loss": 0.1226, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 9.965054041573552e-08, |
|
"loss": 0.1252, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 9.536772886014899e-08, |
|
"loss": 0.0951, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 9.117880803288237e-08, |
|
"loss": 0.1476, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 8.708379372621144e-08, |
|
"loss": 0.1062, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 8.308270137838692e-08, |
|
"loss": 0.092, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.917554607356492e-08, |
|
"loss": 0.1016, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.536234254176544e-08, |
|
"loss": 0.0641, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 7.164310515880846e-08, |
|
"loss": 0.1292, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.801784794625287e-08, |
|
"loss": 0.0773, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.448658457136326e-08, |
|
"loss": 0.0808, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.10493283470348e-08, |
|
"loss": 0.0779, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.7706092231760156e-08, |
|
"loss": 0.0776, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.445688882957933e-08, |
|
"loss": 0.1363, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.1301730390018734e-08, |
|
"loss": 0.0925, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.8240628808063346e-08, |
|
"loss": 0.0877, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.527359562409572e-08, |
|
"loss": 0.0768, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.24006420238654e-08, |
|
"loss": 0.0894, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.962177883843343e-08, |
|
"loss": 0.1266, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.6937016544147386e-08, |
|
"loss": 0.1209, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.434636526258861e-08, |
|
"loss": 0.1436, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 3.184983476054171e-08, |
|
"loss": 0.0835, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.944743444995013e-08, |
|
"loss": 0.1548, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.7139173387891182e-08, |
|
"loss": 0.1046, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.4925060276528857e-08, |
|
"loss": 0.1075, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.2805103463099942e-08, |
|
"loss": 0.1032, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.077931093985852e-08, |
|
"loss": 0.1386, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8847690344067637e-08, |
|
"loss": 0.0772, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7010248957954887e-08, |
|
"loss": 0.1207, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.5266993708695776e-08, |
|
"loss": 0.1153, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.3617931168380394e-08, |
|
"loss": 0.1396, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.2063067553991225e-08, |
|
"loss": 0.101, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0602408727383717e-08, |
|
"loss": 0.1365, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 9.235960195252969e-09, |
|
"loss": 0.0849, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.963727109125407e-09, |
|
"loss": 0.1093, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.785714265333809e-09, |
|
"loss": 0.1422, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.7019261050006434e-09, |
|
"loss": 0.1219, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.712366714018646e-09, |
|
"loss": 0.1076, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.817039823034163e-09, |
|
"loss": 0.0883, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.0159488074416044e-09, |
|
"loss": 0.1105, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.3090966873640096e-09, |
|
"loss": 0.0981, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6964861276363985e-09, |
|
"loss": 0.0843, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.1781194378113203e-09, |
|
"loss": 0.1171, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 7.539985721366494e-10, |
|
"loss": 0.0812, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.2412512955281037e-10, |
|
"loss": 0.1296, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.885003536816754e-10, |
|
"loss": 0.0793, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.712513283489095e-11, |
|
"loss": 0.1052, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.1093, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 0.24669183790683746, |
|
"eval_runtime": 119.2309, |
|
"eval_samples_per_second": 2.516, |
|
"eval_steps_per_second": 0.629, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1686, |
|
"total_flos": 8.934629359151481e+18, |
|
"train_loss": 0.20717893251850936, |
|
"train_runtime": 104058.0629, |
|
"train_samples_per_second": 0.519, |
|
"train_steps_per_second": 0.016 |
|
} |
|
], |
|
"max_steps": 1686, |
|
"num_train_epochs": 2, |
|
"total_flos": 8.934629359151481e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|