{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 11.076923076923077,
  "eval_steps": 500,
  "global_step": 108,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.10256410256410256,
      "grad_norm": 0.1054198294878006,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.2336,
      "step": 1
    },
    {
      "epoch": 0.20512820512820512,
      "grad_norm": 0.10983864963054657,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.2488,
      "step": 2
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.10180113464593887,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.2371,
      "step": 3
    },
    {
      "epoch": 0.41025641025641024,
      "grad_norm": 0.10248314589262009,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.2386,
      "step": 4
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.10735535621643066,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.2428,
      "step": 5
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.10470724105834961,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2283,
      "step": 6
    },
    {
      "epoch": 0.717948717948718,
      "grad_norm": 0.09391864389181137,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.2275,
      "step": 7
    },
    {
      "epoch": 0.8205128205128205,
      "grad_norm": 0.11098546534776688,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.2395,
      "step": 8
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.09393182396888733,
      "learning_rate": 5e-05,
      "loss": 0.2279,
      "step": 9
    },
    {
      "epoch": 1.0256410256410255,
      "grad_norm": 0.08448313176631927,
      "learning_rate": 5.555555555555556e-05,
      "loss": 0.2339,
      "step": 10
    },
    {
      "epoch": 1.1282051282051282,
      "grad_norm": 0.06229618564248085,
      "learning_rate": 6.111111111111112e-05,
      "loss": 0.2203,
      "step": 11
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.06508181244134903,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.2163,
      "step": 12
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.061453677713871,
      "learning_rate": 7.222222222222222e-05,
      "loss": 0.2244,
      "step": 13
    },
    {
      "epoch": 1.435897435897436,
      "grad_norm": 0.05588587746024132,
      "learning_rate": 7.777777777777778e-05,
      "loss": 0.2196,
      "step": 14
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.05503275617957115,
      "learning_rate": 8.333333333333334e-05,
      "loss": 0.2193,
      "step": 15
    },
    {
      "epoch": 1.641025641025641,
      "grad_norm": 0.07127121835947037,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.2086,
      "step": 16
    },
    {
      "epoch": 1.7435897435897436,
      "grad_norm": 0.06692970544099808,
      "learning_rate": 9.444444444444444e-05,
      "loss": 0.2168,
      "step": 17
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.056347738951444626,
      "learning_rate": 0.0001,
      "loss": 0.2117,
      "step": 18
    },
    {
      "epoch": 1.9487179487179487,
      "grad_norm": 0.05091376230120659,
      "learning_rate": 9.999059852242507e-05,
      "loss": 0.2096,
      "step": 19
    },
    {
      "epoch": 2.051282051282051,
      "grad_norm": 0.049117326736450195,
      "learning_rate": 9.996239762521151e-05,
      "loss": 0.2066,
      "step": 20
    },
    {
      "epoch": 2.1538461538461537,
      "grad_norm": 0.04978122562170029,
      "learning_rate": 9.991540791356342e-05,
      "loss": 0.2087,
      "step": 21
    },
    {
      "epoch": 2.2564102564102564,
      "grad_norm": 0.045931246131658554,
      "learning_rate": 9.98496470583896e-05,
      "loss": 0.2027,
      "step": 22
    },
    {
      "epoch": 2.358974358974359,
      "grad_norm": 0.05703091621398926,
      "learning_rate": 9.976513978965829e-05,
      "loss": 0.2146,
      "step": 23
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 0.042917944490909576,
      "learning_rate": 9.966191788709716e-05,
      "loss": 0.2067,
      "step": 24
    },
    {
      "epoch": 2.564102564102564,
      "grad_norm": 0.04547038674354553,
      "learning_rate": 9.954002016824227e-05,
      "loss": 0.2069,
      "step": 25
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.046476855874061584,
      "learning_rate": 9.939949247384046e-05,
      "loss": 0.2058,
      "step": 26
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.05241086706519127,
      "learning_rate": 9.924038765061042e-05,
      "loss": 0.2084,
      "step": 27
    },
    {
      "epoch": 2.871794871794872,
      "grad_norm": 0.04542744159698486,
      "learning_rate": 9.906276553136923e-05,
      "loss": 0.2136,
      "step": 28
    },
    {
      "epoch": 2.9743589743589745,
      "grad_norm": 0.040408626198768616,
      "learning_rate": 9.88666929125318e-05,
      "loss": 0.1986,
      "step": 29
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 0.03904923424124718,
      "learning_rate": 9.865224352899119e-05,
      "loss": 0.2128,
      "step": 30
    },
    {
      "epoch": 3.1794871794871793,
      "grad_norm": 0.04009323939681053,
      "learning_rate": 9.84194980263903e-05,
      "loss": 0.1921,
      "step": 31
    },
    {
      "epoch": 3.282051282051282,
      "grad_norm": 0.03925410285592079,
      "learning_rate": 9.816854393079403e-05,
      "loss": 0.2041,
      "step": 32
    },
    {
      "epoch": 3.3846153846153846,
      "grad_norm": 0.043651480227708817,
      "learning_rate": 9.789947561577445e-05,
      "loss": 0.2046,
      "step": 33
    },
    {
      "epoch": 3.4871794871794872,
      "grad_norm": 0.03763292357325554,
      "learning_rate": 9.761239426692077e-05,
      "loss": 0.2064,
      "step": 34
    },
    {
      "epoch": 3.58974358974359,
      "grad_norm": 0.03577704727649689,
      "learning_rate": 9.730740784378753e-05,
      "loss": 0.2108,
      "step": 35
    },
    {
      "epoch": 3.6923076923076925,
      "grad_norm": 0.03381442651152611,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.1996,
      "step": 36
    },
    {
      "epoch": 3.7948717948717947,
      "grad_norm": 0.05113065987825394,
      "learning_rate": 9.664418523660004e-05,
      "loss": 0.1986,
      "step": 37
    },
    {
      "epoch": 3.8974358974358974,
      "grad_norm": 0.04517243430018425,
      "learning_rate": 9.628619846344454e-05,
      "loss": 0.2123,
      "step": 38
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.04306492581963539,
      "learning_rate": 9.591080534401371e-05,
      "loss": 0.1971,
      "step": 39
    },
    {
      "epoch": 4.102564102564102,
      "grad_norm": 0.030920082703232765,
      "learning_rate": 9.551814704830734e-05,
      "loss": 0.1962,
      "step": 40
    },
    {
      "epoch": 4.205128205128205,
      "grad_norm": 0.040115438401699066,
      "learning_rate": 9.51083712390519e-05,
      "loss": 0.2028,
      "step": 41
    },
    {
      "epoch": 4.3076923076923075,
      "grad_norm": 0.03652967885136604,
      "learning_rate": 9.468163201617062e-05,
      "loss": 0.202,
      "step": 42
    },
    {
      "epoch": 4.410256410256411,
      "grad_norm": 0.043810825794935226,
      "learning_rate": 9.423808985883289e-05,
      "loss": 0.2064,
      "step": 43
    },
    {
      "epoch": 4.512820512820513,
      "grad_norm": 0.04564822092652321,
      "learning_rate": 9.377791156510455e-05,
      "loss": 0.2057,
      "step": 44
    },
    {
      "epoch": 4.615384615384615,
      "grad_norm": 0.030029835179448128,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.1988,
      "step": 45
    },
    {
      "epoch": 4.717948717948718,
      "grad_norm": 0.03449944779276848,
      "learning_rate": 9.280834497651334e-05,
      "loss": 0.1961,
      "step": 46
    },
    {
      "epoch": 4.82051282051282,
      "grad_norm": 0.038930896669626236,
      "learning_rate": 9.229932129599205e-05,
      "loss": 0.2059,
      "step": 47
    },
    {
      "epoch": 4.923076923076923,
      "grad_norm": 0.050978414714336395,
      "learning_rate": 9.177439057064683e-05,
      "loss": 0.1936,
      "step": 48
    },
    {
      "epoch": 5.0256410256410255,
      "grad_norm": 0.03453517332673073,
      "learning_rate": 9.123375020545535e-05,
      "loss": 0.2019,
      "step": 49
    },
    {
      "epoch": 5.128205128205128,
      "grad_norm": 0.03941568359732628,
      "learning_rate": 9.067760351314838e-05,
      "loss": 0.1984,
      "step": 50
    },
    {
      "epoch": 5.230769230769231,
      "grad_norm": 0.049096424132585526,
      "learning_rate": 9.01061596377522e-05,
      "loss": 0.1989,
      "step": 51
    },
    {
      "epoch": 5.333333333333333,
      "grad_norm": 0.04188678413629532,
      "learning_rate": 8.951963347593797e-05,
      "loss": 0.1948,
      "step": 52
    },
    {
      "epoch": 5.435897435897436,
      "grad_norm": 0.038111768662929535,
      "learning_rate": 8.891824559620801e-05,
      "loss": 0.1974,
      "step": 53
    },
    {
      "epoch": 5.538461538461538,
      "grad_norm": 0.042120371013879776,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.2026,
      "step": 54
    },
    {
      "epoch": 5.641025641025641,
      "grad_norm": 0.04244457557797432,
      "learning_rate": 8.767179481638303e-05,
      "loss": 0.1998,
      "step": 55
    },
    {
      "epoch": 5.743589743589744,
      "grad_norm": 0.04076429829001427,
      "learning_rate": 8.702720065545024e-05,
      "loss": 0.2059,
      "step": 56
    },
    {
      "epoch": 5.846153846153846,
      "grad_norm": 0.05762708932161331,
      "learning_rate": 8.636868207865244e-05,
      "loss": 0.1871,
      "step": 57
    },
    {
      "epoch": 5.948717948717949,
      "grad_norm": 0.05226736143231392,
      "learning_rate": 8.569648672789497e-05,
      "loss": 0.1962,
      "step": 58
    },
    {
      "epoch": 6.051282051282051,
      "grad_norm": 0.050000473856925964,
      "learning_rate": 8.501086738835843e-05,
      "loss": 0.197,
      "step": 59
    },
    {
      "epoch": 6.153846153846154,
      "grad_norm": 0.04632322117686272,
      "learning_rate": 8.43120818934367e-05,
      "loss": 0.1973,
      "step": 60
    },
    {
      "epoch": 6.256410256410256,
      "grad_norm": 0.05035199597477913,
      "learning_rate": 8.360039302777612e-05,
      "loss": 0.198,
      "step": 61
    },
    {
      "epoch": 6.358974358974359,
      "grad_norm": 0.047098372131586075,
      "learning_rate": 8.28760684284532e-05,
      "loss": 0.1955,
      "step": 62
    },
    {
      "epoch": 6.461538461538462,
      "grad_norm": 0.06358848512172699,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.2009,
      "step": 63
    },
    {
      "epoch": 6.564102564102564,
      "grad_norm": 0.047331102192401886,
      "learning_rate": 8.139060623360493e-05,
      "loss": 0.2012,
      "step": 64
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 0.05147461220622063,
      "learning_rate": 8.063002725966015e-05,
      "loss": 0.1969,
      "step": 65
    },
    {
      "epoch": 6.769230769230769,
      "grad_norm": 0.05776860937476158,
      "learning_rate": 7.985792958513931e-05,
      "loss": 0.1934,
      "step": 66
    },
    {
      "epoch": 6.871794871794872,
      "grad_norm": 0.062228187918663025,
      "learning_rate": 7.907460356440133e-05,
      "loss": 0.1949,
      "step": 67
    },
    {
      "epoch": 6.9743589743589745,
      "grad_norm": 0.0530543327331543,
      "learning_rate": 7.828034377432693e-05,
      "loss": 0.1921,
      "step": 68
    },
    {
      "epoch": 7.076923076923077,
      "grad_norm": 0.05476854741573334,
      "learning_rate": 7.74754489035403e-05,
      "loss": 0.2001,
      "step": 69
    },
    {
      "epoch": 7.17948717948718,
      "grad_norm": 0.05120917409658432,
      "learning_rate": 7.666022164008457e-05,
      "loss": 0.1977,
      "step": 70
    },
    {
      "epoch": 7.282051282051282,
      "grad_norm": 0.051713503897190094,
      "learning_rate": 7.583496855759316e-05,
      "loss": 0.2007,
      "step": 71
    },
    {
      "epoch": 7.384615384615385,
      "grad_norm": 0.05411737784743309,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.1955,
      "step": 72
    },
    {
      "epoch": 7.487179487179487,
      "grad_norm": 0.058174680918455124,
      "learning_rate": 7.415562996483192e-05,
      "loss": 0.1943,
      "step": 73
    },
    {
      "epoch": 7.589743589743589,
      "grad_norm": 0.05788983032107353,
      "learning_rate": 7.330217598512695e-05,
      "loss": 0.1902,
      "step": 74
    },
    {
      "epoch": 7.6923076923076925,
      "grad_norm": 0.06303142011165619,
      "learning_rate": 7.243995901002312e-05,
      "loss": 0.1897,
      "step": 75
    },
    {
      "epoch": 7.794871794871795,
      "grad_norm": 0.06139795109629631,
      "learning_rate": 7.156930328406268e-05,
      "loss": 0.1903,
      "step": 76
    },
    {
      "epoch": 7.897435897435898,
      "grad_norm": 0.06393580138683319,
      "learning_rate": 7.069053622525696e-05,
      "loss": 0.1834,
      "step": 77
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.07243426889181137,
      "learning_rate": 6.980398830195785e-05,
      "loss": 0.1976,
      "step": 78
    },
    {
      "epoch": 8.102564102564102,
      "grad_norm": 0.06290671974420547,
      "learning_rate": 6.890999290858214e-05,
      "loss": 0.1896,
      "step": 79
    },
    {
      "epoch": 8.205128205128204,
      "grad_norm": 0.05926685035228729,
      "learning_rate": 6.800888624023553e-05,
      "loss": 0.1914,
      "step": 80
    },
    {
      "epoch": 8.307692307692308,
      "grad_norm": 0.06375055015087128,
      "learning_rate": 6.710100716628344e-05,
      "loss": 0.1923,
      "step": 81
    },
    {
      "epoch": 8.41025641025641,
      "grad_norm": 0.058604903519153595,
      "learning_rate": 6.618669710291606e-05,
      "loss": 0.1878,
      "step": 82
    },
    {
      "epoch": 8.512820512820513,
      "grad_norm": 0.06643830239772797,
      "learning_rate": 6.526629988475567e-05,
      "loss": 0.1924,
      "step": 83
    },
    {
      "epoch": 8.615384615384615,
      "grad_norm": 0.06960766762495041,
      "learning_rate": 6.434016163555452e-05,
      "loss": 0.193,
      "step": 84
    },
    {
      "epoch": 8.717948717948717,
      "grad_norm": 0.07951146364212036,
      "learning_rate": 6.340863063803188e-05,
      "loss": 0.1894,
      "step": 85
    },
    {
      "epoch": 8.820512820512821,
      "grad_norm": 0.08499961346387863,
      "learning_rate": 6.247205720289907e-05,
      "loss": 0.1946,
      "step": 86
    },
    {
      "epoch": 8.923076923076923,
      "grad_norm": 0.080729179084301,
      "learning_rate": 6.153079353712201e-05,
      "loss": 0.1897,
      "step": 87
    },
    {
      "epoch": 9.025641025641026,
      "grad_norm": 0.08111605793237686,
      "learning_rate": 6.058519361147055e-05,
      "loss": 0.1952,
      "step": 88
    },
    {
      "epoch": 9.128205128205128,
      "grad_norm": 0.08702465891838074,
      "learning_rate": 5.963561302740449e-05,
      "loss": 0.186,
      "step": 89
    },
    {
      "epoch": 9.23076923076923,
      "grad_norm": 0.08024930208921432,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.1899,
      "step": 90
    },
    {
      "epoch": 9.333333333333334,
      "grad_norm": 0.07547761499881744,
      "learning_rate": 5.772593964039203e-05,
      "loss": 0.1891,
      "step": 91
    },
    {
      "epoch": 9.435897435897436,
      "grad_norm": 0.0838337242603302,
      "learning_rate": 5.6766564987506566e-05,
      "loss": 0.1904,
      "step": 92
    },
    {
      "epoch": 9.538461538461538,
      "grad_norm": 0.08516307175159454,
      "learning_rate": 5.5804645706261514e-05,
      "loss": 0.1895,
      "step": 93
    },
    {
      "epoch": 9.64102564102564,
      "grad_norm": 0.08381783217191696,
      "learning_rate": 5.484054353515896e-05,
      "loss": 0.1878,
      "step": 94
    },
    {
      "epoch": 9.743589743589745,
      "grad_norm": 0.08215490728616714,
      "learning_rate": 5.387462103359655e-05,
      "loss": 0.189,
      "step": 95
    },
    {
      "epoch": 9.846153846153847,
      "grad_norm": 0.08765217661857605,
      "learning_rate": 5.290724144552379e-05,
      "loss": 0.1898,
      "step": 96
    },
    {
      "epoch": 9.948717948717949,
      "grad_norm": 0.08872053772211075,
      "learning_rate": 5.193876856284085e-05,
      "loss": 0.1908,
      "step": 97
    },
    {
      "epoch": 10.051282051282051,
      "grad_norm": 0.09559184312820435,
      "learning_rate": 5.096956658859122e-05,
      "loss": 0.1854,
      "step": 98
    },
    {
      "epoch": 10.153846153846153,
      "grad_norm": 0.08504116535186768,
      "learning_rate": 5e-05,
      "loss": 0.1873,
      "step": 99
    },
    {
      "epoch": 10.256410256410255,
      "grad_norm": 0.10478716343641281,
      "learning_rate": 4.903043341140879e-05,
      "loss": 0.1835,
      "step": 100
    },
    {
      "epoch": 10.35897435897436,
      "grad_norm": 0.09934550523757935,
      "learning_rate": 4.806123143715916e-05,
      "loss": 0.1845,
      "step": 101
    },
    {
      "epoch": 10.461538461538462,
      "grad_norm": 0.1022539958357811,
      "learning_rate": 4.709275855447621e-05,
      "loss": 0.189,
      "step": 102
    },
    {
      "epoch": 10.564102564102564,
      "grad_norm": 0.10897883027791977,
      "learning_rate": 4.612537896640346e-05,
      "loss": 0.1879,
      "step": 103
    },
    {
      "epoch": 10.666666666666666,
      "grad_norm": 0.11043286323547363,
      "learning_rate": 4.515945646484105e-05,
      "loss": 0.1902,
      "step": 104
    },
    {
      "epoch": 10.76923076923077,
      "grad_norm": 0.11388922482728958,
      "learning_rate": 4.4195354293738484e-05,
      "loss": 0.1816,
      "step": 105
    },
    {
      "epoch": 10.871794871794872,
      "grad_norm": 0.12030429393053055,
      "learning_rate": 4.323343501249346e-05,
      "loss": 0.1902,
      "step": 106
    },
    {
      "epoch": 10.974358974358974,
      "grad_norm": 0.1329633891582489,
      "learning_rate": 4.227406035960798e-05,
      "loss": 0.1886,
      "step": 107
    },
    {
      "epoch": 11.076923076923077,
      "grad_norm": 0.11949360370635986,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.1808,
      "step": 108
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 180,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 18,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.346660942657618e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}