{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 126,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.015873015873015872, "grad_norm": 17.046055731691766, "learning_rate": 9.998445910004082e-06, "loss": 1.0473, "step": 1},
    {"epoch": 0.031746031746031744, "grad_norm": 6.368244201268006, "learning_rate": 9.993784606094612e-06, "loss": 0.6073, "step": 2},
    {"epoch": 0.047619047619047616, "grad_norm": 3.937504515409542, "learning_rate": 9.986018985905901e-06, "loss": 0.4184, "step": 3},
    {"epoch": 0.06349206349206349, "grad_norm": 3.1167770147122558, "learning_rate": 9.975153876827008e-06, "loss": 0.3656, "step": 4},
    {"epoch": 0.07936507936507936, "grad_norm": 3.5451663774846858, "learning_rate": 9.961196033000862e-06, "loss": 0.3752, "step": 5},
    {"epoch": 0.09523809523809523, "grad_norm": 1.8934015097083565, "learning_rate": 9.944154131125643e-06, "loss": 0.2923, "step": 6},
    {"epoch": 0.1111111111111111, "grad_norm": 1.9773484964869892, "learning_rate": 9.924038765061042e-06, "loss": 0.2976, "step": 7},
    {"epoch": 0.12698412698412698, "grad_norm": 1.8499922558582569, "learning_rate": 9.900862439242719e-06, "loss": 0.2858, "step": 8},
    {"epoch": 0.14285714285714285, "grad_norm": 2.1048532676599176, "learning_rate": 9.874639560909118e-06, "loss": 0.275, "step": 9},
    {"epoch": 0.15873015873015872, "grad_norm": 1.5720262400150389, "learning_rate": 9.84538643114539e-06, "loss": 0.2756, "step": 10},
    {"epoch": 0.1746031746031746, "grad_norm": 1.684255407558622, "learning_rate": 9.81312123475006e-06, "loss": 0.2647, "step": 11},
    {"epoch": 0.19047619047619047, "grad_norm": 1.635574387616813, "learning_rate": 9.777864028930705e-06, "loss": 0.2546, "step": 12},
    {"epoch": 0.20634920634920634, "grad_norm": 1.422170916071452, "learning_rate": 9.73963673083566e-06, "loss": 0.2203, "step": 13},
    {"epoch": 0.2222222222222222, "grad_norm": 1.1575210545990935, "learning_rate": 9.698463103929542e-06, "loss": 0.197, "step": 14},
    {"epoch": 0.23809523809523808, "grad_norm": 1.3304992288104052, "learning_rate": 9.654368743221022e-06, "loss": 0.206, "step": 15},
    {"epoch": 0.25396825396825395, "grad_norm": 1.2029627869249027, "learning_rate": 9.60738105935204e-06, "loss": 0.2064, "step": 16},
    {"epoch": 0.2698412698412698, "grad_norm": 1.2168950922943917, "learning_rate": 9.557529261558367e-06, "loss": 0.2123, "step": 17},
    {"epoch": 0.2857142857142857, "grad_norm": 1.2061804273710595, "learning_rate": 9.504844339512096e-06, "loss": 0.2234, "step": 18},
    {"epoch": 0.30158730158730157, "grad_norm": 1.3455088304193212, "learning_rate": 9.449359044057344e-06, "loss": 0.2256, "step": 19},
    {"epoch": 0.31746031746031744, "grad_norm": 1.2713281285022595, "learning_rate": 9.391107866851143e-06, "loss": 0.2158, "step": 20},
    {"epoch": 0.3333333333333333, "grad_norm": 1.0727037071509673, "learning_rate": 9.330127018922195e-06, "loss": 0.1969, "step": 21},
    {"epoch": 0.3492063492063492, "grad_norm": 1.3540864350754735, "learning_rate": 9.266454408160779e-06, "loss": 0.2598, "step": 22},
    {"epoch": 0.36507936507936506, "grad_norm": 1.2847824839788686, "learning_rate": 9.200129615753858e-06, "loss": 0.2389, "step": 23},
    {"epoch": 0.38095238095238093, "grad_norm": 1.122678154329176, "learning_rate": 9.131193871579975e-06, "loss": 0.2279, "step": 24},
    {"epoch": 0.3968253968253968, "grad_norm": 1.1519429578533356, "learning_rate": 9.059690028579285e-06, "loss": 0.2076, "step": 25},
    {"epoch": 0.4126984126984127, "grad_norm": 1.3613352350414323, "learning_rate": 8.985662536114614e-06, "loss": 0.22, "step": 26},
    {"epoch": 0.42857142857142855, "grad_norm": 1.2339714490779135, "learning_rate": 8.90915741234015e-06, "loss": 0.2522, "step": 27},
    {"epoch": 0.4444444444444444, "grad_norm": 1.1236413741627491, "learning_rate": 8.83022221559489e-06, "loss": 0.1866, "step": 28},
    {"epoch": 0.4603174603174603, "grad_norm": 1.3086093841693327, "learning_rate": 8.748906014838672e-06, "loss": 0.2333, "step": 29},
    {"epoch": 0.47619047619047616, "grad_norm": 1.0158374499871152, "learning_rate": 8.665259359149132e-06, "loss": 0.2031, "step": 30},
    {"epoch": 0.49206349206349204, "grad_norm": 0.9845474076221036, "learning_rate": 8.579334246298593e-06, "loss": 0.1895, "step": 31},
    {"epoch": 0.5079365079365079, "grad_norm": 0.9272494831082883, "learning_rate": 8.491184090430365e-06, "loss": 0.1678, "step": 32},
    {"epoch": 0.5238095238095238, "grad_norm": 1.1065938204746082, "learning_rate": 8.400863688854598e-06, "loss": 0.2131, "step": 33},
    {"epoch": 0.5396825396825397, "grad_norm": 0.9645961460688046, "learning_rate": 8.308429187984298e-06, "loss": 0.1938, "step": 34},
    {"epoch": 0.5555555555555556, "grad_norm": 1.0652512127256022, "learning_rate": 8.213938048432697e-06, "loss": 0.228, "step": 35},
    {"epoch": 0.5714285714285714, "grad_norm": 1.2041599554639741, "learning_rate": 8.117449009293668e-06, "loss": 0.227, "step": 36},
    {"epoch": 0.5873015873015873, "grad_norm": 1.0747489173754983, "learning_rate": 8.019022051627387e-06, "loss": 0.2072, "step": 37},
    {"epoch": 0.6031746031746031, "grad_norm": 1.2773460577215314, "learning_rate": 7.918718361173951e-06, "loss": 0.2173, "step": 38},
    {"epoch": 0.6190476190476191, "grad_norm": 1.135886238064031, "learning_rate": 7.81660029031811e-06, "loss": 0.2142, "step": 39},
    {"epoch": 0.6349206349206349, "grad_norm": 0.9260328676177088, "learning_rate": 7.712731319328798e-06, "loss": 0.1707, "step": 40},
    {"epoch": 0.6507936507936508, "grad_norm": 1.3665126828704022, "learning_rate": 7.607176016897491e-06, "loss": 0.2181, "step": 41},
    {"epoch": 0.6666666666666666, "grad_norm": 1.3127794515877824, "learning_rate": 7.500000000000001e-06, "loss": 0.2439, "step": 42},
    {"epoch": 0.6825396825396826, "grad_norm": 1.1360818137590631, "learning_rate": 7.391269893106592e-06, "loss": 0.2105, "step": 43},
    {"epoch": 0.6984126984126984, "grad_norm": 1.1335666149334869, "learning_rate": 7.281053286765816e-06, "loss": 0.1929, "step": 44},
    {"epoch": 0.7142857142857143, "grad_norm": 1.1267539001855318, "learning_rate": 7.169418695587791e-06, "loss": 0.1984, "step": 45},
    {"epoch": 0.7301587301587301, "grad_norm": 1.1936854411979159, "learning_rate": 7.056435515653059e-06, "loss": 0.2028, "step": 46},
    {"epoch": 0.746031746031746, "grad_norm": 0.9015309462604437, "learning_rate": 6.942173981373474e-06, "loss": 0.1875, "step": 47},
    {"epoch": 0.7619047619047619, "grad_norm": 0.8200713976990537, "learning_rate": 6.8267051218319766e-06, "loss": 0.163, "step": 48},
    {"epoch": 0.7777777777777778, "grad_norm": 1.051998804086299, "learning_rate": 6.710100716628345e-06, "loss": 0.1953, "step": 49},
    {"epoch": 0.7936507936507936, "grad_norm": 1.1445651214323342, "learning_rate": 6.592433251258423e-06, "loss": 0.2231, "step": 50},
    {"epoch": 0.8095238095238095, "grad_norm": 1.1078932700031434, "learning_rate": 6.473775872054522e-06, "loss": 0.1944, "step": 51},
    {"epoch": 0.8253968253968254, "grad_norm": 0.9313430929773627, "learning_rate": 6.354202340715027e-06, "loss": 0.196, "step": 52},
    {"epoch": 0.8412698412698413, "grad_norm": 0.9270393870240721, "learning_rate": 6.233786988451468e-06, "loss": 0.1682, "step": 53},
    {"epoch": 0.8571428571428571, "grad_norm": 0.9164332807254034, "learning_rate": 6.112604669781572e-06, "loss": 0.1846, "step": 54},
    {"epoch": 0.873015873015873, "grad_norm": 0.8740293066946664, "learning_rate": 5.990730715996989e-06, "loss": 0.1657, "step": 55},
    {"epoch": 0.8888888888888888, "grad_norm": 1.2633376067074007, "learning_rate": 5.8682408883346535e-06, "loss": 0.2077, "step": 56},
    {"epoch": 0.9047619047619048, "grad_norm": 0.8792598852017888, "learning_rate": 5.745211330880872e-06, "loss": 0.1768, "step": 57},
    {"epoch": 0.9206349206349206, "grad_norm": 1.0220047570734931, "learning_rate": 5.621718523237427e-06, "loss": 0.1881, "step": 58},
    {"epoch": 0.9365079365079365, "grad_norm": 0.8703339387258027, "learning_rate": 5.497839232979084e-06, "loss": 0.1611, "step": 59},
    {"epoch": 0.9523809523809523, "grad_norm": 1.0109865838643635, "learning_rate": 5.373650467932122e-06, "loss": 0.2008, "step": 60},
    {"epoch": 0.9682539682539683, "grad_norm": 0.7417523893697854, "learning_rate": 5.249229428303486e-06, "loss": 0.1602, "step": 61},
    {"epoch": 0.9841269841269841, "grad_norm": 0.9461546053888866, "learning_rate": 5.1246534586903655e-06, "loss": 0.1993, "step": 62},
    {"epoch": 1.0, "grad_norm": 0.9660336029382152, "learning_rate": 5e-06, "loss": 0.191, "step": 63},
    {"epoch": 1.0158730158730158, "grad_norm": 0.8446705493437469, "learning_rate": 4.875346541309637e-06, "loss": 0.1364, "step": 64},
    {"epoch": 1.0317460317460316, "grad_norm": 0.9820307539758936, "learning_rate": 4.750770571696514e-06, "loss": 0.1587, "step": 65},
    {"epoch": 1.0476190476190477, "grad_norm": 0.9058719502990771, "learning_rate": 4.626349532067879e-06, "loss": 0.1483, "step": 66},
    {"epoch": 1.0634920634920635, "grad_norm": 0.9041764403564576, "learning_rate": 4.502160767020918e-06, "loss": 0.1414, "step": 67},
    {"epoch": 1.0793650793650793, "grad_norm": 0.8001618713509455, "learning_rate": 4.3782814767625755e-06, "loss": 0.1338, "step": 68},
    {"epoch": 1.0952380952380953, "grad_norm": 0.8319305596618536, "learning_rate": 4.254788669119127e-06, "loss": 0.1454, "step": 69},
    {"epoch": 1.1111111111111112, "grad_norm": 0.8371055902592329, "learning_rate": 4.131759111665349e-06, "loss": 0.1402, "step": 70},
    {"epoch": 1.126984126984127, "grad_norm": 0.7954052878494495, "learning_rate": 4.009269284003014e-06, "loss": 0.1228, "step": 71},
    {"epoch": 1.1428571428571428, "grad_norm": 0.7671843494889209, "learning_rate": 3.887395330218429e-06, "loss": 0.1221, "step": 72},
    {"epoch": 1.1587301587301586, "grad_norm": 0.8711455715304539, "learning_rate": 3.7662130115485317e-06, "loss": 0.1501, "step": 73},
    {"epoch": 1.1746031746031746, "grad_norm": 0.7847941425308853, "learning_rate": 3.6457976592849753e-06, "loss": 0.1122, "step": 74},
    {"epoch": 1.1904761904761905, "grad_norm": 0.722628056722779, "learning_rate": 3.526224127945479e-06, "loss": 0.1111, "step": 75},
    {"epoch": 1.2063492063492063, "grad_norm": 0.8245734562679857, "learning_rate": 3.4075667487415785e-06, "loss": 0.1212, "step": 76},
    {"epoch": 1.2222222222222223, "grad_norm": 0.8905262611051837, "learning_rate": 3.289899283371657e-06, "loss": 0.1318, "step": 77},
    {"epoch": 1.2380952380952381, "grad_norm": 0.8016171759163617, "learning_rate": 3.173294878168025e-06, "loss": 0.1194, "step": 78},
    {"epoch": 1.253968253968254, "grad_norm": 0.8978165203445735, "learning_rate": 3.057826018626527e-06, "loss": 0.1167, "step": 79},
    {"epoch": 1.2698412698412698, "grad_norm": 0.8629753908810158, "learning_rate": 2.9435644843469434e-06, "loss": 0.1309, "step": 80},
    {"epoch": 1.2857142857142856, "grad_norm": 0.7772338824641468, "learning_rate": 2.83058130441221e-06, "loss": 0.1158, "step": 81},
    {"epoch": 1.3015873015873016, "grad_norm": 0.8162122268225106, "learning_rate": 2.718946713234185e-06, "loss": 0.1205, "step": 82},
    {"epoch": 1.3174603174603174, "grad_norm": 0.8161848415092973, "learning_rate": 2.608730106893411e-06, "loss": 0.1284, "step": 83},
    {"epoch": 1.3333333333333333, "grad_norm": 0.7777687958543732, "learning_rate": 2.5000000000000015e-06, "loss": 0.1161, "step": 84},
    {"epoch": 1.3492063492063493, "grad_norm": 0.9073998204298158, "learning_rate": 2.39282398310251e-06, "loss": 0.1192, "step": 85},
    {"epoch": 1.3650793650793651, "grad_norm": 0.775388656338499, "learning_rate": 2.2872686806712037e-06, "loss": 0.1163, "step": 86},
    {"epoch": 1.380952380952381, "grad_norm": 0.8224765782876229, "learning_rate": 2.1833997096818897e-06, "loss": 0.1172, "step": 87},
    {"epoch": 1.3968253968253967, "grad_norm": 0.9413042727529922, "learning_rate": 2.081281638826052e-06, "loss": 0.1218, "step": 88},
    {"epoch": 1.4126984126984126, "grad_norm": 0.8442720538674777, "learning_rate": 1.980977948372612e-06, "loss": 0.1231, "step": 89},
    {"epoch": 1.4285714285714286, "grad_norm": 0.7194237358207392, "learning_rate": 1.8825509907063328e-06, "loss": 0.1045, "step": 90},
    {"epoch": 1.4444444444444444, "grad_norm": 0.8127413072028286, "learning_rate": 1.7860619515673034e-06, "loss": 0.1233, "step": 91},
    {"epoch": 1.4603174603174602, "grad_norm": 1.2439563617941711, "learning_rate": 1.6915708120157042e-06, "loss": 0.1275, "step": 92},
    {"epoch": 1.4761904761904763, "grad_norm": 0.7770780245344372, "learning_rate": 1.5991363111454023e-06, "loss": 0.1181, "step": 93},
    {"epoch": 1.492063492063492, "grad_norm": 0.7672775525058901, "learning_rate": 1.5088159095696365e-06, "loss": 0.1059, "step": 94},
    {"epoch": 1.507936507936508, "grad_norm": 0.766143455209844, "learning_rate": 1.4206657537014078e-06, "loss": 0.1178, "step": 95},
    {"epoch": 1.5238095238095237, "grad_norm": 0.724421182393947, "learning_rate": 1.3347406408508695e-06, "loss": 0.1149, "step": 96},
    {"epoch": 1.5396825396825395, "grad_norm": 0.7658778028894299, "learning_rate": 1.2510939851613285e-06, "loss": 0.1225, "step": 97},
    {"epoch": 1.5555555555555556, "grad_norm": 1.032494528371397, "learning_rate": 1.1697777844051105e-06, "loss": 0.143, "step": 98},
    {"epoch": 1.5714285714285714, "grad_norm": 0.8317671879333141, "learning_rate": 1.0908425876598512e-06, "loss": 0.1226, "step": 99},
    {"epoch": 1.5873015873015874, "grad_norm": 0.8645979946263267, "learning_rate": 1.0143374638853892e-06, "loss": 0.1251, "step": 100},
    {"epoch": 1.6031746031746033, "grad_norm": 0.8455502085192623, "learning_rate": 9.403099714207175e-07, "loss": 0.1147, "step": 101},
    {"epoch": 1.619047619047619, "grad_norm": 0.7822227098965502, "learning_rate": 8.688061284200266e-07, "loss": 0.1325, "step": 102},
    {"epoch": 1.6349206349206349, "grad_norm": 0.7471909044045777, "learning_rate": 7.99870384246143e-07, "loss": 0.1056, "step": 103},
    {"epoch": 1.6507936507936507, "grad_norm": 0.8069511438267241, "learning_rate": 7.33545591839222e-07, "loss": 0.1113, "step": 104},
    {"epoch": 1.6666666666666665, "grad_norm": 0.7983439950759835, "learning_rate": 6.698729810778065e-07, "loss": 0.1158, "step": 105},
    {"epoch": 1.6825396825396826, "grad_norm": 0.8429996790936396, "learning_rate": 6.088921331488568e-07, "loss": 0.1106, "step": 106},
    {"epoch": 1.6984126984126984, "grad_norm": 0.8241505435395763, "learning_rate": 5.506409559426573e-07, "loss": 0.1198, "step": 107},
    {"epoch": 1.7142857142857144, "grad_norm": 0.7464229994435873, "learning_rate": 4.951556604879049e-07, "loss": 0.1133, "step": 108},
    {"epoch": 1.7301587301587302, "grad_norm": 0.6974660696481206, "learning_rate": 4.4247073844163434e-07, "loss": 0.0932, "step": 109},
    {"epoch": 1.746031746031746, "grad_norm": 0.7571169718329668, "learning_rate": 3.9261894064796136e-07, "loss": 0.1134, "step": 110},
    {"epoch": 1.7619047619047619, "grad_norm": 0.8233122026769766, "learning_rate": 3.4563125677897936e-07, "loss": 0.1343, "step": 111},
    {"epoch": 1.7777777777777777, "grad_norm": 0.7642348427528916, "learning_rate": 3.015368960704584e-07, "loss": 0.1134, "step": 112},
    {"epoch": 1.7936507936507935, "grad_norm": 0.7559524802350545, "learning_rate": 2.6036326916434153e-07, "loss": 0.1153, "step": 113},
    {"epoch": 1.8095238095238095, "grad_norm": 0.7773214279041375, "learning_rate": 2.2213597106929608e-07, "loss": 0.1078, "step": 114},
    {"epoch": 1.8253968253968254, "grad_norm": 0.7680760912308965, "learning_rate": 1.8687876524993987e-07, "loss": 0.1051, "step": 115},
    {"epoch": 1.8412698412698414, "grad_norm": 0.7115438868122161, "learning_rate": 1.5461356885461077e-07, "loss": 0.113, "step": 116},
    {"epoch": 1.8571428571428572, "grad_norm": 0.691478980586458, "learning_rate": 1.253604390908819e-07, "loss": 0.0988, "step": 117},
    {"epoch": 1.873015873015873, "grad_norm": 0.687791401958618, "learning_rate": 9.913756075728088e-08, "loss": 0.096, "step": 118},
    {"epoch": 1.8888888888888888, "grad_norm": 0.7262275320357141, "learning_rate": 7.59612349389599e-08, "loss": 0.1122, "step": 119},
    {"epoch": 1.9047619047619047, "grad_norm": 0.7002743164354781, "learning_rate": 5.584586887435739e-08, "loss": 0.101, "step": 120},
    {"epoch": 1.9206349206349205, "grad_norm": 0.6436376226113324, "learning_rate": 3.8803966999139686e-08, "loss": 0.0939, "step": 121},
    {"epoch": 1.9365079365079365, "grad_norm": 0.8215986730670641, "learning_rate": 2.4846123172992953e-08, "loss": 0.113, "step": 122},
    {"epoch": 1.9523809523809523, "grad_norm": 0.7252216992457547, "learning_rate": 1.3981014094099354e-08, "loss": 0.1029, "step": 123},
    {"epoch": 1.9682539682539684, "grad_norm": 0.8386753220451895, "learning_rate": 6.215393905388278e-09, "loss": 0.1312, "step": 124},
    {"epoch": 1.9841269841269842, "grad_norm": 0.6788368293698803, "learning_rate": 1.5540899959187727e-09, "loss": 0.0994, "step": 125},
    {"epoch": 2.0, "grad_norm": 0.6890938486700772, "learning_rate": 0.0, "loss": 0.0922, "step": 126},
    {"epoch": 2.0, "step": 126, "total_flos": 8257007517696.0, "train_loss": 0.1798131594818736, "train_runtime": 502.2354, "train_samples_per_second": 1.999, "train_steps_per_second": 0.251}
  ],
  "logging_steps": 1,
  "max_steps": 126,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 70000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8257007517696.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}