|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 126,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015873015873015872,
      "grad_norm": 14.136896419046405,
      "learning_rate": 9.998445910004082e-06,
      "loss": 1.2293,
      "step": 1
    },
    {
      "epoch": 0.031746031746031744,
      "grad_norm": 9.414916379661909,
      "learning_rate": 9.993784606094612e-06,
      "loss": 0.8742,
      "step": 2
    },
    {
      "epoch": 0.047619047619047616,
      "grad_norm": 7.055571354439035,
      "learning_rate": 9.986018985905901e-06,
      "loss": 0.6603,
      "step": 3
    },
    {
      "epoch": 0.06349206349206349,
      "grad_norm": 6.0109678713542065,
      "learning_rate": 9.975153876827008e-06,
      "loss": 0.6402,
      "step": 4
    },
    {
      "epoch": 0.07936507936507936,
      "grad_norm": 6.286672268332456,
      "learning_rate": 9.961196033000862e-06,
      "loss": 0.5669,
      "step": 5
    },
    {
      "epoch": 0.09523809523809523,
      "grad_norm": 3.75907915574848,
      "learning_rate": 9.944154131125643e-06,
      "loss": 0.4859,
      "step": 6
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 2.7783434693295717,
      "learning_rate": 9.924038765061042e-06,
      "loss": 0.4665,
      "step": 7
    },
    {
      "epoch": 0.12698412698412698,
      "grad_norm": 3.5757346497904514,
      "learning_rate": 9.900862439242719e-06,
      "loss": 0.4592,
      "step": 8
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 3.0154588649031027,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.4289,
      "step": 9
    },
    {
      "epoch": 0.15873015873015872,
      "grad_norm": 2.9640179533460693,
      "learning_rate": 9.84538643114539e-06,
      "loss": 0.4375,
      "step": 10
    },
    {
      "epoch": 0.1746031746031746,
      "grad_norm": 2.4616695644526505,
      "learning_rate": 9.81312123475006e-06,
      "loss": 0.4332,
      "step": 11
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 2.2172107727937336,
      "learning_rate": 9.777864028930705e-06,
      "loss": 0.3862,
      "step": 12
    },
    {
      "epoch": 0.20634920634920634,
      "grad_norm": 2.0026334395442253,
      "learning_rate": 9.73963673083566e-06,
      "loss": 0.3375,
      "step": 13
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.008975524410557,
      "learning_rate": 9.698463103929542e-06,
      "loss": 0.3209,
      "step": 14
    },
    {
      "epoch": 0.23809523809523808,
      "grad_norm": 2.0020308436998615,
      "learning_rate": 9.654368743221022e-06,
      "loss": 0.3352,
      "step": 15
    },
    {
      "epoch": 0.25396825396825395,
      "grad_norm": 1.7951748955191769,
      "learning_rate": 9.60738105935204e-06,
      "loss": 0.324,
      "step": 16
    },
    {
      "epoch": 0.2698412698412698,
      "grad_norm": 1.9455517887105027,
      "learning_rate": 9.557529261558367e-06,
      "loss": 0.3292,
      "step": 17
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 1.9655040976499933,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.3404,
      "step": 18
    },
    {
      "epoch": 0.30158730158730157,
      "grad_norm": 2.257691645993752,
      "learning_rate": 9.449359044057344e-06,
      "loss": 0.3416,
      "step": 19
    },
    {
      "epoch": 0.31746031746031744,
      "grad_norm": 1.9178041719883492,
      "learning_rate": 9.391107866851143e-06,
      "loss": 0.3283,
      "step": 20
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 1.8376837225343723,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.3065,
      "step": 21
    },
    {
      "epoch": 0.3492063492063492,
      "grad_norm": 2.255376117326017,
      "learning_rate": 9.266454408160779e-06,
      "loss": 0.3898,
      "step": 22
    },
    {
      "epoch": 0.36507936507936506,
      "grad_norm": 2.3147995497368017,
      "learning_rate": 9.200129615753858e-06,
      "loss": 0.3566,
      "step": 23
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 1.9670946821467294,
      "learning_rate": 9.131193871579975e-06,
      "loss": 0.3397,
      "step": 24
    },
    {
      "epoch": 0.3968253968253968,
      "grad_norm": 1.9043148573056243,
      "learning_rate": 9.059690028579285e-06,
      "loss": 0.3146,
      "step": 25
    },
    {
      "epoch": 0.4126984126984127,
      "grad_norm": 2.0018857432631436,
      "learning_rate": 8.985662536114614e-06,
      "loss": 0.326,
      "step": 26
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 1.9805418049960362,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.3561,
      "step": 27
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.002055916467354,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.2896,
      "step": 28
    },
    {
      "epoch": 0.4603174603174603,
      "grad_norm": 1.9173208879793804,
      "learning_rate": 8.748906014838672e-06,
      "loss": 0.3481,
      "step": 29
    },
    {
      "epoch": 0.47619047619047616,
      "grad_norm": 1.7900189098270431,
      "learning_rate": 8.665259359149132e-06,
      "loss": 0.3038,
      "step": 30
    },
    {
      "epoch": 0.49206349206349204,
      "grad_norm": 1.6509929161585246,
      "learning_rate": 8.579334246298593e-06,
      "loss": 0.2826,
      "step": 31
    },
    {
      "epoch": 0.5079365079365079,
      "grad_norm": 1.671085946192089,
      "learning_rate": 8.491184090430365e-06,
      "loss": 0.2615,
      "step": 32
    },
    {
      "epoch": 0.5238095238095238,
      "grad_norm": 1.8602137091136728,
      "learning_rate": 8.400863688854598e-06,
      "loss": 0.2967,
      "step": 33
    },
    {
      "epoch": 0.5396825396825397,
      "grad_norm": 1.7196097127036853,
      "learning_rate": 8.308429187984298e-06,
      "loss": 0.2952,
      "step": 34
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.931814044902892,
      "learning_rate": 8.213938048432697e-06,
      "loss": 0.3372,
      "step": 35
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 2.1049189778926003,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.3404,
      "step": 36
    },
    {
      "epoch": 0.5873015873015873,
      "grad_norm": 1.8220307585532305,
      "learning_rate": 8.019022051627387e-06,
      "loss": 0.2965,
      "step": 37
    },
    {
      "epoch": 0.6031746031746031,
      "grad_norm": 2.1357017184024776,
      "learning_rate": 7.918718361173951e-06,
      "loss": 0.3163,
      "step": 38
    },
    {
      "epoch": 0.6190476190476191,
      "grad_norm": 1.9302715132373918,
      "learning_rate": 7.81660029031811e-06,
      "loss": 0.3218,
      "step": 39
    },
    {
      "epoch": 0.6349206349206349,
      "grad_norm": 1.6349542473923382,
      "learning_rate": 7.712731319328798e-06,
      "loss": 0.2527,
      "step": 40
    },
    {
      "epoch": 0.6507936507936508,
      "grad_norm": 2.300579309285893,
      "learning_rate": 7.607176016897491e-06,
      "loss": 0.3232,
      "step": 41
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 2.1075973294948263,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.367,
      "step": 42
    },
    {
      "epoch": 0.6825396825396826,
      "grad_norm": 1.8304036968165827,
      "learning_rate": 7.391269893106592e-06,
      "loss": 0.3094,
      "step": 43
    },
    {
      "epoch": 0.6984126984126984,
      "grad_norm": 1.9405966550411191,
      "learning_rate": 7.281053286765816e-06,
      "loss": 0.282,
      "step": 44
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 1.920794107387291,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.287,
      "step": 45
    },
    {
      "epoch": 0.7301587301587301,
      "grad_norm": 1.8746805079924518,
      "learning_rate": 7.056435515653059e-06,
      "loss": 0.3016,
      "step": 46
    },
    {
      "epoch": 0.746031746031746,
      "grad_norm": 1.6795992682490004,
      "learning_rate": 6.942173981373474e-06,
      "loss": 0.2765,
      "step": 47
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 1.5560823579671665,
      "learning_rate": 6.8267051218319766e-06,
      "loss": 0.2532,
      "step": 48
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.895658785400284,
      "learning_rate": 6.710100716628345e-06,
      "loss": 0.2892,
      "step": 49
    },
    {
      "epoch": 0.7936507936507936,
      "grad_norm": 1.8753314593622137,
      "learning_rate": 6.592433251258423e-06,
      "loss": 0.3301,
      "step": 50
    },
    {
      "epoch": 0.8095238095238095,
      "grad_norm": 1.829267141246674,
      "learning_rate": 6.473775872054522e-06,
      "loss": 0.2891,
      "step": 51
    },
    {
      "epoch": 0.8253968253968254,
      "grad_norm": 1.7420326737904375,
      "learning_rate": 6.354202340715027e-06,
      "loss": 0.2866,
      "step": 52
    },
    {
      "epoch": 0.8412698412698413,
      "grad_norm": 1.6023111714513605,
      "learning_rate": 6.233786988451468e-06,
      "loss": 0.2546,
      "step": 53
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 1.6958524220983555,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.263,
      "step": 54
    },
    {
      "epoch": 0.873015873015873,
      "grad_norm": 1.641293642541498,
      "learning_rate": 5.990730715996989e-06,
      "loss": 0.2519,
      "step": 55
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.9737668057519353,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.3114,
      "step": 56
    },
    {
      "epoch": 0.9047619047619048,
      "grad_norm": 1.6222206764245741,
      "learning_rate": 5.745211330880872e-06,
      "loss": 0.2611,
      "step": 57
    },
    {
      "epoch": 0.9206349206349206,
      "grad_norm": 1.6779148540502549,
      "learning_rate": 5.621718523237427e-06,
      "loss": 0.2749,
      "step": 58
    },
    {
      "epoch": 0.9365079365079365,
      "grad_norm": 1.5069533839875844,
      "learning_rate": 5.497839232979084e-06,
      "loss": 0.2395,
      "step": 59
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 1.8959358860602276,
      "learning_rate": 5.373650467932122e-06,
      "loss": 0.2893,
      "step": 60
    },
    {
      "epoch": 0.9682539682539683,
      "grad_norm": 1.523846974950363,
      "learning_rate": 5.249229428303486e-06,
      "loss": 0.2465,
      "step": 61
    },
    {
      "epoch": 0.9841269841269841,
      "grad_norm": 1.648360547715999,
      "learning_rate": 5.1246534586903655e-06,
      "loss": 0.2895,
      "step": 62
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.7313601334897208,
      "learning_rate": 5e-06,
      "loss": 0.2725,
      "step": 63
    },
    {
      "epoch": 1.0158730158730158,
      "grad_norm": 1.5974296941422694,
      "learning_rate": 4.875346541309637e-06,
      "loss": 0.2332,
      "step": 64
    },
    {
      "epoch": 1.0317460317460316,
      "grad_norm": 1.7754313088972902,
      "learning_rate": 4.750770571696514e-06,
      "loss": 0.2586,
      "step": 65
    },
    {
      "epoch": 1.0476190476190477,
      "grad_norm": 1.7376217331972978,
      "learning_rate": 4.626349532067879e-06,
      "loss": 0.2542,
      "step": 66
    },
    {
      "epoch": 1.0634920634920635,
      "grad_norm": 1.6766787084873536,
      "learning_rate": 4.502160767020918e-06,
      "loss": 0.2526,
      "step": 67
    },
    {
      "epoch": 1.0793650793650793,
      "grad_norm": 1.6040607155196303,
      "learning_rate": 4.3782814767625755e-06,
      "loss": 0.2305,
      "step": 68
    },
    {
      "epoch": 1.0952380952380953,
      "grad_norm": 1.76649027588737,
      "learning_rate": 4.254788669119127e-06,
      "loss": 0.2713,
      "step": 69
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 1.689340586990794,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.2382,
      "step": 70
    },
    {
      "epoch": 1.126984126984127,
      "grad_norm": 1.6502677192393955,
      "learning_rate": 4.009269284003014e-06,
      "loss": 0.226,
      "step": 71
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 1.6333958229898413,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.2289,
      "step": 72
    },
    {
      "epoch": 1.1587301587301586,
      "grad_norm": 1.7310975785907043,
      "learning_rate": 3.7662130115485317e-06,
      "loss": 0.2873,
      "step": 73
    },
    {
      "epoch": 1.1746031746031746,
      "grad_norm": 1.5468081102672981,
      "learning_rate": 3.6457976592849753e-06,
      "loss": 0.1974,
      "step": 74
    },
    {
      "epoch": 1.1904761904761905,
      "grad_norm": 1.5296817125485362,
      "learning_rate": 3.526224127945479e-06,
      "loss": 0.2013,
      "step": 75
    },
    {
      "epoch": 1.2063492063492063,
      "grad_norm": 1.6513883543662364,
      "learning_rate": 3.4075667487415785e-06,
      "loss": 0.2257,
      "step": 76
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 1.7657149068040885,
      "learning_rate": 3.289899283371657e-06,
      "loss": 0.2389,
      "step": 77
    },
    {
      "epoch": 1.2380952380952381,
      "grad_norm": 1.6542259947868383,
      "learning_rate": 3.173294878168025e-06,
      "loss": 0.2207,
      "step": 78
    },
    {
      "epoch": 1.253968253968254,
      "grad_norm": 1.785500829184994,
      "learning_rate": 3.057826018626527e-06,
      "loss": 0.2409,
      "step": 79
    },
    {
      "epoch": 1.2698412698412698,
      "grad_norm": 1.624145624947224,
      "learning_rate": 2.9435644843469434e-06,
      "loss": 0.2361,
      "step": 80
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 1.552144673465774,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.2051,
      "step": 81
    },
    {
      "epoch": 1.3015873015873016,
      "grad_norm": 1.6259207954939752,
      "learning_rate": 2.718946713234185e-06,
      "loss": 0.2213,
      "step": 82
    },
    {
      "epoch": 1.3174603174603174,
      "grad_norm": 1.6311364982482741,
      "learning_rate": 2.608730106893411e-06,
      "loss": 0.2505,
      "step": 83
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 1.5946669632314558,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.2077,
      "step": 84
    },
    {
      "epoch": 1.3492063492063493,
      "grad_norm": 1.6540736898757478,
      "learning_rate": 2.39282398310251e-06,
      "loss": 0.2231,
      "step": 85
    },
    {
      "epoch": 1.3650793650793651,
      "grad_norm": 1.5631800963608526,
      "learning_rate": 2.2872686806712037e-06,
      "loss": 0.2063,
      "step": 86
    },
    {
      "epoch": 1.380952380952381,
      "grad_norm": 1.6790445903303324,
      "learning_rate": 2.1833997096818897e-06,
      "loss": 0.2275,
      "step": 87
    },
    {
      "epoch": 1.3968253968253967,
      "grad_norm": 1.71594970960584,
      "learning_rate": 2.081281638826052e-06,
      "loss": 0.2304,
      "step": 88
    },
    {
      "epoch": 1.4126984126984126,
      "grad_norm": 1.6467944353789714,
      "learning_rate": 1.980977948372612e-06,
      "loss": 0.2284,
      "step": 89
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 1.508481981490406,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.2133,
      "step": 90
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 1.6577136924947176,
      "learning_rate": 1.7860619515673034e-06,
      "loss": 0.2344,
      "step": 91
    },
    {
      "epoch": 1.4603174603174602,
      "grad_norm": 1.6358011219062036,
      "learning_rate": 1.6915708120157042e-06,
      "loss": 0.2326,
      "step": 92
    },
    {
      "epoch": 1.4761904761904763,
      "grad_norm": 1.5343887042100857,
      "learning_rate": 1.5991363111454023e-06,
      "loss": 0.2376,
      "step": 93
    },
    {
      "epoch": 1.492063492063492,
      "grad_norm": 1.660583983348991,
      "learning_rate": 1.5088159095696365e-06,
      "loss": 0.225,
      "step": 94
    },
    {
      "epoch": 1.507936507936508,
      "grad_norm": 1.6005130793524434,
      "learning_rate": 1.4206657537014078e-06,
      "loss": 0.2305,
      "step": 95
    },
    {
      "epoch": 1.5238095238095237,
      "grad_norm": 1.553147977120734,
      "learning_rate": 1.3347406408508695e-06,
      "loss": 0.2288,
      "step": 96
    },
    {
      "epoch": 1.5396825396825395,
      "grad_norm": 1.5908104698304062,
      "learning_rate": 1.2510939851613285e-06,
      "loss": 0.2215,
      "step": 97
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 1.9504543471947708,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.278,
      "step": 98
    },
    {
      "epoch": 1.5714285714285714,
      "grad_norm": 1.704011286004108,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.2287,
      "step": 99
    },
    {
      "epoch": 1.5873015873015874,
      "grad_norm": 1.618234720661816,
      "learning_rate": 1.0143374638853892e-06,
      "loss": 0.2402,
      "step": 100
    },
    {
      "epoch": 1.6031746031746033,
      "grad_norm": 1.6475638575246008,
      "learning_rate": 9.403099714207175e-07,
      "loss": 0.2192,
      "step": 101
    },
    {
      "epoch": 1.619047619047619,
      "grad_norm": 1.5958099241656356,
      "learning_rate": 8.688061284200266e-07,
      "loss": 0.2549,
      "step": 102
    },
    {
      "epoch": 1.6349206349206349,
      "grad_norm": 1.561819978797271,
      "learning_rate": 7.99870384246143e-07,
      "loss": 0.2111,
      "step": 103
    },
    {
      "epoch": 1.6507936507936507,
      "grad_norm": 1.7949770813372914,
      "learning_rate": 7.33545591839222e-07,
      "loss": 0.2376,
      "step": 104
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 1.698853452351741,
      "learning_rate": 6.698729810778065e-07,
      "loss": 0.2243,
      "step": 105
    },
    {
      "epoch": 1.6825396825396826,
      "grad_norm": 1.7776514869091975,
      "learning_rate": 6.088921331488568e-07,
      "loss": 0.2356,
      "step": 106
    },
    {
      "epoch": 1.6984126984126984,
      "grad_norm": 1.6125533182565601,
      "learning_rate": 5.506409559426573e-07,
      "loss": 0.2192,
      "step": 107
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 1.4580499888991223,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.2011,
      "step": 108
    },
    {
      "epoch": 1.7301587301587302,
      "grad_norm": 1.44711042249179,
      "learning_rate": 4.4247073844163434e-07,
      "loss": 0.1919,
      "step": 109
    },
    {
      "epoch": 1.746031746031746,
      "grad_norm": 1.5319692607578637,
      "learning_rate": 3.9261894064796136e-07,
      "loss": 0.2228,
      "step": 110
    },
    {
      "epoch": 1.7619047619047619,
      "grad_norm": 1.703198205141255,
      "learning_rate": 3.4563125677897936e-07,
      "loss": 0.2446,
      "step": 111
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 1.4382012783244584,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.2085,
      "step": 112
    },
    {
      "epoch": 1.7936507936507935,
      "grad_norm": 1.4938131300996766,
      "learning_rate": 2.6036326916434153e-07,
      "loss": 0.2043,
      "step": 113
    },
    {
      "epoch": 1.8095238095238095,
      "grad_norm": 1.5238283598819027,
      "learning_rate": 2.2213597106929608e-07,
      "loss": 0.213,
      "step": 114
    },
    {
      "epoch": 1.8253968253968254,
      "grad_norm": 1.5482037294832478,
      "learning_rate": 1.8687876524993987e-07,
      "loss": 0.2136,
      "step": 115
    },
    {
      "epoch": 1.8412698412698414,
      "grad_norm": 1.480422561570781,
      "learning_rate": 1.5461356885461077e-07,
      "loss": 0.2242,
      "step": 116
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 1.4385375275853922,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.1934,
      "step": 117
    },
    {
      "epoch": 1.873015873015873,
      "grad_norm": 1.398116868516386,
      "learning_rate": 9.913756075728088e-08,
      "loss": 0.1943,
      "step": 118
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 1.481535586423097,
      "learning_rate": 7.59612349389599e-08,
      "loss": 0.2096,
      "step": 119
    },
    {
      "epoch": 1.9047619047619047,
      "grad_norm": 1.474742617578431,
      "learning_rate": 5.584586887435739e-08,
      "loss": 0.207,
      "step": 120
    },
    {
      "epoch": 1.9206349206349205,
      "grad_norm": 1.3680015424187337,
      "learning_rate": 3.8803966999139686e-08,
      "loss": 0.1845,
      "step": 121
    },
    {
      "epoch": 1.9365079365079365,
      "grad_norm": 1.7090636442828981,
      "learning_rate": 2.4846123172992953e-08,
      "loss": 0.2299,
      "step": 122
    },
    {
      "epoch": 1.9523809523809523,
      "grad_norm": 1.4560355937269758,
      "learning_rate": 1.3981014094099354e-08,
      "loss": 0.1846,
      "step": 123
    },
    {
      "epoch": 1.9682539682539684,
      "grad_norm": 1.6521726152458496,
      "learning_rate": 6.215393905388278e-09,
      "loss": 0.2492,
      "step": 124
    },
    {
      "epoch": 1.9841269841269842,
      "grad_norm": 1.3715437105112576,
      "learning_rate": 1.5540899959187727e-09,
      "loss": 0.1846,
      "step": 125
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.411488207674534,
      "learning_rate": 0.0,
      "loss": 0.1976,
      "step": 126
    },
    {
      "epoch": 2.0,
      "step": 126,
      "total_flos": 1772529647616.0,
      "train_loss": 0.2918981748913962,
      "train_runtime": 115.6265,
      "train_samples_per_second": 8.683,
      "train_steps_per_second": 1.09
    }
  ],
  "logging_steps": 1,
  "max_steps": 126,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 70000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1772529647616.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|