{
  "best_metric": 0.4396902620792389,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 3.008695652173913,
  "eval_steps": 50,
  "global_step": 173,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017391304347826087,
      "grad_norm": 8.883275032043457,
      "learning_rate": 1e-05,
      "loss": 4.2926,
      "step": 1
    },
    {
      "epoch": 0.017391304347826087,
      "eval_loss": 1.9020391702651978,
      "eval_runtime": 9.0598,
      "eval_samples_per_second": 10.707,
      "eval_steps_per_second": 2.759,
      "step": 1
    },
    {
      "epoch": 0.034782608695652174,
      "grad_norm": 18.399316787719727,
      "learning_rate": 2e-05,
      "loss": 4.2206,
      "step": 2
    },
    {
      "epoch": 0.05217391304347826,
      "grad_norm": 18.08584213256836,
      "learning_rate": 3e-05,
      "loss": 4.1941,
      "step": 3
    },
    {
      "epoch": 0.06956521739130435,
      "grad_norm": 18.791566848754883,
      "learning_rate": 4e-05,
      "loss": 3.7203,
      "step": 4
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 14.128814697265625,
      "learning_rate": 5e-05,
      "loss": 2.8787,
      "step": 5
    },
    {
      "epoch": 0.10434782608695652,
      "grad_norm": 13.8895263671875,
      "learning_rate": 6e-05,
      "loss": 2.7352,
      "step": 6
    },
    {
      "epoch": 0.12173913043478261,
      "grad_norm": 12.573573112487793,
      "learning_rate": 7e-05,
      "loss": 1.7571,
      "step": 7
    },
    {
      "epoch": 0.1391304347826087,
      "grad_norm": 17.67091178894043,
      "learning_rate": 8e-05,
      "loss": 2.9257,
      "step": 8
    },
    {
      "epoch": 0.1565217391304348,
      "grad_norm": 26.21525001525879,
      "learning_rate": 9e-05,
      "loss": 3.6193,
      "step": 9
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 32.96034622192383,
      "learning_rate": 0.0001,
      "loss": 3.5862,
      "step": 10
    },
    {
      "epoch": 0.19130434782608696,
      "grad_norm": 34.51445388793945,
      "learning_rate": 9.999071352056675e-05,
      "loss": 2.0717,
      "step": 11
    },
    {
      "epoch": 0.20869565217391303,
      "grad_norm": 20.016292572021484,
      "learning_rate": 9.9962857531815e-05,
      "loss": 2.6985,
      "step": 12
    },
    {
      "epoch": 0.22608695652173913,
      "grad_norm": 29.78652572631836,
      "learning_rate": 9.99164423811074e-05,
      "loss": 2.7999,
      "step": 13
    },
    {
      "epoch": 0.24347826086956523,
      "grad_norm": 103.21263122558594,
      "learning_rate": 9.985148530977767e-05,
      "loss": 4.0946,
      "step": 14
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 13.88134765625,
      "learning_rate": 9.976801044672608e-05,
      "loss": 4.4529,
      "step": 15
    },
    {
      "epoch": 0.2782608695652174,
      "grad_norm": 12.383321762084961,
      "learning_rate": 9.966604879945659e-05,
      "loss": 4.0322,
      "step": 16
    },
    {
      "epoch": 0.2956521739130435,
      "grad_norm": 16.159950256347656,
      "learning_rate": 9.954563824255878e-05,
      "loss": 3.4233,
      "step": 17
    },
    {
      "epoch": 0.3130434782608696,
      "grad_norm": 10.361807823181152,
      "learning_rate": 9.940682350363912e-05,
      "loss": 2.4715,
      "step": 18
    },
    {
      "epoch": 0.33043478260869563,
      "grad_norm": 11.233977317810059,
      "learning_rate": 9.924965614670629e-05,
      "loss": 3.0579,
      "step": 19
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 9.937857627868652,
      "learning_rate": 9.907419455301741e-05,
      "loss": 1.8839,
      "step": 20
    },
    {
      "epoch": 0.3652173913043478,
      "grad_norm": 11.618575096130371,
      "learning_rate": 9.888050389939172e-05,
      "loss": 2.0335,
      "step": 21
    },
    {
      "epoch": 0.3826086956521739,
      "grad_norm": 12.595673561096191,
      "learning_rate": 9.866865613400008e-05,
      "loss": 2.1181,
      "step": 22
    },
    {
      "epoch": 0.4,
      "grad_norm": 18.374019622802734,
      "learning_rate": 9.843872994963911e-05,
      "loss": 4.7754,
      "step": 23
    },
    {
      "epoch": 0.41739130434782606,
      "grad_norm": 20.49725914001465,
      "learning_rate": 9.819081075450014e-05,
      "loss": 1.8529,
      "step": 24
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 9.583206176757812,
      "learning_rate": 9.792499064044342e-05,
      "loss": 1.1469,
      "step": 25
    },
    {
      "epoch": 0.45217391304347826,
      "grad_norm": 14.794954299926758,
      "learning_rate": 9.764136834878986e-05,
      "loss": 1.3944,
      "step": 26
    },
    {
      "epoch": 0.46956521739130436,
      "grad_norm": 17.454011917114258,
      "learning_rate": 9.734004923364257e-05,
      "loss": 2.2708,
      "step": 27
    },
    {
      "epoch": 0.48695652173913045,
      "grad_norm": 42.63334274291992,
      "learning_rate": 9.702114522275216e-05,
      "loss": 1.3025,
      "step": 28
    },
    {
      "epoch": 0.5043478260869565,
      "grad_norm": 8.601645469665527,
      "learning_rate": 9.66847747759402e-05,
      "loss": 4.1274,
      "step": 29
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 10.501288414001465,
      "learning_rate": 9.63310628410961e-05,
      "loss": 3.3343,
      "step": 30
    },
    {
      "epoch": 0.5391304347826087,
      "grad_norm": 10.755828857421875,
      "learning_rate": 9.596014080776423e-05,
      "loss": 1.7963,
      "step": 31
    },
    {
      "epoch": 0.5565217391304348,
      "grad_norm": 7.994204044342041,
      "learning_rate": 9.557214645833792e-05,
      "loss": 1.995,
      "step": 32
    },
    {
      "epoch": 0.5739130434782609,
      "grad_norm": 11.35947322845459,
      "learning_rate": 9.516722391687902e-05,
      "loss": 2.3837,
      "step": 33
    },
    {
      "epoch": 0.591304347826087,
      "grad_norm": 9.96140193939209,
      "learning_rate": 9.474552359558166e-05,
      "loss": 1.6152,
      "step": 34
    },
    {
      "epoch": 0.6086956521739131,
      "grad_norm": 8.912185668945312,
      "learning_rate": 9.43072021389003e-05,
      "loss": 2.9803,
      "step": 35
    },
    {
      "epoch": 0.6260869565217392,
      "grad_norm": 14.464909553527832,
      "learning_rate": 9.38524223653626e-05,
      "loss": 2.7963,
      "step": 36
    },
    {
      "epoch": 0.6434782608695652,
      "grad_norm": 11.840982437133789,
      "learning_rate": 9.338135320708911e-05,
      "loss": 3.135,
      "step": 37
    },
    {
      "epoch": 0.6608695652173913,
      "grad_norm": 11.059337615966797,
      "learning_rate": 9.289416964704185e-05,
      "loss": 1.1317,
      "step": 38
    },
    {
      "epoch": 0.6782608695652174,
      "grad_norm": 10.759260177612305,
      "learning_rate": 9.239105265402525e-05,
      "loss": 1.2317,
      "step": 39
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 8.21378231048584,
      "learning_rate": 9.187218911546362e-05,
      "loss": 0.8413,
      "step": 40
    },
    {
      "epoch": 0.7130434782608696,
      "grad_norm": 17.352684020996094,
      "learning_rate": 9.133777176798013e-05,
      "loss": 1.3149,
      "step": 41
    },
    {
      "epoch": 0.7304347826086957,
      "grad_norm": 26.137418746948242,
      "learning_rate": 9.078799912580304e-05,
      "loss": 2.0237,
      "step": 42
    },
    {
      "epoch": 0.7478260869565218,
      "grad_norm": 8.654214859008789,
      "learning_rate": 9.022307540702576e-05,
      "loss": 3.6969,
      "step": 43
    },
    {
      "epoch": 0.7652173913043478,
      "grad_norm": 11.014862060546875,
      "learning_rate": 8.964321045774807e-05,
      "loss": 3.2013,
      "step": 44
    },
    {
      "epoch": 0.782608695652174,
      "grad_norm": 8.544939994812012,
      "learning_rate": 8.904861967412703e-05,
      "loss": 3.5936,
      "step": 45
    },
    {
      "epoch": 0.8,
      "grad_norm": 10.577417373657227,
      "learning_rate": 8.843952392236594e-05,
      "loss": 2.7795,
      "step": 46
    },
    {
      "epoch": 0.8173913043478261,
      "grad_norm": 8.548279762268066,
      "learning_rate": 8.781614945667169e-05,
      "loss": 2.1591,
      "step": 47
    },
    {
      "epoch": 0.8347826086956521,
      "grad_norm": 9.758591651916504,
      "learning_rate": 8.717872783521047e-05,
      "loss": 2.1567,
      "step": 48
    },
    {
      "epoch": 0.8521739130434782,
      "grad_norm": 9.762876510620117,
      "learning_rate": 8.65274958340934e-05,
      "loss": 2.3352,
      "step": 49
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 8.696921348571777,
      "learning_rate": 8.586269535942385e-05,
      "loss": 2.2677,
      "step": 50
    },
    {
      "epoch": 0.8695652173913043,
      "eval_loss": 0.4909374713897705,
      "eval_runtime": 9.2435,
      "eval_samples_per_second": 10.494,
      "eval_steps_per_second": 2.705,
      "step": 50
    },
    {
      "epoch": 0.8869565217391304,
      "grad_norm": 11.873095512390137,
      "learning_rate": 8.518457335743926e-05,
      "loss": 2.753,
      "step": 51
    },
    {
      "epoch": 0.9043478260869565,
      "grad_norm": 12.200470924377441,
      "learning_rate": 8.449338172278059e-05,
      "loss": 2.466,
      "step": 52
    },
    {
      "epoch": 0.9217391304347826,
      "grad_norm": 26.438901901245117,
      "learning_rate": 8.378937720492384e-05,
      "loss": 1.0808,
      "step": 53
    },
    {
      "epoch": 0.9391304347826087,
      "grad_norm": 22.44983673095703,
      "learning_rate": 8.307282131280804e-05,
      "loss": 0.7925,
      "step": 54
    },
    {
      "epoch": 0.9565217391304348,
      "grad_norm": 9.913865089416504,
      "learning_rate": 8.23439802176954e-05,
      "loss": 1.5618,
      "step": 55
    },
    {
      "epoch": 0.9739130434782609,
      "grad_norm": 106.08903503417969,
      "learning_rate": 8.160312465429952e-05,
      "loss": 2.0868,
      "step": 56
    },
    {
      "epoch": 0.991304347826087,
      "grad_norm": 10.000048637390137,
      "learning_rate": 8.085052982021847e-05,
      "loss": 2.5997,
      "step": 57
    },
    {
      "epoch": 1.008695652173913,
      "grad_norm": 9.5238676071167,
      "learning_rate": 8.008647527371023e-05,
      "loss": 2.9929,
      "step": 58
    },
    {
      "epoch": 1.0260869565217392,
      "grad_norm": 5.974183082580566,
      "learning_rate": 7.931124482984802e-05,
      "loss": 2.2291,
      "step": 59
    },
    {
      "epoch": 1.0434782608695652,
      "grad_norm": 7.498763561248779,
      "learning_rate": 7.85251264550948e-05,
      "loss": 1.6798,
      "step": 60
    },
    {
      "epoch": 1.0608695652173914,
      "grad_norm": 6.492100715637207,
      "learning_rate": 7.772841216033533e-05,
      "loss": 1.8157,
      "step": 61
    },
    {
      "epoch": 1.0782608695652174,
      "grad_norm": 6.82267951965332,
      "learning_rate": 7.692139789240611e-05,
      "loss": 1.2322,
      "step": 62
    },
    {
      "epoch": 1.0956521739130434,
      "grad_norm": 5.267804145812988,
      "learning_rate": 7.610438342416319e-05,
      "loss": 1.5287,
      "step": 63
    },
    {
      "epoch": 1.1130434782608696,
      "grad_norm": 5.903561115264893,
      "learning_rate": 7.527767224312883e-05,
      "loss": 0.9725,
      "step": 64
    },
    {
      "epoch": 1.1304347826086956,
      "grad_norm": 6.4522624015808105,
      "learning_rate": 7.44415714387582e-05,
      "loss": 1.1919,
      "step": 65
    },
    {
      "epoch": 1.1478260869565218,
      "grad_norm": 12.900763511657715,
      "learning_rate": 7.359639158836828e-05,
      "loss": 1.3117,
      "step": 66
    },
    {
      "epoch": 1.1652173913043478,
      "grad_norm": 8.239570617675781,
      "learning_rate": 7.274244664177097e-05,
      "loss": 1.0773,
      "step": 67
    },
    {
      "epoch": 1.182608695652174,
      "grad_norm": 6.0005364418029785,
      "learning_rate": 7.188005380465364e-05,
      "loss": 0.3932,
      "step": 68
    },
    {
      "epoch": 1.2,
      "grad_norm": 8.502946853637695,
      "learning_rate": 7.10095334207501e-05,
      "loss": 0.9309,
      "step": 69
    },
    {
      "epoch": 1.2173913043478262,
      "grad_norm": 13.114483833312988,
      "learning_rate": 7.013120885284598e-05,
      "loss": 0.6948,
      "step": 70
    },
    {
      "epoch": 1.2347826086956522,
      "grad_norm": 12.373690605163574,
      "learning_rate": 6.924540636266272e-05,
      "loss": 1.1723,
      "step": 71
    },
    {
      "epoch": 1.2521739130434781,
      "grad_norm": 8.012110710144043,
      "learning_rate": 6.835245498966461e-05,
      "loss": 2.3979,
      "step": 72
    },
    {
      "epoch": 1.2695652173913043,
      "grad_norm": 7.275670528411865,
      "learning_rate": 6.745268642883404e-05,
      "loss": 2.593,
      "step": 73
    },
    {
      "epoch": 1.2869565217391306,
      "grad_norm": 8.32419490814209,
      "learning_rate": 6.654643490746042e-05,
      "loss": 1.9259,
      "step": 74
    },
    {
      "epoch": 1.3043478260869565,
      "grad_norm": 6.298715591430664,
      "learning_rate": 6.563403706098833e-05,
      "loss": 1.7114,
      "step": 75
    },
    {
      "epoch": 1.3217391304347825,
      "grad_norm": 8.741583824157715,
      "learning_rate": 6.471583180797121e-05,
      "loss": 1.3124,
      "step": 76
    },
    {
      "epoch": 1.3391304347826087,
      "grad_norm": 6.135732650756836,
      "learning_rate": 6.379216022417696e-05,
      "loss": 1.1135,
      "step": 77
    },
    {
      "epoch": 1.3565217391304347,
      "grad_norm": 7.119026184082031,
      "learning_rate": 6.286336541589224e-05,
      "loss": 0.8562,
      "step": 78
    },
    {
      "epoch": 1.373913043478261,
      "grad_norm": 5.54868221282959,
      "learning_rate": 6.192979239247243e-05,
      "loss": 0.9345,
      "step": 79
    },
    {
      "epoch": 1.391304347826087,
      "grad_norm": 5.854312896728516,
      "learning_rate": 6.0991787938184784e-05,
      "loss": 0.9421,
      "step": 80
    },
    {
      "epoch": 1.4086956521739131,
      "grad_norm": 8.708279609680176,
      "learning_rate": 6.004970048339226e-05,
      "loss": 1.6717,
      "step": 81
    },
    {
      "epoch": 1.4260869565217391,
      "grad_norm": 14.216882705688477,
      "learning_rate": 5.910387997512573e-05,
      "loss": 1.0587,
      "step": 82
    },
    {
      "epoch": 1.4434782608695653,
      "grad_norm": 9.43204116821289,
      "learning_rate": 5.8154677747093134e-05,
      "loss": 0.3196,
      "step": 83
    },
    {
      "epoch": 1.4608695652173913,
      "grad_norm": 21.38327980041504,
      "learning_rate": 5.7202446389173223e-05,
      "loss": 0.6099,
      "step": 84
    },
    {
      "epoch": 1.4782608695652173,
      "grad_norm": 12.446172714233398,
      "learning_rate": 5.624753961644281e-05,
      "loss": 0.5437,
      "step": 85
    },
    {
      "epoch": 1.4956521739130435,
      "grad_norm": 19.80995750427246,
      "learning_rate": 5.5290312137786146e-05,
      "loss": 1.503,
      "step": 86
    },
    {
      "epoch": 1.5130434782608697,
      "grad_norm": 5.809496879577637,
      "learning_rate": 5.433111952413495e-05,
      "loss": 2.0654,
      "step": 87
    },
    {
      "epoch": 1.5304347826086957,
      "grad_norm": 7.104956150054932,
      "learning_rate": 5.33703180763884e-05,
      "loss": 1.8031,
      "step": 88
    },
    {
      "epoch": 1.5478260869565217,
      "grad_norm": 8.171567916870117,
      "learning_rate": 5.240826469306187e-05,
      "loss": 1.3728,
      "step": 89
    },
    {
      "epoch": 1.5652173913043477,
      "grad_norm": 7.269556522369385,
      "learning_rate": 5.144531673771363e-05,
      "loss": 1.2346,
      "step": 90
    },
    {
      "epoch": 1.5826086956521739,
      "grad_norm": 5.152873992919922,
      "learning_rate": 5.048183190619904e-05,
      "loss": 0.7145,
      "step": 91
    },
    {
      "epoch": 1.6,
      "grad_norm": 6.587265491485596,
      "learning_rate": 4.951816809380097e-05,
      "loss": 1.0577,
      "step": 92
    },
    {
      "epoch": 1.617391304347826,
      "grad_norm": 7.419044017791748,
      "learning_rate": 4.855468326228638e-05,
      "loss": 1.4921,
      "step": 93
    },
    {
      "epoch": 1.634782608695652,
      "grad_norm": 7.340956687927246,
      "learning_rate": 4.759173530693814e-05,
      "loss": 1.1331,
      "step": 94
    },
    {
      "epoch": 1.6521739130434783,
      "grad_norm": 9.950460433959961,
      "learning_rate": 4.6629681923611603e-05,
      "loss": 1.1272,
      "step": 95
    },
    {
      "epoch": 1.6695652173913045,
      "grad_norm": 26.5368595123291,
      "learning_rate": 4.566888047586507e-05,
      "loss": 0.6709,
      "step": 96
    },
    {
      "epoch": 1.6869565217391305,
      "grad_norm": 7.289899826049805,
      "learning_rate": 4.4709687862213866e-05,
      "loss": 0.5705,
      "step": 97
    },
    {
      "epoch": 1.7043478260869565,
      "grad_norm": 30.423858642578125,
      "learning_rate": 4.3752460383557195e-05,
      "loss": 1.0803,
      "step": 98
    },
    {
      "epoch": 1.7217391304347827,
      "grad_norm": 9.387825965881348,
      "learning_rate": 4.27975536108268e-05,
      "loss": 0.5288,
      "step": 99
    },
    {
      "epoch": 1.7391304347826086,
      "grad_norm": 15.499227523803711,
      "learning_rate": 4.1845322252906864e-05,
      "loss": 2.4856,
      "step": 100
    },
    {
      "epoch": 1.7391304347826086,
      "eval_loss": 0.4396902620792389,
      "eval_runtime": 9.2547,
      "eval_samples_per_second": 10.481,
      "eval_steps_per_second": 2.701,
      "step": 100
    },
    {
      "epoch": 1.7565217391304349,
      "grad_norm": 9.441740989685059,
      "learning_rate": 4.0896120024874286e-05,
      "loss": 2.5925,
      "step": 101
    },
    {
      "epoch": 1.7739130434782608,
      "grad_norm": 6.262640953063965,
      "learning_rate": 3.9950299516607766e-05,
      "loss": 1.9076,
      "step": 102
    },
    {
      "epoch": 1.7913043478260868,
      "grad_norm": 6.341916084289551,
      "learning_rate": 3.900821206181521e-05,
      "loss": 1.2718,
      "step": 103
    },
    {
      "epoch": 1.808695652173913,
      "grad_norm": 6.276321887969971,
      "learning_rate": 3.8070207607527584e-05,
      "loss": 1.3664,
      "step": 104
    },
    {
      "epoch": 1.8260869565217392,
      "grad_norm": 6.208406925201416,
      "learning_rate": 3.713663458410779e-05,
      "loss": 0.9109,
      "step": 105
    },
    {
      "epoch": 1.8434782608695652,
      "grad_norm": 4.897834300994873,
      "learning_rate": 3.620783977582305e-05,
      "loss": 0.7668,
      "step": 106
    },
    {
      "epoch": 1.8608695652173912,
      "grad_norm": 7.578703880310059,
      "learning_rate": 3.528416819202881e-05,
      "loss": 1.0439,
      "step": 107
    },
    {
      "epoch": 1.8782608695652174,
      "grad_norm": 7.348963737487793,
      "learning_rate": 3.43659629390117e-05,
      "loss": 1.5878,
      "step": 108
    },
    {
      "epoch": 1.8956521739130436,
      "grad_norm": 9.581995964050293,
      "learning_rate": 3.345356509253959e-05,
      "loss": 1.3644,
      "step": 109
    },
    {
      "epoch": 1.9130434782608696,
      "grad_norm": 10.65773868560791,
      "learning_rate": 3.254731357116597e-05,
      "loss": 0.4552,
      "step": 110
    },
    {
      "epoch": 1.9304347826086956,
      "grad_norm": 8.8335542678833,
      "learning_rate": 3.16475450103354e-05,
      "loss": 0.2004,
      "step": 111
    },
    {
      "epoch": 1.9478260869565216,
      "grad_norm": 4.476780891418457,
      "learning_rate": 3.0754593637337276e-05,
      "loss": 0.3355,
      "step": 112
    },
    {
      "epoch": 1.9652173913043478,
      "grad_norm": 6.725592613220215,
      "learning_rate": 2.986879114715403e-05,
      "loss": 0.4255,
      "step": 113
    },
    {
      "epoch": 1.982608695652174,
      "grad_norm": 4.152976989746094,
      "learning_rate": 2.899046657924992e-05,
      "loss": 1.3199,
      "step": 114
    },
    {
      "epoch": 2.0,
      "grad_norm": 8.147799491882324,
      "learning_rate": 2.8119946195346375e-05,
      "loss": 1.0268,
      "step": 115
    },
    {
      "epoch": 2.017391304347826,
      "grad_norm": 3.997720241546631,
      "learning_rate": 2.7257553358229034e-05,
      "loss": 2.3721,
      "step": 116
    },
    {
      "epoch": 2.034782608695652,
      "grad_norm": 4.236757755279541,
      "learning_rate": 2.6403608411631742e-05,
      "loss": 1.0922,
      "step": 117
    },
    {
      "epoch": 2.0521739130434784,
      "grad_norm": 4.957376956939697,
      "learning_rate": 2.555842856124182e-05,
      "loss": 0.9086,
      "step": 118
    },
    {
      "epoch": 2.0695652173913044,
      "grad_norm": 5.636981964111328,
      "learning_rate": 2.472232775687119e-05,
      "loss": 0.5955,
      "step": 119
    },
    {
      "epoch": 2.0869565217391304,
      "grad_norm": 3.7651617527008057,
      "learning_rate": 2.389561657583681e-05,
      "loss": 0.4258,
      "step": 120
    },
    {
      "epoch": 2.1043478260869564,
      "grad_norm": 4.104703903198242,
      "learning_rate": 2.30786021075939e-05,
      "loss": 0.3615,
      "step": 121
    },
    {
      "epoch": 2.121739130434783,
      "grad_norm": 4.564859390258789,
      "learning_rate": 2.2271587839664672e-05,
      "loss": 0.3972,
      "step": 122
    },
    {
      "epoch": 2.139130434782609,
      "grad_norm": 5.188112735748291,
      "learning_rate": 2.1474873544905205e-05,
      "loss": 0.5548,
      "step": 123
    },
    {
      "epoch": 2.1565217391304348,
      "grad_norm": 5.654900550842285,
      "learning_rate": 2.0688755170151996e-05,
      "loss": 0.6482,
      "step": 124
    },
    {
      "epoch": 2.1739130434782608,
      "grad_norm": 6.888501167297363,
      "learning_rate": 1.9913524726289784e-05,
      "loss": 0.3852,
      "step": 125
    },
    {
      "epoch": 2.1913043478260867,
      "grad_norm": 6.729064464569092,
      "learning_rate": 1.914947017978153e-05,
      "loss": 0.282,
      "step": 126
    },
    {
      "epoch": 2.208695652173913,
      "grad_norm": 8.329999923706055,
      "learning_rate": 1.8396875345700497e-05,
      "loss": 0.1444,
      "step": 127
    },
    {
      "epoch": 2.226086956521739,
      "grad_norm": 9.099133491516113,
      "learning_rate": 1.76560197823046e-05,
      "loss": 0.435,
      "step": 128
    },
    {
      "epoch": 2.243478260869565,
      "grad_norm": 3.2181084156036377,
      "learning_rate": 1.692717868719195e-05,
      "loss": 0.03,
      "step": 129
    },
    {
      "epoch": 2.260869565217391,
      "grad_norm": 5.805236339569092,
      "learning_rate": 1.621062279507617e-05,
      "loss": 1.2943,
      "step": 130
    },
    {
      "epoch": 2.2782608695652176,
      "grad_norm": 9.254953384399414,
      "learning_rate": 1.550661827721941e-05,
      "loss": 1.424,
      "step": 131
    },
    {
      "epoch": 2.2956521739130435,
      "grad_norm": 7.753330230712891,
      "learning_rate": 1.4815426642560754e-05,
      "loss": 0.8185,
      "step": 132
    },
    {
      "epoch": 2.3130434782608695,
      "grad_norm": 9.017913818359375,
      "learning_rate": 1.413730464057616e-05,
      "loss": 0.7997,
      "step": 133
    },
    {
      "epoch": 2.3304347826086955,
      "grad_norm": 6.765071868896484,
      "learning_rate": 1.3472504165906613e-05,
      "loss": 0.4594,
      "step": 134
    },
    {
      "epoch": 2.3478260869565215,
      "grad_norm": 4.712010860443115,
      "learning_rate": 1.2821272164789544e-05,
      "loss": 0.2742,
      "step": 135
    },
    {
      "epoch": 2.365217391304348,
      "grad_norm": 4.8088250160217285,
      "learning_rate": 1.2183850543328312e-05,
      "loss": 0.2534,
      "step": 136
    },
    {
      "epoch": 2.382608695652174,
      "grad_norm": 10.322266578674316,
      "learning_rate": 1.156047607763407e-05,
      "loss": 0.4052,
      "step": 137
    },
    {
      "epoch": 2.4,
      "grad_norm": 15.81982421875,
      "learning_rate": 1.0951380325872979e-05,
      "loss": 0.5153,
      "step": 138
    },
    {
      "epoch": 2.417391304347826,
      "grad_norm": 4.597114562988281,
      "learning_rate": 1.0356789542251938e-05,
      "loss": 0.1805,
      "step": 139
    },
    {
      "epoch": 2.4347826086956523,
      "grad_norm": 10.027156829833984,
      "learning_rate": 9.776924592974256e-06,
      "loss": 0.084,
      "step": 140
    },
    {
      "epoch": 2.4521739130434783,
      "grad_norm": 4.2314276695251465,
      "learning_rate": 9.212000874196953e-06,
      "loss": 0.1051,
      "step": 141
    },
    {
      "epoch": 2.4695652173913043,
      "grad_norm": 6.991565704345703,
      "learning_rate": 8.662228232019876e-06,
      "loss": 0.3733,
      "step": 142
    },
    {
      "epoch": 2.4869565217391303,
      "grad_norm": 5.351903915405273,
      "learning_rate": 8.127810884536403e-06,
      "loss": 0.0471,
      "step": 143
    },
    {
      "epoch": 2.5043478260869563,
      "grad_norm": 5.928205966949463,
      "learning_rate": 7.60894734597476e-06,
      "loss": 2.3398,
      "step": 144
    },
    {
      "epoch": 2.5217391304347827,
      "grad_norm": 8.773082733154297,
      "learning_rate": 7.105830352958142e-06,
      "loss": 1.3465,
      "step": 145
    },
    {
      "epoch": 2.5391304347826087,
      "grad_norm": 5.912259578704834,
      "learning_rate": 6.618646792910893e-06,
      "loss": 0.8377,
      "step": 146
    },
    {
      "epoch": 2.5565217391304347,
      "grad_norm": 5.538220405578613,
      "learning_rate": 6.147577634637414e-06,
      "loss": 0.4977,
      "step": 147
    },
    {
      "epoch": 2.573913043478261,
      "grad_norm": 6.276126861572266,
      "learning_rate": 5.692797861099719e-06,
      "loss": 0.4744,
      "step": 148
    },
    {
      "epoch": 2.591304347826087,
      "grad_norm": 5.5036396980285645,
      "learning_rate": 5.25447640441834e-06,
      "loss": 0.4011,
      "step": 149
    },
    {
      "epoch": 2.608695652173913,
      "grad_norm": 4.149447441101074,
      "learning_rate": 4.832776083120982e-06,
      "loss": 0.22,
      "step": 150
    },
    {
      "epoch": 2.608695652173913,
      "eval_loss": 0.5425323247909546,
      "eval_runtime": 9.2404,
      "eval_samples_per_second": 10.497,
      "eval_steps_per_second": 2.706,
      "step": 150
    },
    {
      "epoch": 2.626086956521739,
      "grad_norm": 5.254273414611816,
      "learning_rate": 4.427853541662091e-06,
      "loss": 0.425,
      "step": 151
    },
    {
      "epoch": 2.643478260869565,
      "grad_norm": 10.881125450134277,
      "learning_rate": 4.039859192235779e-06,
      "loss": 0.6446,
      "step": 152
    },
    {
      "epoch": 2.660869565217391,
      "grad_norm": 8.573467254638672,
      "learning_rate": 3.668937158903901e-06,
      "loss": 0.1585,
      "step": 153
    },
    {
      "epoch": 2.6782608695652175,
      "grad_norm": 8.719847679138184,
      "learning_rate": 3.315225224059809e-06,
      "loss": 0.3111,
      "step": 154
    },
    {
      "epoch": 2.6956521739130435,
      "grad_norm": 4.025722026824951,
      "learning_rate": 2.9788547772478416e-06,
      "loss": 0.3628,
      "step": 155
    },
    {
      "epoch": 2.7130434782608694,
      "grad_norm": 12.714775085449219,
      "learning_rate": 2.6599507663574384e-06,
      "loss": 0.2022,
      "step": 156
    },
    {
      "epoch": 2.730434782608696,
      "grad_norm": 17.537216186523438,
      "learning_rate": 2.3586316512101416e-06,
      "loss": 0.1643,
      "step": 157
    },
    {
      "epoch": 2.747826086956522,
      "grad_norm": 6.424210071563721,
      "learning_rate": 2.0750093595565733e-06,
      "loss": 2.195,
      "step": 158
    },
    {
      "epoch": 2.765217391304348,
      "grad_norm": 6.1707048416137695,
      "learning_rate": 1.8091892454998594e-06,
      "loss": 0.915,
      "step": 159
    },
    {
      "epoch": 2.782608695652174,
      "grad_norm": 9.390337944030762,
      "learning_rate": 1.5612700503608968e-06,
      "loss": 0.8495,
      "step": 160
    },
    {
      "epoch": 2.8,
      "grad_norm": 4.434484958648682,
      "learning_rate": 1.33134386599994e-06,
      "loss": 0.3239,
      "step": 161
    },
    {
      "epoch": 2.8173913043478263,
      "grad_norm": 6.292799472808838,
      "learning_rate": 1.1194961006082972e-06,
      "loss": 0.5492,
      "step": 162
    },
    {
      "epoch": 2.8347826086956522,
      "grad_norm": 4.593890190124512,
      "learning_rate": 9.258054469825972e-07,
      "loss": 0.3086,
      "step": 163
    },
    {
      "epoch": 2.8521739130434782,
      "grad_norm": 5.500854969024658,
      "learning_rate": 7.503438532937168e-07,
      "loss": 0.2752,
      "step": 164
    },
    {
      "epoch": 2.869565217391304,
      "grad_norm": 9.617355346679688,
      "learning_rate": 5.931764963608866e-07,
      "loss": 0.3153,
      "step": 165
    },
    {
      "epoch": 2.8869565217391306,
      "grad_norm": 7.330923080444336,
      "learning_rate": 4.543617574412184e-07,
      "loss": 0.6061,
      "step": 166
    },
    {
      "epoch": 2.9043478260869566,
      "grad_norm": 4.938751220703125,
      "learning_rate": 3.339512005434309e-07,
      "loss": 0.2001,
      "step": 167
    },
    {
      "epoch": 2.9217391304347826,
      "grad_norm": 9.237695693969727,
      "learning_rate": 2.319895532739369e-07,
      "loss": 0.1176,
      "step": 168
    },
    {
      "epoch": 2.9391304347826086,
      "grad_norm": 13.457283973693848,
      "learning_rate": 1.4851469022234e-07,
      "loss": 0.2774,
      "step": 169
    },
    {
      "epoch": 2.9565217391304346,
      "grad_norm": 2.380657911300659,
      "learning_rate": 8.355761889260461e-08,
      "loss": 0.042,
      "step": 170
    },
    {
      "epoch": 2.973913043478261,
      "grad_norm": 12.363837242126465,
      "learning_rate": 3.7142468185014104e-08,
      "loss": 0.1025,
      "step": 171
    },
    {
      "epoch": 2.991304347826087,
      "grad_norm": 5.269643783569336,
      "learning_rate": 9.286479433257e-09,
      "loss": 1.1058,
      "step": 172
    },
    {
      "epoch": 3.008695652173913,
      "grad_norm": 4.399047374725342,
      "learning_rate": 0.0,
      "loss": 1.715,
      "step": 173
    }
  ],
  "logging_steps": 1,
  "max_steps": 173,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.5064824501174272e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}