{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0219132811959306,
  "eval_steps": 100000000,
  "global_step": 126000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 10.7586,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 5e-05,
      "loss": 6.5292,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99998214600225e-05,
      "loss": 4.7262,
      "step": 1000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9999285842640105e-05,
      "loss": 4.1997,
      "step": 1500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999839315550315e-05,
      "loss": 3.8892,
      "step": 2000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9997143411362066e-05,
      "loss": 3.6618,
      "step": 2500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9995536628067196e-05,
      "loss": 3.5132,
      "step": 3000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9993572828568536e-05,
      "loss": 3.4092,
      "step": 3500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.999125204091544e-05,
      "loss": 3.3295,
      "step": 4000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998857429825615e-05,
      "loss": 3.2596,
      "step": 4500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9985539638837424e-05,
      "loss": 3.2068,
      "step": 5000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.998214810600389e-05,
      "loss": 3.163,
      "step": 5500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.997839974819749e-05,
      "loss": 3.1256,
      "step": 6000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.997429461895675e-05,
      "loss": 3.0844,
      "step": 6500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996983277691606e-05,
      "loss": 3.0541,
      "step": 7000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996501428580478e-05,
      "loss": 3.0293,
      "step": 7500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.995983921444638e-05,
      "loss": 2.999,
      "step": 8000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9954307636757434e-05,
      "loss": 2.9744,
      "step": 8500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.994841963174656e-05,
      "loss": 2.9525,
      "step": 9000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9942175283513294e-05,
      "loss": 2.9321,
      "step": 9500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.993557468124691e-05,
      "loss": 2.9237,
      "step": 10000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9928617919225106e-05,
      "loss": 2.9021,
      "step": 10500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.99213050968127e-05,
      "loss": 2.8766,
      "step": 11000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991363631846019e-05,
      "loss": 2.8691,
      "step": 11500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.990561169370224e-05,
      "loss": 2.8531,
      "step": 12000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.989723133715618e-05,
      "loss": 2.8366,
      "step": 12500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.988849536852028e-05,
      "loss": 2.8265,
      "step": 13000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.987940391257212e-05,
      "loss": 2.8099,
      "step": 13500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.986995709916677e-05,
      "loss": 2.8009,
      "step": 14000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9860155063234944e-05,
      "loss": 2.7881,
      "step": 14500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9849997944781055e-05,
      "loss": 2.7794,
      "step": 15000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9839485888881235e-05,
      "loss": 2.7674,
      "step": 15500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.982861904568127e-05,
      "loss": 2.7559,
      "step": 16000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.981739757039443e-05,
      "loss": 2.7455,
      "step": 16500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9805821623299285e-05,
      "loss": 2.7351,
      "step": 17000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.979389136973737e-05,
      "loss": 2.7317,
      "step": 17500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.978160698011085e-05,
      "loss": 2.7192,
      "step": 18000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.976896862988012e-05,
      "loss": 2.709,
      "step": 18500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.975597649956122e-05,
      "loss": 2.7043,
      "step": 19000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.974263077472334e-05,
      "loss": 2.6954,
      "step": 19500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.97289316459861e-05,
      "loss": 2.6905,
      "step": 20000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.9714879309016874e-05,
      "loss": 2.6782,
      "step": 20500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.970047396452798e-05,
      "loss": 2.6751,
      "step": 21000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.968571581827381e-05,
      "loss": 2.6659,
      "step": 21500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9670605081047886e-05,
      "loss": 2.6594,
      "step": 22000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9655141968679866e-05,
      "loss": 2.6553,
      "step": 22500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.963932670203245e-05,
      "loss": 2.6499,
      "step": 23000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.962315950699822e-05,
      "loss": 2.6391,
      "step": 23500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.960664061449643e-05,
      "loss": 2.6313,
      "step": 24000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.958977026046971e-05,
      "loss": 2.6318,
      "step": 24500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.957254868588065e-05,
      "loss": 2.6275,
      "step": 25000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.955497613670842e-05,
      "loss": 2.6192,
      "step": 25500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.953705286394522e-05,
      "loss": 2.6153,
      "step": 26000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9518779123592705e-05,
      "loss": 2.6102,
      "step": 26500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9500155176658345e-05,
      "loss": 2.601,
      "step": 27000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9481181289151655e-05,
      "loss": 2.5991,
      "step": 27500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9461857732080436e-05,
      "loss": 2.5946,
      "step": 28000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9442184781446876e-05,
      "loss": 2.5911,
      "step": 28500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9422162718243626e-05,
      "loss": 2.5827,
      "step": 29000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.94017918284498e-05,
      "loss": 2.5785,
      "step": 29500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.938107240302683e-05,
      "loss": 2.5757,
      "step": 30000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.93600047379144e-05,
      "loss": 2.5705,
      "step": 30500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9338589134026124e-05,
      "loss": 2.5703,
      "step": 31000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.931682589724534e-05,
      "loss": 2.5593,
      "step": 31500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.929471533842065e-05,
      "loss": 2.5647,
      "step": 32000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9272257773361574e-05,
      "loss": 2.5564,
      "step": 32500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.924945352283394e-05,
      "loss": 2.5492,
      "step": 33000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.922630291255539e-05,
      "loss": 2.5509,
      "step": 33500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.920280627319067e-05,
      "loss": 2.5466,
      "step": 34000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.917896394034696e-05,
      "loss": 2.5412,
      "step": 34500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.915477625456899e-05,
      "loss": 2.5383,
      "step": 35000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.91302435613343e-05,
      "loss": 2.5364,
      "step": 35500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.91053662110482e-05,
      "loss": 2.5292,
      "step": 36000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.908014455903881e-05,
      "loss": 2.5304,
      "step": 36500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.905457896555199e-05,
      "loss": 2.5257,
      "step": 37000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.902866979574618e-05,
      "loss": 2.5189,
      "step": 37500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9002417419687183e-05,
      "loss": 2.522,
      "step": 38000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.897582221234289e-05,
      "loss": 2.5141,
      "step": 38500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.8948884553577926e-05,
      "loss": 2.517,
      "step": 39000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.8921604828148206e-05,
      "loss": 2.5077,
      "step": 39500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8893983425695445e-05,
      "loss": 2.5068,
      "step": 40000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8866020740741626e-05,
      "loss": 2.5048,
      "step": 40500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.883771717268331e-05,
      "loss": 2.4972,
      "step": 41000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.8809073125785964e-05,
      "loss": 2.4999,
      "step": 41500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.8780089009178206e-05,
      "loss": 2.4941,
      "step": 42000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.8750765236845894e-05,
      "loss": 2.4891,
      "step": 42500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.8721102227626304e-05,
      "loss": 2.4876,
      "step": 43000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.869110040520206e-05,
      "loss": 2.4859,
      "step": 43500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.8660760198095145e-05,
      "loss": 2.4833,
      "step": 44000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.8630082039660744e-05,
      "loss": 2.4825,
      "step": 44500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.859906636808108e-05,
      "loss": 2.4801,
      "step": 45000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.8567713626359134e-05,
      "loss": 2.4763,
      "step": 45500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.853602426231234e-05,
      "loss": 2.4742,
      "step": 46000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.850399872856615e-05,
      "loss": 2.4702,
      "step": 46500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.847163748254762e-05,
      "loss": 2.4679,
      "step": 47000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.843894098647884e-05,
      "loss": 2.4676,
      "step": 47500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.840590970737035e-05,
      "loss": 2.4679,
      "step": 48000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8372544117014436e-05,
      "loss": 2.4644,
      "step": 48500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8338844691978456e-05,
      "loss": 2.4622,
      "step": 49000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.830481191359797e-05,
      "loss": 2.4605,
      "step": 49500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.82704462679699e-05,
      "loss": 2.4552,
      "step": 50000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.823574824594558e-05,
      "loss": 2.4501,
      "step": 50500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.820071834312372e-05,
      "loss": 2.4563,
      "step": 51000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.816535705984337e-05,
      "loss": 2.4467,
      "step": 51500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.812966490117676e-05,
      "loss": 2.4434,
      "step": 52000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.8093642376922057e-05,
      "loss": 2.4422,
      "step": 52500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.8057290001596106e-05,
      "loss": 2.4462,
      "step": 53000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.802060829442711e-05,
      "loss": 2.4424,
      "step": 53500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.798359777934714e-05,
      "loss": 2.4373,
      "step": 54000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.7946258984984735e-05,
      "loss": 2.4381,
      "step": 54500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.7908592444657287e-05,
      "loss": 2.4334,
      "step": 55000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.787059869636346e-05,
      "loss": 2.4328,
      "step": 55500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.783227828277549e-05,
      "loss": 2.4284,
      "step": 56000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.779363175123144e-05,
      "loss": 2.4278,
      "step": 56500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.7754659653727384e-05,
      "loss": 2.4231,
      "step": 57000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.771536254690951e-05,
      "loss": 2.4212,
      "step": 57500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.767574099206619e-05,
      "loss": 2.4247,
      "step": 58000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.763579555511993e-05,
      "loss": 2.4209,
      "step": 58500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.759552680661933e-05,
      "loss": 2.4224,
      "step": 59000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.755493532173092e-05,
      "loss": 2.4204,
      "step": 59500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.75140216802309e-05,
      "loss": 2.4201,
      "step": 60000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7472786466496944e-05,
      "loss": 2.4127,
      "step": 60500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7431230269499764e-05,
      "loss": 2.4146,
      "step": 61000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7389353682794765e-05,
      "loss": 2.4074,
      "step": 61500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7347157304513544e-05,
      "loss": 2.411,
      "step": 62000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7304641737355326e-05,
      "loss": 2.411,
      "step": 62500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7261807588578375e-05,
      "loss": 2.4068,
      "step": 63000
    },
    {
      "epoch": 0.45778831291669553,
      "grad_norm": 0.4898933470249176,
      "learning_rate": 4.721865546999135e-05,
      "loss": 2.3865,
      "step": 63500
    },
    {
      "epoch": 0.4613929453018663,
      "grad_norm": 0.5031083226203918,
      "learning_rate": 4.7175185997944485e-05,
      "loss": 2.3853,
      "step": 64000
    },
    {
      "epoch": 0.46499757768703714,
      "grad_norm": 0.5099691152572632,
      "learning_rate": 4.713139979332089e-05,
      "loss": 2.3843,
      "step": 64500
    },
    {
      "epoch": 0.468602210072208,
      "grad_norm": 0.528971791267395,
      "learning_rate": 4.708729748152759e-05,
      "loss": 2.3867,
      "step": 65000
    },
    {
      "epoch": 0.4722068424573788,
      "grad_norm": 0.5447410941123962,
      "learning_rate": 4.7042879692486655e-05,
      "loss": 2.3871,
      "step": 65500
    },
    {
      "epoch": 0.47581147484254965,
      "grad_norm": 0.5187677145004272,
      "learning_rate": 4.699814706062616e-05,
      "loss": 2.3821,
      "step": 66000
    },
    {
      "epoch": 0.4794161072277205,
      "grad_norm": 0.5079197883605957,
      "learning_rate": 4.6953100224871163e-05,
      "loss": 2.3846,
      "step": 66500
    },
    {
      "epoch": 0.4830207396128913,
      "grad_norm": 0.5204797387123108,
      "learning_rate": 4.6907739828634536e-05,
      "loss": 2.3867,
      "step": 67000
    },
    {
      "epoch": 0.48662537199806216,
      "grad_norm": 0.51589435338974,
      "learning_rate": 4.6862066519807816e-05,
      "loss": 2.3792,
      "step": 67500
    },
    {
      "epoch": 0.490230004383233,
      "grad_norm": 0.5342502593994141,
      "learning_rate": 4.681608095075192e-05,
      "loss": 2.3767,
      "step": 68000
    },
    {
      "epoch": 0.49383463676840383,
      "grad_norm": 0.5265562534332275,
      "learning_rate": 4.676978377828785e-05,
      "loss": 2.3839,
      "step": 68500
    },
    {
      "epoch": 0.4974392691535746,
      "grad_norm": 0.5328825116157532,
      "learning_rate": 4.67231756636873e-05,
      "loss": 2.3813,
      "step": 69000
    },
    {
      "epoch": 0.5010439015387455,
      "grad_norm": 0.4992935061454773,
      "learning_rate": 4.6676257272663194e-05,
      "loss": 2.377,
      "step": 69500
    },
    {
      "epoch": 0.5046485339239163,
      "grad_norm": 0.5137242674827576,
      "learning_rate": 4.662902927536022e-05,
      "loss": 2.3798,
      "step": 70000
    },
    {
      "epoch": 0.5082531663090871,
      "grad_norm": 0.5188072323799133,
      "learning_rate": 4.658149234634523e-05,
      "loss": 2.3783,
      "step": 70500
    },
    {
      "epoch": 0.511857798694258,
      "grad_norm": 0.5286343693733215,
      "learning_rate": 4.65336471645976e-05,
      "loss": 2.3719,
      "step": 71000
    },
    {
      "epoch": 0.5154624310794288,
      "grad_norm": 0.5098246335983276,
      "learning_rate": 4.648549441349953e-05,
      "loss": 2.3712,
      "step": 71500
    },
    {
      "epoch": 0.5190670634645996,
      "grad_norm": 0.528311014175415,
      "learning_rate": 4.643703478082632e-05,
      "loss": 2.37,
      "step": 72000
    },
    {
      "epoch": 0.5226716958497705,
      "grad_norm": 0.5106325745582581,
      "learning_rate": 4.638826895873651e-05,
      "loss": 2.3717,
      "step": 72500
    },
    {
      "epoch": 0.5262763282349413,
      "grad_norm": 0.5130262970924377,
      "learning_rate": 4.6339197643762e-05,
      "loss": 2.3669,
      "step": 73000
    },
    {
      "epoch": 0.5298809606201121,
      "grad_norm": 0.5350182056427002,
      "learning_rate": 4.628982153679811e-05,
      "loss": 2.3675,
      "step": 73500
    },
    {
      "epoch": 0.533485593005283,
      "grad_norm": 0.5258482098579407,
      "learning_rate": 4.6240141343093545e-05,
      "loss": 2.3673,
      "step": 74000
    },
    {
      "epoch": 0.5370902253904538,
      "grad_norm": 0.5219452977180481,
      "learning_rate": 4.6190157772240385e-05,
      "loss": 2.3644,
      "step": 74500
    },
    {
      "epoch": 0.5406948577756246,
      "grad_norm": 0.531566858291626,
      "learning_rate": 4.6139871538163856e-05,
      "loss": 2.366,
      "step": 75000
    },
    {
      "epoch": 0.5442994901607955,
      "grad_norm": 0.5164352655410767,
      "learning_rate": 4.608928335911222e-05,
      "loss": 2.3651,
      "step": 75500
    },
    {
      "epoch": 0.5479041225459663,
      "grad_norm": 0.558618426322937,
      "learning_rate": 4.603839395764646e-05,
      "loss": 2.3618,
      "step": 76000
    },
    {
      "epoch": 0.5515087549311372,
      "grad_norm": 0.5367295145988464,
      "learning_rate": 4.598720406062999e-05,
      "loss": 2.3581,
      "step": 76500
    },
    {
      "epoch": 0.5551133873163079,
      "grad_norm": 0.5240716934204102,
      "learning_rate": 4.593571439921825e-05,
      "loss": 2.3609,
      "step": 77000
    },
    {
      "epoch": 0.5587180197014787,
      "grad_norm": 0.5218440890312195,
      "learning_rate": 4.5883925708848275e-05,
      "loss": 2.3583,
      "step": 77500
    },
    {
      "epoch": 0.5623226520866496,
      "grad_norm": 0.5101985335350037,
      "learning_rate": 4.58318387292282e-05,
      "loss": 2.3587,
      "step": 78000
    },
    {
      "epoch": 0.636668195030798,
      "grad_norm": 0.527410626411438,
      "learning_rate": 4.469806659761896e-05,
      "loss": 2.3498,
      "step": 78500
    },
    {
      "epoch": 0.6407234064641152,
      "grad_norm": 0.4921613335609436,
      "learning_rate": 4.4632426356679503e-05,
      "loss": 2.3469,
      "step": 79000
    },
    {
      "epoch": 0.6447786178974323,
      "grad_norm": 0.4988476037979126,
      "learning_rate": 4.456643111161236e-05,
      "loss": 2.348,
      "step": 79500
    },
    {
      "epoch": 0.6488338293307495,
      "grad_norm": 0.48752740025520325,
      "learning_rate": 4.450008205577918e-05,
      "loss": 2.3469,
      "step": 80000
    },
    {
      "epoch": 0.6528890407640667,
      "grad_norm": 0.5155342817306519,
      "learning_rate": 4.443338038893938e-05,
      "loss": 2.346,
      "step": 80500
    },
    {
      "epoch": 0.656944252197384,
      "grad_norm": 0.4966714680194855,
      "learning_rate": 4.4366327317228506e-05,
      "loss": 2.346,
      "step": 81000
    },
    {
      "epoch": 0.6609994636307011,
      "grad_norm": 0.49693840742111206,
      "learning_rate": 4.429892405313638e-05,
      "loss": 2.3406,
      "step": 81500
    },
    {
      "epoch": 0.6650546750640183,
      "grad_norm": 0.4809018671512604,
      "learning_rate": 4.42311718154852e-05,
      "loss": 2.3429,
      "step": 82000
    },
    {
      "epoch": 0.6691098864973355,
      "grad_norm": 0.5525850653648376,
      "learning_rate": 4.416307182940748e-05,
      "loss": 2.3424,
      "step": 82500
    },
    {
      "epoch": 0.6731650979306526,
      "grad_norm": 0.5057451128959656,
      "learning_rate": 4.409462532632391e-05,
      "loss": 2.3388,
      "step": 83000
    },
    {
      "epoch": 0.6772203093639698,
      "grad_norm": 0.5083210468292236,
      "learning_rate": 4.402583354392112e-05,
      "loss": 2.3395,
      "step": 83500
    },
    {
      "epoch": 0.681275520797287,
      "grad_norm": 0.49632710218429565,
      "learning_rate": 4.3956697726129223e-05,
      "loss": 2.3335,
      "step": 84000
    },
    {
      "epoch": 0.6853307322306041,
      "grad_norm": 0.5196436047554016,
      "learning_rate": 4.388721912309939e-05,
      "loss": 2.3361,
      "step": 84500
    },
    {
      "epoch": 0.6893859436639214,
      "grad_norm": 0.49743419885635376,
      "learning_rate": 4.381739899118121e-05,
      "loss": 2.3352,
      "step": 85000
    },
    {
      "epoch": 0.6934411550972386,
      "grad_norm": 0.4990304112434387,
      "learning_rate": 4.3747238592899994e-05,
      "loss": 2.3314,
      "step": 85500
    },
    {
      "epoch": 0.6974963665305558,
      "grad_norm": 0.4926765263080597,
      "learning_rate": 4.367673919693392e-05,
      "loss": 2.3346,
      "step": 86000
    },
    {
      "epoch": 0.7015515779638729,
      "grad_norm": 0.520667314529419,
      "learning_rate": 4.3605902078091106e-05,
      "loss": 2.3334,
      "step": 86500
    },
    {
      "epoch": 0.7056067893971901,
      "grad_norm": 0.5267304182052612,
      "learning_rate": 4.353472851728656e-05,
      "loss": 2.3295,
      "step": 87000
    },
    {
      "epoch": 0.7096620008305073,
      "grad_norm": 0.5232897996902466,
      "learning_rate": 4.346321980151903e-05,
      "loss": 2.3295,
      "step": 87500
    },
    {
      "epoch": 0.7137172122638245,
      "grad_norm": 0.5169503092765808,
      "learning_rate": 4.3391377223847684e-05,
      "loss": 2.325,
      "step": 88000
    },
    {
      "epoch": 0.7177724236971417,
      "grad_norm": 0.5122842192649841,
      "learning_rate": 4.331920208336879e-05,
      "loss": 2.3294,
      "step": 88500
    },
    {
      "epoch": 0.7218276351304589,
      "grad_norm": 0.5060274600982666,
      "learning_rate": 4.3246695685192185e-05,
      "loss": 2.3226,
      "step": 89000
    },
    {
      "epoch": 0.725882846563776,
      "grad_norm": 0.5004491806030273,
      "learning_rate": 4.317385934041769e-05,
      "loss": 2.3262,
      "step": 89500
    },
    {
      "epoch": 0.7299380579970932,
      "grad_norm": 0.5048273801803589,
      "learning_rate": 4.310069436611139e-05,
      "loss": 2.3279,
      "step": 90000
    },
    {
      "epoch": 0.7339932694304104,
      "grad_norm": 0.5081968307495117,
      "learning_rate": 4.3027202085281844e-05,
      "loss": 2.3263,
      "step": 90500
    },
    {
      "epoch": 0.7380484808637275,
      "grad_norm": 0.5030719041824341,
      "learning_rate": 4.295338382685612e-05,
      "loss": 2.3238,
      "step": 91000
    },
    {
      "epoch": 0.7421036922970448,
      "grad_norm": 0.5075569748878479,
      "learning_rate": 4.28792409256558e-05,
      "loss": 2.324,
      "step": 91500
    },
    {
      "epoch": 0.746158903730362,
      "grad_norm": 0.5366798639297485,
      "learning_rate": 4.2804774722372855e-05,
      "loss": 2.3163,
      "step": 92000
    },
    {
      "epoch": 0.7502141151636792,
      "grad_norm": 0.49396347999572754,
      "learning_rate": 4.272998656354533e-05,
      "loss": 2.3187,
      "step": 92500
    },
    {
      "epoch": 0.7542693265969963,
      "grad_norm": 0.5059054493904114,
      "learning_rate": 4.265487780153309e-05,
      "loss": 2.3147,
      "step": 93000
    },
    {
      "epoch": 0.7583245380303135,
      "grad_norm": 0.5356733202934265,
      "learning_rate": 4.257944979449329e-05,
      "loss": 2.3163,
      "step": 93500
    },
    {
      "epoch": 0.7623797494636307,
      "grad_norm": 0.5293506979942322,
      "learning_rate": 4.250370390635586e-05,
      "loss": 2.3121,
      "step": 94000
    },
    {
      "epoch": 0.7664349608969478,
      "grad_norm": 0.5404239296913147,
      "learning_rate": 4.242764150679883e-05,
      "loss": 2.3174,
      "step": 94500
    },
    {
      "epoch": 0.7704901723302651,
      "grad_norm": 0.5234089493751526,
      "learning_rate": 4.235126397122356e-05,
      "loss": 2.3179,
      "step": 95000
    },
    {
      "epoch": 0.7745453837635823,
      "grad_norm": 0.5130496025085449,
      "learning_rate": 4.227457268072984e-05,
      "loss": 2.3156,
      "step": 95500
    },
    {
      "epoch": 0.7786005951968994,
      "grad_norm": 0.50419020652771,
      "learning_rate": 4.2197569022090984e-05,
      "loss": 2.3128,
      "step": 96000
    },
    {
      "epoch": 0.7826558066302166,
      "grad_norm": 0.515084981918335,
      "learning_rate": 4.21202543877287e-05,
      "loss": 2.3113,
      "step": 96500
    },
    {
      "epoch": 0.7867110180635338,
      "grad_norm": 0.516667366027832,
      "learning_rate": 4.204263017568793e-05,
      "loss": 2.3111,
      "step": 97000
    },
    {
      "epoch": 0.790766229496851,
      "grad_norm": 0.5135312676429749,
      "learning_rate": 4.196469778961157e-05,
      "loss": 2.3105,
      "step": 97500
    },
    {
      "epoch": 0.7948214409301682,
      "grad_norm": 0.5299580097198486,
      "learning_rate": 4.1886458638715065e-05,
      "loss": 2.3097,
      "step": 98000
    },
    {
      "epoch": 0.7988766523634854,
      "grad_norm": 0.5144892334938049,
      "learning_rate": 4.180791413776097e-05,
      "loss": 2.3048,
      "step": 98500
    },
    {
      "epoch": 0.8029318637968026,
      "grad_norm": 0.49354636669158936,
      "learning_rate": 4.172906570703334e-05,
      "loss": 2.3086,
      "step": 99000
    },
    {
      "epoch": 0.8069870752301197,
      "grad_norm": 0.5165512561798096,
      "learning_rate": 4.164991477231205e-05,
      "loss": 2.3016,
      "step": 99500
    },
    {
      "epoch": 0.8110422866634369,
      "grad_norm": 0.5210604071617126,
      "learning_rate": 4.1570462764847015e-05,
      "loss": 2.3061,
      "step": 100000
    },
    {
      "epoch": 0.8150974980967541,
      "grad_norm": 0.5409877896308899,
      "learning_rate": 4.1490711121332295e-05,
      "loss": 2.3047,
      "step": 100500
    },
    {
      "epoch": 0.8191527095300712,
      "grad_norm": 0.5180912613868713,
      "learning_rate": 4.141066128388015e-05,
      "loss": 2.3022,
      "step": 101000
    },
    {
      "epoch": 0.8232079209633885,
      "grad_norm": 0.50931715965271,
      "learning_rate": 4.1330314699994944e-05,
      "loss": 2.3028,
      "step": 101500
    },
    {
      "epoch": 0.8272631323967057,
      "grad_norm": 0.5214248895645142,
      "learning_rate": 4.1249672822546955e-05,
      "loss": 2.3008,
      "step": 102000
    },
    {
      "epoch": 0.8313183438300228,
      "grad_norm": 0.5405526757240295,
      "learning_rate": 4.116873710974615e-05,
      "loss": 2.3023,
      "step": 102500
    },
    {
      "epoch": 0.83537355526334,
      "grad_norm": 0.5330822467803955,
      "learning_rate": 4.1087509025115734e-05,
      "loss": 2.3051,
      "step": 103000
    },
    {
      "epoch": 0.8394287666966572,
      "grad_norm": 0.5409587621688843,
      "learning_rate": 4.1005990037465785e-05,
      "loss": 2.302,
      "step": 103500
    },
    {
      "epoch": 0.8434839781299744,
      "grad_norm": 0.5315828919410706,
      "learning_rate": 4.0924181620866624e-05,
      "loss": 2.2981,
      "step": 104000
    },
    {
      "epoch": 0.8475391895632916,
      "grad_norm": 0.5308647751808167,
      "learning_rate": 4.084208525462219e-05,
      "loss": 2.2938,
      "step": 104500
    },
    {
      "epoch": 0.8515944009966088,
      "grad_norm": 0.5263886451721191,
      "learning_rate": 4.0759702423243276e-05,
      "loss": 2.295,
      "step": 105000
    },
    {
      "epoch": 0.855649612429926,
      "grad_norm": 0.5318562388420105,
      "learning_rate": 4.067703461642069e-05,
      "loss": 2.2964,
      "step": 105500
    },
    {
      "epoch": 0.8597048238632431,
      "grad_norm": 0.5380021333694458,
      "learning_rate": 4.0594083328998335e-05,
      "loss": 2.2895,
      "step": 106000
    },
    {
      "epoch": 0.8637600352965603,
      "grad_norm": 0.537627637386322,
      "learning_rate": 4.0510850060946143e-05,
      "loss": 2.2922,
      "step": 106500
    },
    {
      "epoch": 0.8678152467298775,
      "grad_norm": 0.5394497513771057,
      "learning_rate": 4.042733631733299e-05,
      "loss": 2.2961,
      "step": 107000
    },
    {
      "epoch": 0.8718704581631946,
      "grad_norm": 0.5409214496612549,
      "learning_rate": 4.034354360829946e-05,
      "loss": 2.294,
      "step": 107500
    },
    {
      "epoch": 0.8759256695965119,
      "grad_norm": 0.5413061380386353,
      "learning_rate": 4.025947344903052e-05,
      "loss": 2.2911,
      "step": 108000
    },
    {
      "epoch": 0.8799808810298291,
      "grad_norm": 0.5403317809104919,
      "learning_rate": 4.017512735972816e-05,
      "loss": 2.2906,
      "step": 108500
    },
    {
      "epoch": 0.8840360924631462,
      "grad_norm": 0.5209652185440063,
      "learning_rate": 4.009050686558389e-05,
      "loss": 2.2883,
      "step": 109000
    },
    {
      "epoch": 0.8880913038964634,
      "grad_norm": 0.5193003416061401,
      "learning_rate": 4.000561349675115e-05,
      "loss": 2.2903,
      "step": 109500
    },
    {
      "epoch": 0.8921465153297806,
      "grad_norm": 0.5441010594367981,
      "learning_rate": 3.992044878831764e-05,
      "loss": 2.2841,
      "step": 110000
    },
    {
      "epoch": 0.8962017267630978,
      "grad_norm": 0.536781907081604,
      "learning_rate": 3.9835014280277584e-05,
      "loss": 2.2843,
      "step": 110500
    },
    {
      "epoch": 0.900256938196415,
      "grad_norm": 0.5598934888839722,
      "learning_rate": 3.974931151750385e-05,
      "loss": 2.293,
      "step": 111000
    },
    {
      "epoch": 0.9043121496297322,
      "grad_norm": 0.5360564589500427,
      "learning_rate": 3.9663342049720046e-05,
      "loss": 2.2845,
      "step": 111500
    },
    {
      "epoch": 0.9083673610630494,
      "grad_norm": 0.5301195383071899,
      "learning_rate": 3.957710743147248e-05,
      "loss": 2.2888,
      "step": 112000
    },
    {
      "epoch": 0.9124225724963665,
      "grad_norm": 0.536584198474884,
      "learning_rate": 3.9490609222102056e-05,
      "loss": 2.2867,
      "step": 112500
    },
    {
      "epoch": 0.9164777839296837,
      "grad_norm": 0.5443806648254395,
      "learning_rate": 3.940384898571608e-05,
      "loss": 2.281,
      "step": 113000
    },
    {
      "epoch": 0.9205329953630009,
      "grad_norm": 0.5341424942016602,
      "learning_rate": 3.9316828291159945e-05,
      "loss": 2.2862,
      "step": 113500
    },
    {
      "epoch": 0.924588206796318,
      "grad_norm": 0.5316222906112671,
      "learning_rate": 3.922954871198883e-05,
      "loss": 2.2854,
      "step": 114000
    },
    {
      "epoch": 0.9286434182296353,
      "grad_norm": 0.5432649850845337,
      "learning_rate": 3.914201182643917e-05,
      "loss": 2.2822,
      "step": 114500
    },
    {
      "epoch": 0.9326986296629525,
      "grad_norm": 0.5397413969039917,
      "learning_rate": 3.9054219217400176e-05,
      "loss": 2.2812,
      "step": 115000
    },
    {
      "epoch": 0.9367538410962697,
      "grad_norm": 0.5237780809402466,
      "learning_rate": 3.8966172472385175e-05,
      "loss": 2.2797,
      "step": 115500
    },
    {
      "epoch": 0.9408090525295868,
      "grad_norm": 0.5564081072807312,
      "learning_rate": 3.8877873183502934e-05,
      "loss": 2.2812,
      "step": 116000
    },
    {
      "epoch": 0.944864263962904,
      "grad_norm": 0.5437479615211487,
      "learning_rate": 3.878932294742883e-05,
      "loss": 2.2806,
      "step": 116500
    },
    {
      "epoch": 0.9489194753962212,
      "grad_norm": 0.5499303340911865,
      "learning_rate": 3.870052336537602e-05,
      "loss": 2.2763,
      "step": 117000
    },
    {
      "epoch": 0.9529746868295383,
      "grad_norm": 0.5608726143836975,
      "learning_rate": 3.861147604306647e-05,
      "loss": 2.2767,
      "step": 117500
    },
    {
      "epoch": 0.9570298982628556,
      "grad_norm": 0.5344070196151733,
      "learning_rate": 3.8522182590701905e-05,
      "loss": 2.2747,
      "step": 118000
    },
    {
      "epoch": 0.9610851096961728,
      "grad_norm": 0.5565013289451599,
      "learning_rate": 3.843264462293473e-05,
      "loss": 2.2789,
      "step": 118500
    },
    {
      "epoch": 0.9651403211294899,
      "grad_norm": 0.5644780993461609,
      "learning_rate": 3.83428637588838e-05,
      "loss": 2.2784,
      "step": 119000
    },
    {
      "epoch": 0.9691955325628071,
      "grad_norm": 0.5427052974700928,
      "learning_rate": 3.825284162188012e-05,
      "loss": 2.2732,
      "step": 119500
    },
    {
      "epoch": 0.9732507439961243,
      "grad_norm": 0.546487033367157,
      "learning_rate": 3.8162579839887576e-05,
      "loss": 2.2758,
      "step": 120000
    },
    {
      "epoch": 0.9773059554294414,
      "grad_norm": 0.5496521592140198,
      "learning_rate": 3.807208004502341e-05,
      "loss": 2.2732,
      "step": 120500
    },
    {
      "epoch": 0.9813611668627586,
      "grad_norm": 0.5730307102203369,
      "learning_rate": 3.798134387375374e-05,
      "loss": 2.2674,
      "step": 121000
    },
    {
      "epoch": 0.9854163782960759,
      "grad_norm": 0.5476057529449463,
      "learning_rate": 3.7890372966819e-05,
      "loss": 2.2699,
      "step": 121500
    },
    {
      "epoch": 0.9894715897293931,
      "grad_norm": 0.5156444907188416,
      "learning_rate": 3.7799168969204204e-05,
      "loss": 2.2692,
      "step": 122000
    },
    {
      "epoch": 0.9935268011627102,
      "grad_norm": 0.5473557710647583,
      "learning_rate": 3.770773353010924e-05,
      "loss": 2.2701,
      "step": 122500
    },
    {
      "epoch": 0.9975820125960274,
      "grad_norm": 0.5534400343894958,
      "learning_rate": 3.761606830291911e-05,
      "loss": 2.2712,
      "step": 123000
    },
    {
      "epoch": 1.0016372240293445,
      "grad_norm": 0.5497949123382568,
      "learning_rate": 3.752417494517386e-05,
      "loss": 2.2592,
      "step": 123500
    },
    {
      "epoch": 1.0056924354626617,
      "grad_norm": 0.5588605403900146,
      "learning_rate": 3.7432055118538806e-05,
      "loss": 2.2494,
      "step": 124000
    },
    {
      "epoch": 1.009747646895979,
      "grad_norm": 0.5374738574028015,
      "learning_rate": 3.733971048877435e-05,
      "loss": 2.2487,
      "step": 124500
    },
    {
      "epoch": 1.0138028583292962,
      "grad_norm": 0.5497466325759888,
      "learning_rate": 3.724714272570593e-05,
      "loss": 2.2461,
      "step": 125000
    },
    {
      "epoch": 1.0178580697626134,
      "grad_norm": 0.5669336915016174,
      "learning_rate": 3.715435350319379e-05,
      "loss": 2.2415,
      "step": 125500
    },
    {
      "epoch": 1.0219132811959306,
      "grad_norm": 0.5785222053527832,
      "learning_rate": 3.706134449910271e-05,
      "loss": 2.2502,
      "step": 126000
    }
  ],
  "logging_steps": 500,
  "max_steps": 369894,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 3000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.834342663980319e+19,
  "train_batch_size": 12,
  "trial_name": null,
  "trial_params": null
}