{
  "best_metric": 2.340223550796509,
  "best_model_checkpoint": "results/models/prophetnet-large-uncased-cnndm-NewsRoom/checkpoint-97402",
  "epoch": 2.0,
  "global_step": 97402,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.9948871686413012e-05,
      "loss": 3.115,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9897640705529663e-05,
      "loss": 2.9868,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9846307057349954e-05,
      "loss": 2.9327,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9794973409170245e-05,
      "loss": 2.9211,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9743639760990536e-05,
      "loss": 2.9204,
      "step": 2500
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9692306112810827e-05,
      "loss": 2.8705,
      "step": 3000
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9640972464631118e-05,
      "loss": 2.8643,
      "step": 3500
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.958974148374777e-05,
      "loss": 2.8546,
      "step": 4000
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.953840783556806e-05,
      "loss": 2.8215,
      "step": 4500
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.948707418738835e-05,
      "loss": 2.8266,
      "step": 5000
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9435740539208642e-05,
      "loss": 2.7956,
      "step": 5500
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9384509558325293e-05,
      "loss": 2.807,
      "step": 6000
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9333175910145584e-05,
      "loss": 2.8149,
      "step": 6500
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9281842261965875e-05,
      "loss": 2.8123,
      "step": 7000
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9230508613786166e-05,
      "loss": 2.7988,
      "step": 7500
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9179174965606457e-05,
      "loss": 2.7968,
      "step": 8000
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9128046652019467e-05,
      "loss": 2.8055,
      "step": 8500
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9076815671136118e-05,
      "loss": 2.7762,
      "step": 9000
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.902548202295641e-05,
      "loss": 2.7965,
      "step": 9500
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.89741483747767e-05,
      "loss": 2.7588,
      "step": 10000
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.892281472659699e-05,
      "loss": 2.7493,
      "step": 10500
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.8871481078417282e-05,
      "loss": 2.741,
      "step": 11000
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.8820147430237573e-05,
      "loss": 2.7593,
      "step": 11500
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.8768813782057864e-05,
      "loss": 2.7392,
      "step": 12000
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8717685468470875e-05,
      "loss": 2.7177,
      "step": 12500
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8666351820291166e-05,
      "loss": 2.726,
      "step": 13000
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8615018172111457e-05,
      "loss": 2.7165,
      "step": 13500
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8563684523931748e-05,
      "loss": 2.7333,
      "step": 14000
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.851235087575204e-05,
      "loss": 2.725,
      "step": 14500
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.846101722757233e-05,
      "loss": 2.728,
      "step": 15000
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.840968357939262e-05,
      "loss": 2.7335,
      "step": 15500
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8358349931212912e-05,
      "loss": 2.7167,
      "step": 16000
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8307016283033203e-05,
      "loss": 2.7273,
      "step": 16500
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8255682634853494e-05,
      "loss": 2.6938,
      "step": 17000
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8204348986673785e-05,
      "loss": 2.7068,
      "step": 17500
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8153015338494076e-05,
      "loss": 2.7108,
      "step": 18000
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8101681690314367e-05,
      "loss": 2.6894,
      "step": 18500
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.805045070943102e-05,
      "loss": 2.7194,
      "step": 19000
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.799911706125131e-05,
      "loss": 2.6978,
      "step": 19500
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.7947783413071603e-05,
      "loss": 2.6842,
      "step": 20000
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.789644976489189e-05,
      "loss": 2.7017,
      "step": 20500
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7845116116712185e-05,
      "loss": 2.6924,
      "step": 21000
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7793782468532473e-05,
      "loss": 2.7021,
      "step": 21500
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7742551487649127e-05,
      "loss": 2.6904,
      "step": 22000
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7691217839469418e-05,
      "loss": 2.6983,
      "step": 22500
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7639986858586066e-05,
      "loss": 2.7012,
      "step": 23000
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.758865321040636e-05,
      "loss": 2.6839,
      "step": 23500
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7537319562226648e-05,
      "loss": 2.6706,
      "step": 24000
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7486088581343302e-05,
      "loss": 2.6806,
      "step": 24500
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7434857600459953e-05,
      "loss": 2.6741,
      "step": 25000
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.738352395228024e-05,
      "loss": 2.6666,
      "step": 25500
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7332190304100535e-05,
      "loss": 2.6669,
      "step": 26000
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7280856655920822e-05,
      "loss": 2.6774,
      "step": 26500
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7229523007741117e-05,
      "loss": 2.6755,
      "step": 27000
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7178189359561404e-05,
      "loss": 2.6688,
      "step": 27500
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.71268557113817e-05,
      "loss": 2.6464,
      "step": 28000
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7075522063201986e-05,
      "loss": 2.6501,
      "step": 28500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.702418841502228e-05,
      "loss": 2.6459,
      "step": 29000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.6972854766842572e-05,
      "loss": 2.6579,
      "step": 29500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.6921521118662863e-05,
      "loss": 2.6277,
      "step": 30000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.6870187470483154e-05,
      "loss": 2.6606,
      "step": 30500
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.6818853822303445e-05,
      "loss": 2.6642,
      "step": 31000
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.6767520174123736e-05,
      "loss": 2.6157,
      "step": 31500
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6716186525944027e-05,
      "loss": 2.6221,
      "step": 32000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6664852877764318e-05,
      "loss": 2.6417,
      "step": 32500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.661351922958461e-05,
      "loss": 2.647,
      "step": 33000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.656228824870126e-05,
      "loss": 2.6216,
      "step": 33500
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.651095460052155e-05,
      "loss": 2.6208,
      "step": 34000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.6459620952341842e-05,
      "loss": 2.6175,
      "step": 34500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.6408287304162133e-05,
      "loss": 2.621,
      "step": 35000
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.6357056323278784e-05,
      "loss": 2.6139,
      "step": 35500
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.6305722675099075e-05,
      "loss": 2.6424,
      "step": 36000
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6254389026919366e-05,
      "loss": 2.6405,
      "step": 36500
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6203055378739657e-05,
      "loss": 2.5977,
      "step": 37000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6151721730559948e-05,
      "loss": 2.6184,
      "step": 37500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6100388082380242e-05,
      "loss": 2.6256,
      "step": 38000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.604905443420053e-05,
      "loss": 2.6289,
      "step": 38500
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.5997720786020824e-05,
      "loss": 2.6286,
      "step": 39000
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.594659247243383e-05,
      "loss": 2.5953,
      "step": 39500
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.5895258824254122e-05,
      "loss": 2.6369,
      "step": 40000
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.5843925176074413e-05,
      "loss": 2.6003,
      "step": 40500
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.5792591527894704e-05,
      "loss": 2.6032,
      "step": 41000
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.5741257879715e-05,
      "loss": 2.6312,
      "step": 41500
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.5689924231535287e-05,
      "loss": 2.6386,
      "step": 42000
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.563859058335558e-05,
      "loss": 2.6112,
      "step": 42500
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.558725693517587e-05,
      "loss": 2.6156,
      "step": 43000
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.5535923286996163e-05,
      "loss": 2.5918,
      "step": 43500
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.548458963881645e-05,
      "loss": 2.6063,
      "step": 44000
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.5433358657933105e-05,
      "loss": 2.6263,
      "step": 44500
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.5382025009753392e-05,
      "loss": 2.6083,
      "step": 45000
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.5330691361573687e-05,
      "loss": 2.602,
      "step": 45500
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.5279357713393978e-05,
      "loss": 2.5989,
      "step": 46000
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5228024065214269e-05,
      "loss": 2.5898,
      "step": 46500
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5176690417034558e-05,
      "loss": 2.6033,
      "step": 47000
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5125356768854851e-05,
      "loss": 2.5861,
      "step": 47500
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.50741257879715e-05,
      "loss": 2.5959,
      "step": 48000
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5022792139791791e-05,
      "loss": 2.6006,
      "step": 48500
    },
    {
      "epoch": 1.0,
      "eval_gen_len": 13.17,
      "eval_loss": 2.431262493133545,
      "eval_rouge1": 36.6311,
      "eval_rouge2": 19.2397,
      "eval_rougeL": 34.5043,
      "eval_rougeLsum": 34.9589,
      "eval_runtime": 12.9254,
      "eval_samples_per_second": 7.737,
      "eval_steps_per_second": 0.542,
      "step": 48701
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.4971458491612084e-05,
      "loss": 2.4662,
      "step": 49000
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.4920124843432375e-05,
      "loss": 2.3465,
      "step": 49500
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.4868791195252666e-05,
      "loss": 2.3791,
      "step": 50000
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.4817457547072957e-05,
      "loss": 2.3492,
      "step": 50500
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.4766123898893248e-05,
      "loss": 2.3735,
      "step": 51000
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.471479025071354e-05,
      "loss": 2.3669,
      "step": 51500
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.466345660253383e-05,
      "loss": 2.3797,
      "step": 52000
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4612122954354123e-05,
      "loss": 2.3744,
      "step": 52500
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4560891973470772e-05,
      "loss": 2.3798,
      "step": 53000
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4509558325291063e-05,
      "loss": 2.3719,
      "step": 53500
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4458224677111354e-05,
      "loss": 2.3724,
      "step": 54000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4406993696228005e-05,
      "loss": 2.3723,
      "step": 54500
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4355660048048297e-05,
      "loss": 2.3588,
      "step": 55000
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4304326399868587e-05,
      "loss": 2.3719,
      "step": 55500
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4253095418985237e-05,
      "loss": 2.3747,
      "step": 56000
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4201761770805528e-05,
      "loss": 2.3708,
      "step": 56500
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.415042812262582e-05,
      "loss": 2.3665,
      "step": 57000
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.409909447444611e-05,
      "loss": 2.3773,
      "step": 57500
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.4047760826266402e-05,
      "loss": 2.3692,
      "step": 58000
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3996427178086693e-05,
      "loss": 2.3853,
      "step": 58500
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3945093529906985e-05,
      "loss": 2.3764,
      "step": 59000
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3893759881727276e-05,
      "loss": 2.3565,
      "step": 59500
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3842426233547567e-05,
      "loss": 2.3514,
      "step": 60000
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3791092585367858e-05,
      "loss": 2.3657,
      "step": 60500
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.373975893718815e-05,
      "loss": 2.3637,
      "step": 61000
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.368842528900844e-05,
      "loss": 2.3653,
      "step": 61500
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.363719430812509e-05,
      "loss": 2.3765,
      "step": 62000
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.3585860659945382e-05,
      "loss": 2.3799,
      "step": 62500
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.3534527011765672e-05,
      "loss": 2.3681,
      "step": 63000
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.3483193363585964e-05,
      "loss": 2.3751,
      "step": 63500
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.3431859715406255e-05,
      "loss": 2.3787,
      "step": 64000
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.3380526067226546e-05,
      "loss": 2.3769,
      "step": 64500
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.3329192419046837e-05,
      "loss": 2.3727,
      "step": 65000
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.327796143816349e-05,
      "loss": 2.3642,
      "step": 65500
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.322662778998378e-05,
      "loss": 2.3593,
      "step": 66000
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.317539680910043e-05,
      "loss": 2.3817,
      "step": 66500
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.3124063160920721e-05,
      "loss": 2.3743,
      "step": 67000
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.3072729512741014e-05,
      "loss": 2.402,
      "step": 67500
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.3021395864561303e-05,
      "loss": 2.3731,
      "step": 68000
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.2970062216381596e-05,
      "loss": 2.3722,
      "step": 68500
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.2918728568201885e-05,
      "loss": 2.3703,
      "step": 69000
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.2867394920022178e-05,
      "loss": 2.3565,
      "step": 69500
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.2816061271842469e-05,
      "loss": 2.3842,
      "step": 70000
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.276472762366276e-05,
      "loss": 2.3783,
      "step": 70500
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.2713393975483051e-05,
      "loss": 2.3662,
      "step": 71000
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.2662060327303342e-05,
      "loss": 2.3613,
      "step": 71500
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.2610726679123633e-05,
      "loss": 2.3799,
      "step": 72000
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.2559495698240282e-05,
      "loss": 2.3457,
      "step": 72500
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.2508162050060575e-05,
      "loss": 2.3773,
      "step": 73000
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.2456931069177226e-05,
      "loss": 2.3736,
      "step": 73500
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.2405597420997517e-05,
      "loss": 2.3625,
      "step": 74000
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.2354263772817808e-05,
      "loss": 2.3818,
      "step": 74500
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.2302930124638099e-05,
      "loss": 2.3538,
      "step": 75000
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.225159647645839e-05,
      "loss": 2.381,
      "step": 75500
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.2200262828278682e-05,
      "loss": 2.3648,
      "step": 76000
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.2148929180098972e-05,
      "loss": 2.3679,
      "step": 76500
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.2097595531919265e-05,
      "loss": 2.3437,
      "step": 77000
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.2046364551035914e-05,
      "loss": 2.36,
      "step": 77500
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.1995030902856206e-05,
      "loss": 2.3563,
      "step": 78000
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.1943697254676496e-05,
      "loss": 2.3474,
      "step": 78500
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.1892363606496788e-05,
      "loss": 2.3577,
      "step": 79000
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.1841029958317078e-05,
      "loss": 2.3795,
      "step": 79500
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.178969631013737e-05,
      "loss": 2.3727,
      "step": 80000
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.1738362661957661e-05,
      "loss": 2.3259,
      "step": 80500
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.1687029013777953e-05,
      "loss": 2.3806,
      "step": 81000
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.1635798032894603e-05,
      "loss": 2.3705,
      "step": 81500
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.1584464384714893e-05,
      "loss": 2.3839,
      "step": 82000
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.1533130736535185e-05,
      "loss": 2.3448,
      "step": 82500
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.1481797088355475e-05,
      "loss": 2.3528,
      "step": 83000
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.1430463440175767e-05,
      "loss": 2.3618,
      "step": 83500
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.1379129791996057e-05,
      "loss": 2.3763,
      "step": 84000
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.132789881111271e-05,
      "loss": 2.3391,
      "step": 84500
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.1276565162933e-05,
      "loss": 2.3545,
      "step": 85000
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.1225231514753291e-05,
      "loss": 2.3352,
      "step": 85500
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.1173897866573582e-05,
      "loss": 2.3718,
      "step": 86000
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.1122666885690235e-05,
      "loss": 2.3774,
      "step": 86500
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.1071333237510524e-05,
      "loss": 2.3454,
      "step": 87000
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.1020102256627175e-05,
      "loss": 2.3579,
      "step": 87500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.0968871275743827e-05,
      "loss": 2.3349,
      "step": 88000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.0917640294860476e-05,
      "loss": 2.3655,
      "step": 88500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.0866306646680766e-05,
      "loss": 2.3605,
      "step": 89000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.0814972998501058e-05,
      "loss": 2.3601,
      "step": 89500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.076363935032135e-05,
      "loss": 2.3622,
      "step": 90000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.071230570214164e-05,
      "loss": 2.3586,
      "step": 90500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.0660972053961932e-05,
      "loss": 2.3637,
      "step": 91000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.0609638405782223e-05,
      "loss": 2.3384,
      "step": 91500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.0558304757602514e-05,
      "loss": 2.3503,
      "step": 92000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.0506971109422806e-05,
      "loss": 2.3703,
      "step": 92500
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.0455637461243096e-05,
      "loss": 2.3415,
      "step": 93000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.0404303813063388e-05,
      "loss": 2.3576,
      "step": 93500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.0352970164883678e-05,
      "loss": 2.3351,
      "step": 94000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.030173918400033e-05,
      "loss": 2.3393,
      "step": 94500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.025040553582062e-05,
      "loss": 2.3387,
      "step": 95000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.019917455493727e-05,
      "loss": 2.3464,
      "step": 95500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.0147840906757563e-05,
      "loss": 2.3427,
      "step": 96000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.0096507258577854e-05,
      "loss": 2.3678,
      "step": 96500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.0045173610398145e-05,
      "loss": 2.3497,
      "step": 97000
    },
    {
      "epoch": 2.0,
      "eval_gen_len": 13.07,
      "eval_loss": 2.340223550796509,
      "eval_rouge1": 38.6817,
      "eval_rouge2": 21.0493,
      "eval_rougeL": 37.0384,
      "eval_rougeLsum": 37.1377,
      "eval_runtime": 8.7057,
      "eval_samples_per_second": 11.487,
      "eval_steps_per_second": 0.804,
      "step": 97402
    }
  ],
  "max_steps": 194804,
  "num_train_epochs": 4,
  "total_flos": 3.4246205705319014e+18,
  "trial_name": null,
  "trial_params": null
}