{
  "best_metric": 75.75802081436439,
  "best_model_checkpoint": "/home/jcanete/ft-data/all_results/tar/albeto_base_8/epochs_2_bs_16_lr_5e-5/checkpoint-10500",
  "epoch": 2.0,
  "global_step": 10970,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "eval_exact_match": 43.973509933774835,
      "eval_f1": 62.94849738783248,
      "step": 300
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.773473108477667e-05,
      "loss": 2.4433,
      "step": 500
    },
    {
      "epoch": 0.11,
      "eval_exact_match": 47.682119205298015,
      "eval_f1": 66.74232170504494,
      "step": 600
    },
    {
      "epoch": 0.16,
      "eval_exact_match": 49.84862819299905,
      "eval_f1": 67.82623236742938,
      "step": 900
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.545578851412944e-05,
      "loss": 1.8951,
      "step": 1000
    },
    {
      "epoch": 0.22,
      "eval_exact_match": 51.7123935666982,
      "eval_f1": 70.35883323184615,
      "step": 1200
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.3176845943482223e-05,
      "loss": 1.7818,
      "step": 1500
    },
    {
      "epoch": 0.27,
      "eval_exact_match": 52.69631031220435,
      "eval_f1": 70.10225551135818,
      "step": 1500
    },
    {
      "epoch": 0.33,
      "eval_exact_match": 52.93282876064333,
      "eval_f1": 70.57743501701938,
      "step": 1800
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.0897903372835004e-05,
      "loss": 1.7351,
      "step": 2000
    },
    {
      "epoch": 0.38,
      "eval_exact_match": 53.822138126773886,
      "eval_f1": 72.2913967717095,
      "step": 2100
    },
    {
      "epoch": 0.44,
      "eval_exact_match": 54.15326395458846,
      "eval_f1": 71.8960686504451,
      "step": 2400
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.8618960802187785e-05,
      "loss": 1.6499,
      "step": 2500
    },
    {
      "epoch": 0.49,
      "eval_exact_match": 54.91012298959319,
      "eval_f1": 72.36339758418018,
      "step": 2700
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.6340018231540566e-05,
      "loss": 1.6346,
      "step": 3000
    },
    {
      "epoch": 0.55,
      "eval_exact_match": 55.2317880794702,
      "eval_f1": 72.83417554949612,
      "step": 3000
    },
    {
      "epoch": 0.6,
      "eval_exact_match": 56.67928098391675,
      "eval_f1": 73.351425055483,
      "step": 3300
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.406107566089335e-05,
      "loss": 1.6113,
      "step": 3500
    },
    {
      "epoch": 0.66,
      "eval_exact_match": 56.20624408703879,
      "eval_f1": 73.5623207381341,
      "step": 3600
    },
    {
      "epoch": 0.71,
      "eval_exact_match": 56.51844843897824,
      "eval_f1": 73.8479893696103,
      "step": 3900
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.178213309024613e-05,
      "loss": 1.5583,
      "step": 4000
    },
    {
      "epoch": 0.77,
      "eval_exact_match": 57.11447492904446,
      "eval_f1": 74.4610534306545,
      "step": 4200
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.9503190519598906e-05,
      "loss": 1.5623,
      "step": 4500
    },
    {
      "epoch": 0.82,
      "eval_exact_match": 57.60643330179754,
      "eval_f1": 74.50261751014862,
      "step": 4500
    },
    {
      "epoch": 0.88,
      "eval_exact_match": 57.228003784295176,
      "eval_f1": 74.4736305671364,
      "step": 4800
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.722880583409298e-05,
      "loss": 1.5367,
      "step": 5000
    },
    {
      "epoch": 0.93,
      "eval_exact_match": 57.29422894985809,
      "eval_f1": 74.42957742124376,
      "step": 5100
    },
    {
      "epoch": 0.98,
      "eval_exact_match": 57.93755912961211,
      "eval_f1": 75.26960240667054,
      "step": 5400
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.4949863263445765e-05,
      "loss": 1.4926,
      "step": 5500
    },
    {
      "epoch": 1.04,
      "eval_exact_match": 58.438978240302745,
      "eval_f1": 75.16537064693637,
      "step": 5700
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.2670920692798542e-05,
      "loss": 1.238,
      "step": 6000
    },
    {
      "epoch": 1.09,
      "eval_exact_match": 57.994323557237465,
      "eval_f1": 75.09801725717467,
      "step": 6000
    },
    {
      "epoch": 1.15,
      "eval_exact_match": 58.306527909176914,
      "eval_f1": 75.10273058477487,
      "step": 6300
    },
    {
      "epoch": 1.19,
      "learning_rate": 2.0391978122151323e-05,
      "loss": 1.2301,
      "step": 6500
    },
    {
      "epoch": 1.2,
      "eval_exact_match": 57.6631977294229,
      "eval_f1": 75.16465753346614,
      "step": 6600
    },
    {
      "epoch": 1.26,
      "eval_exact_match": 57.83349101229896,
      "eval_f1": 74.40142589790919,
      "step": 6900
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.8113035551504104e-05,
      "loss": 1.2176,
      "step": 7000
    },
    {
      "epoch": 1.31,
      "eval_exact_match": 58.39167455061495,
      "eval_f1": 75.34980345320106,
      "step": 7200
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.583409298085688e-05,
      "loss": 1.2076,
      "step": 7500
    },
    {
      "epoch": 1.37,
      "eval_exact_match": 58.35383159886471,
      "eval_f1": 75.31554814626054,
      "step": 7500
    },
    {
      "epoch": 1.42,
      "eval_exact_match": 58.00378429517502,
      "eval_f1": 75.15654833167893,
      "step": 7800
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.3555150410209664e-05,
      "loss": 1.2522,
      "step": 8000
    },
    {
      "epoch": 1.48,
      "eval_exact_match": 58.42005676442763,
      "eval_f1": 75.42667424347809,
      "step": 8100
    },
    {
      "epoch": 1.53,
      "eval_exact_match": 58.533585619678334,
      "eval_f1": 75.482174465219,
      "step": 8400
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.1276207839562443e-05,
      "loss": 1.2069,
      "step": 8500
    },
    {
      "epoch": 1.59,
      "eval_exact_match": 58.438978240302745,
      "eval_f1": 75.37247235785051,
      "step": 8700
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.001823154056518e-06,
      "loss": 1.2185,
      "step": 9000
    },
    {
      "epoch": 1.64,
      "eval_exact_match": 58.80794701986755,
      "eval_f1": 75.55945320105242,
      "step": 9000
    },
    {
      "epoch": 1.7,
      "eval_exact_match": 58.63765373699149,
      "eval_f1": 75.48349982961263,
      "step": 9300
    },
    {
      "epoch": 1.73,
      "learning_rate": 6.722880583409298e-06,
      "loss": 1.2221,
      "step": 9500
    },
    {
      "epoch": 1.75,
      "eval_exact_match": 58.7038789025544,
      "eval_f1": 75.52334202995019,
      "step": 9600
    },
    {
      "epoch": 1.8,
      "eval_exact_match": 58.87417218543046,
      "eval_f1": 75.59476406337527,
      "step": 9900
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.443938012762078e-06,
      "loss": 1.1926,
      "step": 10000
    },
    {
      "epoch": 1.86,
      "eval_exact_match": 58.883632923368026,
      "eval_f1": 75.53002560032212,
      "step": 10200
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.169553327256153e-06,
      "loss": 1.216,
      "step": 10500
    },
    {
      "epoch": 1.91,
      "eval_exact_match": 58.80794701986755,
      "eval_f1": 75.75802081436439,
      "step": 10500
    },
    {
      "epoch": 1.97,
      "eval_exact_match": 58.836329233680225,
      "eval_f1": 75.61405946041744,
      "step": 10800
    },
    {
      "epoch": 2.0,
      "step": 10970,
      "total_flos": 2544773915225664.0,
      "train_loss": 1.4681447838476385,
      "train_runtime": 1862.4153,
      "train_samples_per_second": 94.229,
      "train_steps_per_second": 5.89
    }
  ],
  "max_steps": 10970,
  "num_train_epochs": 2,
  "total_flos": 2544773915225664.0,
  "trial_name": null,
  "trial_params": null
}