{
  "best_metric": 0.3388192057609558,
  "best_model_checkpoint": "outputs/output_8_clip14_cxrbert/checkpoint-22500",
  "epoch": 8.0,
  "global_step": 43608,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "learning_rate": 4.999999755266707e-05,
      "loss": 0.7951,
      "step": 500
    },
    {
      "epoch": 0.09,
      "eval_loss": 1.1912389993667603,
      "eval_runtime": 139.1386,
      "eval_samples_per_second": 58.747,
      "eval_steps_per_second": 2.451,
      "step": 500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.993123185382302e-05,
      "loss": 0.5887,
      "step": 1000
    },
    {
      "epoch": 0.18,
      "eval_loss": 0.9833270907402039,
      "eval_runtime": 139.0379,
      "eval_samples_per_second": 58.79,
      "eval_steps_per_second": 2.453,
      "step": 1000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.972693864808811e-05,
      "loss": 0.5023,
      "step": 1500
    },
    {
      "epoch": 0.28,
      "eval_loss": 0.8458877205848694,
      "eval_runtime": 139.1851,
      "eval_samples_per_second": 58.728,
      "eval_steps_per_second": 2.45,
      "step": 1500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.938822848423147e-05,
      "loss": 0.4709,
      "step": 2000
    },
    {
      "epoch": 0.37,
      "eval_loss": 0.8479061126708984,
      "eval_runtime": 138.6519,
      "eval_samples_per_second": 58.953,
      "eval_steps_per_second": 2.459,
      "step": 2000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.891694260878015e-05,
      "loss": 0.4484,
      "step": 2500
    },
    {
      "epoch": 0.46,
      "eval_loss": 0.766708493232727,
      "eval_runtime": 138.9466,
      "eval_samples_per_second": 58.828,
      "eval_steps_per_second": 2.454,
      "step": 2500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.831564295690475e-05,
      "loss": 0.4319,
      "step": 3000
    },
    {
      "epoch": 0.55,
      "eval_loss": 0.8092461228370667,
      "eval_runtime": 138.9741,
      "eval_samples_per_second": 58.817,
      "eval_steps_per_second": 2.454,
      "step": 3000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.7587598225603125e-05,
      "loss": 0.4181,
      "step": 3500
    },
    {
      "epoch": 0.64,
      "eval_loss": 0.6963649392127991,
      "eval_runtime": 138.9254,
      "eval_samples_per_second": 58.837,
      "eval_steps_per_second": 2.455,
      "step": 3500
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.673676610488902e-05,
      "loss": 0.4107,
      "step": 4000
    },
    {
      "epoch": 0.73,
      "eval_loss": 0.6463401913642883,
      "eval_runtime": 138.8758,
      "eval_samples_per_second": 58.858,
      "eval_steps_per_second": 2.455,
      "step": 4000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.576777176357795e-05,
      "loss": 0.3723,
      "step": 4500
    },
    {
      "epoch": 0.83,
      "eval_loss": 0.7892907857894897,
      "eval_runtime": 138.7167,
      "eval_samples_per_second": 58.926,
      "eval_steps_per_second": 2.458,
      "step": 4500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.468588270662272e-05,
      "loss": 0.3746,
      "step": 5000
    },
    {
      "epoch": 0.92,
      "eval_loss": 0.686305582523346,
      "eval_runtime": 139.0126,
      "eval_samples_per_second": 58.8,
      "eval_steps_per_second": 2.453,
      "step": 5000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.349698014067534e-05,
      "loss": 0.3667,
      "step": 5500
    },
    {
      "epoch": 1.01,
      "eval_loss": 0.6910073161125183,
      "eval_runtime": 138.8546,
      "eval_samples_per_second": 58.867,
      "eval_steps_per_second": 2.456,
      "step": 5500
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.220752700353382e-05,
      "loss": 0.3253,
      "step": 6000
    },
    {
      "epoch": 1.1,
      "eval_loss": 0.6863256096839905,
      "eval_runtime": 138.8547,
      "eval_samples_per_second": 58.867,
      "eval_steps_per_second": 2.456,
      "step": 6000
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.082453283126738e-05,
      "loss": 0.3274,
      "step": 6500
    },
    {
      "epoch": 1.19,
      "eval_loss": 0.6445034146308899,
      "eval_runtime": 138.846,
      "eval_samples_per_second": 58.871,
      "eval_steps_per_second": 2.456,
      "step": 6500
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.935551565400428e-05,
      "loss": 0.3065,
      "step": 7000
    },
    {
      "epoch": 1.28,
      "eval_loss": 0.5908203125,
      "eval_runtime": 138.7486,
      "eval_samples_per_second": 58.912,
      "eval_steps_per_second": 2.458,
      "step": 7000
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.7808461127518854e-05,
      "loss": 0.2834,
      "step": 7500
    },
    {
      "epoch": 1.38,
      "eval_loss": 0.6137728691101074,
      "eval_runtime": 139.0095,
      "eval_samples_per_second": 58.802,
      "eval_steps_per_second": 2.453,
      "step": 7500
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.6191779122780486e-05,
      "loss": 0.293,
      "step": 8000
    },
    {
      "epoch": 1.47,
      "eval_loss": 0.6515378355979919,
      "eval_runtime": 139.1162,
      "eval_samples_per_second": 58.757,
      "eval_steps_per_second": 2.451,
      "step": 8000
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.4514258009446234e-05,
      "loss": 0.303,
      "step": 8500
    },
    {
      "epoch": 1.56,
      "eval_loss": 0.5806155800819397,
      "eval_runtime": 138.924,
      "eval_samples_per_second": 58.838,
      "eval_steps_per_second": 2.455,
      "step": 8500
    },
    {
      "epoch": 1.65,
      "learning_rate": 3.278501688181439e-05,
      "loss": 0.2638,
      "step": 9000
    },
    {
      "epoch": 1.65,
      "eval_loss": 0.5586961507797241,
      "eval_runtime": 139.1301,
      "eval_samples_per_second": 58.751,
      "eval_steps_per_second": 2.451,
      "step": 9000
    },
    {
      "epoch": 1.74,
      "learning_rate": 3.101345598694112e-05,
      "loss": 0.2593,
      "step": 9500
    },
    {
      "epoch": 1.74,
      "eval_loss": 0.5215563178062439,
      "eval_runtime": 138.9957,
      "eval_samples_per_second": 58.808,
      "eval_steps_per_second": 2.453,
      "step": 9500
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.9209205624395885e-05,
      "loss": 0.2451,
      "step": 10000
    },
    {
      "epoch": 1.83,
      "eval_loss": 0.5282728672027588,
      "eval_runtime": 138.8608,
      "eval_samples_per_second": 58.865,
      "eval_steps_per_second": 2.456,
      "step": 10000
    },
    {
      "epoch": 1.93,
      "learning_rate": 2.7382073795438957e-05,
      "loss": 0.2468,
      "step": 10500
    },
    {
      "epoch": 1.93,
      "eval_loss": 0.5001487135887146,
      "eval_runtime": 138.9207,
      "eval_samples_per_second": 58.839,
      "eval_steps_per_second": 2.455,
      "step": 10500
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.5541992886203175e-05,
      "loss": 0.2295,
      "step": 11000
    },
    {
      "epoch": 2.02,
      "eval_loss": 0.49750879406929016,
      "eval_runtime": 138.9502,
      "eval_samples_per_second": 58.827,
      "eval_steps_per_second": 2.454,
      "step": 11000
    },
    {
      "epoch": 2.11,
      "learning_rate": 2.3698965674712838e-05,
      "loss": 0.1953,
      "step": 11500
    },
    {
      "epoch": 2.11,
      "eval_loss": 0.4750489890575409,
      "eval_runtime": 138.8668,
      "eval_samples_per_second": 58.862,
      "eval_steps_per_second": 2.456,
      "step": 11500
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.1863010955248543e-05,
      "loss": 0.1954,
      "step": 12000
    },
    {
      "epoch": 2.2,
      "eval_loss": 0.45723679661750793,
      "eval_runtime": 139.0817,
      "eval_samples_per_second": 58.771,
      "eval_steps_per_second": 2.452,
      "step": 12000
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.0044109075646793e-05,
      "loss": 0.1737,
      "step": 12500
    },
    {
      "epoch": 2.29,
      "eval_loss": 0.4731180667877197,
      "eval_runtime": 139.0822,
      "eval_samples_per_second": 58.771,
      "eval_steps_per_second": 2.452,
      "step": 12500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.8252147683596503e-05,
      "loss": 0.175,
      "step": 13000
    },
    {
      "epoch": 2.38,
      "eval_loss": 0.4526049494743347,
      "eval_runtime": 139.059,
      "eval_samples_per_second": 58.781,
      "eval_steps_per_second": 2.452,
      "step": 13000
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.6496867976858525e-05,
      "loss": 0.1873,
      "step": 13500
    },
    {
      "epoch": 2.48,
      "eval_loss": 0.4890150725841522,
      "eval_runtime": 138.99,
      "eval_samples_per_second": 58.81,
      "eval_steps_per_second": 2.453,
      "step": 13500
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.4787811749594674e-05,
      "loss": 0.1809,
      "step": 14000
    },
    {
      "epoch": 2.57,
      "eval_loss": 0.4210197627544403,
      "eval_runtime": 139.0599,
      "eval_samples_per_second": 58.78,
      "eval_steps_per_second": 2.452,
      "step": 14000
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.3134269522665521e-05,
      "loss": 0.1711,
      "step": 14500
    },
    {
      "epoch": 2.66,
      "eval_loss": 0.4197298586368561,
      "eval_runtime": 139.0776,
      "eval_samples_per_second": 58.773,
      "eval_steps_per_second": 2.452,
      "step": 14500
    },
    {
      "epoch": 2.75,
      "learning_rate": 1.1545230039863117e-05,
      "loss": 0.1457,
      "step": 15000
    },
    {
      "epoch": 2.75,
      "eval_loss": 0.3998343348503113,
      "eval_runtime": 138.7787,
      "eval_samples_per_second": 58.9,
      "eval_steps_per_second": 2.457,
      "step": 15000
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.0029331404620077e-05,
      "loss": 0.1583,
      "step": 15500
    },
    {
      "epoch": 2.84,
      "eval_loss": 0.392282098531723,
      "eval_runtime": 139.0688,
      "eval_samples_per_second": 58.777,
      "eval_steps_per_second": 2.452,
      "step": 15500
    },
    {
      "epoch": 2.94,
      "learning_rate": 8.59481412281825e-06,
      "loss": 0.1579,
      "step": 16000
    },
    {
      "epoch": 2.94,
      "eval_loss": 0.3823428750038147,
      "eval_runtime": 139.0069,
      "eval_samples_per_second": 58.803,
      "eval_steps_per_second": 2.453,
      "step": 16000
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.249476306959052e-06,
      "loss": 0.1339,
      "step": 16500
    },
    {
      "epoch": 3.03,
      "eval_loss": 0.3654000163078308,
      "eval_runtime": 139.085,
      "eval_samples_per_second": 58.77,
      "eval_steps_per_second": 2.452,
      "step": 16500
    },
    {
      "epoch": 3.12,
      "learning_rate": 6.00063128520765e-06,
      "loss": 0.1164,
      "step": 17000
    },
    {
      "epoch": 3.12,
      "eval_loss": 0.3591544032096863,
      "eval_runtime": 139.2185,
      "eval_samples_per_second": 58.713,
      "eval_steps_per_second": 2.449,
      "step": 17000
    },
    {
      "epoch": 3.21,
      "learning_rate": 4.855067845750841e-06,
      "loss": 0.1217,
      "step": 17500
    },
    {
      "epoch": 3.21,
      "eval_loss": 0.3641490936279297,
      "eval_runtime": 139.1903,
      "eval_samples_per_second": 58.725,
      "eval_steps_per_second": 2.45,
      "step": 17500
    },
    {
      "epoch": 3.3,
      "learning_rate": 3.8190133325820834e-06,
      "loss": 0.119,
      "step": 18000
    },
    {
      "epoch": 3.3,
      "eval_loss": 0.3553272783756256,
      "eval_runtime": 139.0893,
      "eval_samples_per_second": 58.768,
      "eval_steps_per_second": 2.452,
      "step": 18000
    },
    {
      "epoch": 3.39,
      "learning_rate": 2.8980997933272802e-06,
      "loss": 0.1151,
      "step": 18500
    },
    {
      "epoch": 3.39,
      "eval_loss": 0.35238373279571533,
      "eval_runtime": 139.0702,
      "eval_samples_per_second": 58.776,
      "eval_steps_per_second": 2.452,
      "step": 18500
    },
    {
      "epoch": 3.49,
      "learning_rate": 2.0973333631332525e-06,
      "loss": 0.119,
      "step": 19000
    },
    {
      "epoch": 3.49,
      "eval_loss": 0.3452140688896179,
      "eval_runtime": 138.934,
      "eval_samples_per_second": 58.834,
      "eval_steps_per_second": 2.454,
      "step": 19000
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.4210670510499595e-06,
      "loss": 0.102,
      "step": 19500
    },
    {
      "epoch": 3.58,
      "eval_loss": 0.34390997886657715,
      "eval_runtime": 139.1805,
      "eval_samples_per_second": 58.729,
      "eval_steps_per_second": 2.45,
      "step": 19500
    },
    {
      "epoch": 3.67,
      "learning_rate": 8.729770768409501e-07,
      "loss": 0.1085,
      "step": 20000
    },
    {
      "epoch": 3.67,
      "eval_loss": 0.3422289192676544,
      "eval_runtime": 139.0942,
      "eval_samples_per_second": 58.766,
      "eval_steps_per_second": 2.452,
      "step": 20000
    },
    {
      "epoch": 3.76,
      "learning_rate": 4.5604288685657804e-07,
      "loss": 0.1142,
      "step": 20500
    },
    {
      "epoch": 3.76,
      "eval_loss": 0.33955371379852295,
      "eval_runtime": 138.9826,
      "eval_samples_per_second": 58.813,
      "eval_steps_per_second": 2.454,
      "step": 20500
    },
    {
      "epoch": 3.85,
      "learning_rate": 1.7253095760459415e-07,
      "loss": 0.1038,
      "step": 21000
    },
    {
      "epoch": 3.85,
      "eval_loss": 0.33917009830474854,
      "eval_runtime": 139.1121,
      "eval_samples_per_second": 58.758,
      "eval_steps_per_second": 2.451,
      "step": 21000
    },
    {
      "epoch": 3.94,
      "learning_rate": 2.3982475062916954e-08,
      "loss": 0.1143,
      "step": 21500
    },
    {
      "epoch": 3.94,
      "eval_loss": 0.33897778391838074,
      "eval_runtime": 139.1906,
      "eval_samples_per_second": 58.725,
      "eval_steps_per_second": 2.45,
      "step": 21500
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.1204956710403336e-08,
      "loss": 0.0983,
      "step": 22000
    },
    {
      "epoch": 4.04,
      "eval_loss": 0.3389684855937958,
      "eval_runtime": 139.2217,
      "eval_samples_per_second": 58.712,
      "eval_steps_per_second": 2.449,
      "step": 22000
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.3426786181872375e-07,
      "loss": 0.0974,
      "step": 22500
    },
    {
      "epoch": 4.13,
      "eval_loss": 0.3388192057609558,
      "eval_runtime": 139.1103,
      "eval_samples_per_second": 58.759,
      "eval_steps_per_second": 2.451,
      "step": 22500
    },
    {
      "epoch": 4.22,
      "learning_rate": 3.925022138680762e-07,
      "loss": 0.1007,
      "step": 23000
    },
    {
      "epoch": 4.22,
      "eval_loss": 0.33886849880218506,
      "eval_runtime": 139.2186,
      "eval_samples_per_second": 58.713,
      "eval_steps_per_second": 2.449,
      "step": 23000
    },
    {
      "epoch": 4.31,
      "learning_rate": 7.845042371392303e-07,
      "loss": 0.0903,
      "step": 23500
    },
    {
      "epoch": 4.31,
      "eval_loss": 0.33964774012565613,
      "eval_runtime": 139.1917,
      "eval_samples_per_second": 58.725,
      "eval_steps_per_second": 2.45,
      "step": 23500
    },
    {
      "epoch": 4.4,
      "learning_rate": 1.308142987713265e-06,
      "loss": 0.095,
      "step": 24000
    },
    {
      "epoch": 4.4,
      "eval_loss": 0.3394069969654083,
      "eval_runtime": 139.0407,
      "eval_samples_per_second": 58.789,
      "eval_steps_per_second": 2.453,
      "step": 24000
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.960571937396438e-06,
      "loss": 0.0955,
      "step": 24500
    },
    {
      "epoch": 4.49,
      "eval_loss": 0.3435823619365692,
      "eval_runtime": 138.8656,
      "eval_samples_per_second": 58.863,
      "eval_steps_per_second": 2.456,
      "step": 24500
    },
    {
      "epoch": 4.59,
      "learning_rate": 2.7382444475993473e-06,
      "loss": 0.1032,
      "step": 25000
    },
    {
      "epoch": 4.59,
      "eval_loss": 0.3425971269607544,
      "eval_runtime": 139.1498,
      "eval_samples_per_second": 58.742,
      "eval_steps_per_second": 2.451,
      "step": 25000
    },
    {
      "epoch": 4.68,
      "learning_rate": 3.636933049053598e-06,
      "loss": 0.1037,
      "step": 25500
    },
    {
      "epoch": 4.68,
      "eval_loss": 0.3484514653682709,
      "eval_runtime": 139.2414,
      "eval_samples_per_second": 58.704,
      "eval_steps_per_second": 2.449,
      "step": 25500
    },
    {
      "epoch": 4.77,
      "learning_rate": 4.651752422560337e-06,
      "loss": 0.103,
      "step": 26000
    },
    {
      "epoch": 4.77,
      "eval_loss": 0.35472801327705383,
      "eval_runtime": 139.171,
      "eval_samples_per_second": 58.733,
      "eval_steps_per_second": 2.45,
      "step": 26000
    },
    {
      "epoch": 4.86,
      "learning_rate": 5.777185955846176e-06,
      "loss": 0.0987,
      "step": 26500
    },
    {
      "epoch": 4.86,
      "eval_loss": 0.355197936296463,
      "eval_runtime": 139.255,
      "eval_samples_per_second": 58.698,
      "eval_steps_per_second": 2.449,
      "step": 26500
    },
    {
      "epoch": 4.95,
      "learning_rate": 7.007115732161859e-06,
      "loss": 0.1076,
      "step": 27000
    },
    {
      "epoch": 4.95,
      "eval_loss": 0.35372602939605713,
      "eval_runtime": 139.256,
      "eval_samples_per_second": 58.698,
      "eval_steps_per_second": 2.449,
      "step": 27000
    },
    {
      "epoch": 5.04,
      "learning_rate": 8.334855787604286e-06,
      "loss": 0.1134,
      "step": 27500
    },
    {
      "epoch": 5.04,
      "eval_loss": 0.35491758584976196,
      "eval_runtime": 139.157,
      "eval_samples_per_second": 58.739,
      "eval_steps_per_second": 2.45,
      "step": 27500
    },
    {
      "epoch": 5.14,
      "learning_rate": 9.753188456373041e-06,
      "loss": 0.1044,
      "step": 28000
    },
    {
      "epoch": 5.14,
      "eval_loss": 0.362209677696228,
      "eval_runtime": 139.1058,
      "eval_samples_per_second": 58.761,
      "eval_steps_per_second": 2.451,
      "step": 28000
    },
    {
      "epoch": 5.23,
      "learning_rate": 1.1254403606386926e-05,
      "loss": 0.1099,
      "step": 28500
    },
    {
      "epoch": 5.23,
      "eval_loss": 0.37740227580070496,
      "eval_runtime": 139.2551,
      "eval_samples_per_second": 58.698,
      "eval_steps_per_second": 2.449,
      "step": 28500
    },
    {
      "epoch": 5.32,
      "learning_rate": 1.2830340551973424e-05,
      "loss": 0.1129,
      "step": 29000
    },
    {
      "epoch": 5.32,
      "eval_loss": 0.387184202671051,
      "eval_runtime": 138.9995,
      "eval_samples_per_second": 58.806,
      "eval_steps_per_second": 2.453,
      "step": 29000
    },
    {
      "epoch": 5.41,
      "learning_rate": 1.4472432415791445e-05,
      "loss": 0.1235,
      "step": 29500
    },
    {
      "epoch": 5.41,
      "eval_loss": 0.3766579329967499,
      "eval_runtime": 139.1988,
      "eval_samples_per_second": 58.722,
      "eval_steps_per_second": 2.45,
      "step": 29500
    },
    {
      "epoch": 5.5,
      "learning_rate": 1.6171752698833968e-05,
      "loss": 0.1099,
      "step": 30000
    },
    {
      "epoch": 5.5,
      "eval_loss": 0.3879966139793396,
      "eval_runtime": 139.1987,
      "eval_samples_per_second": 58.722,
      "eval_steps_per_second": 2.45,
      "step": 30000
    },
    {
      "epoch": 5.6,
      "learning_rate": 1.7919063805352744e-05,
      "loss": 0.1331,
      "step": 30500
    },
    {
      "epoch": 5.6,
      "eval_loss": 0.41808027029037476,
      "eval_runtime": 139.3796,
      "eval_samples_per_second": 58.646,
      "eval_steps_per_second": 2.447,
      "step": 30500
    },
    {
      "epoch": 5.69,
      "learning_rate": 1.9704867258922042e-05,
      "loss": 0.134,
      "step": 31000
    },
    {
      "epoch": 5.69,
      "eval_loss": 0.4090297818183899,
      "eval_runtime": 139.2797,
      "eval_samples_per_second": 58.688,
      "eval_steps_per_second": 2.448,
      "step": 31000
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.1519455336663182e-05,
      "loss": 0.142,
      "step": 31500
    },
    {
      "epoch": 5.78,
      "eval_loss": 0.4044671654701233,
      "eval_runtime": 139.2242,
      "eval_samples_per_second": 58.711,
      "eval_steps_per_second": 2.449,
      "step": 31500
    },
    {
      "epoch": 5.87,
      "learning_rate": 2.335296384094446e-05,
      "loss": 0.1441,
      "step": 32000
    },
    {
      "epoch": 5.87,
      "eval_loss": 0.41757142543792725,
      "eval_runtime": 139.1671,
      "eval_samples_per_second": 58.735,
      "eval_steps_per_second": 2.45,
      "step": 32000
    },
    {
      "epoch": 5.96,
      "learning_rate": 2.51954257216856e-05,
      "loss": 0.1577,
      "step": 32500
    },
    {
      "epoch": 5.96,
      "eval_loss": 0.43774479627609253,
      "eval_runtime": 139.219,
      "eval_samples_per_second": 58.713,
      "eval_steps_per_second": 2.449,
      "step": 32500
    },
    {
      "epoch": 6.05,
      "learning_rate": 2.703682525777417e-05,
      "loss": 0.1539,
      "step": 33000
    },
    {
      "epoch": 6.05,
      "eval_loss": 0.43269890546798706,
      "eval_runtime": 139.3068,
      "eval_samples_per_second": 58.676,
      "eval_steps_per_second": 2.448,
      "step": 33000
    },
    {
      "epoch": 6.15,
      "learning_rate": 2.8867152503059856e-05,
      "loss": 0.1475,
      "step": 33500
    },
    {
      "epoch": 6.15,
      "eval_loss": 0.4586590826511383,
      "eval_runtime": 139.2759,
      "eval_samples_per_second": 58.689,
      "eval_steps_per_second": 2.448,
      "step": 33500
    },
    {
      "epoch": 6.24,
      "learning_rate": 3.0676457700956226e-05,
      "loss": 0.1616,
      "step": 34000
    },
    {
      "epoch": 6.24,
      "eval_loss": 0.47090479731559753,
      "eval_runtime": 139.1928,
      "eval_samples_per_second": 58.724,
      "eval_steps_per_second": 2.45,
      "step": 34000
    },
    {
      "epoch": 6.33,
      "learning_rate": 3.2454905371848176e-05,
      "loss": 0.1671,
      "step": 34500
    },
    {
      "epoch": 6.33,
      "eval_loss": 0.49197548627853394,
      "eval_runtime": 139.1637,
      "eval_samples_per_second": 58.737,
      "eval_steps_per_second": 2.45,
      "step": 34500
    },
    {
      "epoch": 6.42,
      "learning_rate": 3.4192827779284355e-05,
      "loss": 0.1792,
      "step": 35000
    },
    {
      "epoch": 6.42,
      "eval_loss": 0.48025813698768616,
      "eval_runtime": 139.2895,
      "eval_samples_per_second": 58.684,
      "eval_steps_per_second": 2.448,
      "step": 35000
    },
    {
      "epoch": 6.51,
      "learning_rate": 3.588077748430818e-05,
      "loss": 0.2025,
      "step": 35500
    },
    {
      "epoch": 6.51,
      "eval_loss": 0.5274905562400818,
      "eval_runtime": 138.8747,
      "eval_samples_per_second": 58.859,
      "eval_steps_per_second": 2.455,
      "step": 35500
    },
    {
      "epoch": 6.6,
      "learning_rate": 3.7509578702240475e-05,
      "loss": 0.1823,
      "step": 36000
    },
    {
      "epoch": 6.6,
      "eval_loss": 0.5114786028862,
      "eval_runtime": 139.2682,
      "eval_samples_per_second": 58.692,
      "eval_steps_per_second": 2.449,
      "step": 36000
    },
    {
      "epoch": 6.7,
      "learning_rate": 3.9070377182734444e-05,
      "loss": 0.2123,
      "step": 36500
    },
    {
      "epoch": 6.7,
      "eval_loss": 0.4975065290927887,
      "eval_runtime": 138.9217,
      "eval_samples_per_second": 58.839,
      "eval_steps_per_second": 2.455,
      "step": 36500
    },
    {
      "epoch": 6.79,
      "learning_rate": 4.0554688341953205e-05,
      "loss": 0.2043,
      "step": 37000
    },
    {
      "epoch": 6.79,
      "eval_loss": 0.48896968364715576,
      "eval_runtime": 139.3258,
      "eval_samples_per_second": 58.668,
      "eval_steps_per_second": 2.448,
      "step": 37000
    },
    {
      "epoch": 6.88,
      "learning_rate": 4.19544433852203e-05,
      "loss": 0.2086,
      "step": 37500
    },
    {
      "epoch": 6.88,
      "eval_loss": 0.5374048352241516,
      "eval_runtime": 139.1786,
      "eval_samples_per_second": 58.73,
      "eval_steps_per_second": 2.45,
      "step": 37500
    },
    {
      "epoch": 6.97,
      "learning_rate": 4.326203316941825e-05,
      "loss": 0.2299,
      "step": 38000
    },
    {
      "epoch": 6.97,
      "eval_loss": 0.5565398335456848,
      "eval_runtime": 139.0129,
      "eval_samples_per_second": 58.8,
      "eval_steps_per_second": 2.453,
      "step": 38000
    },
    {
      "epoch": 7.06,
      "learning_rate": 4.44703495666965e-05,
      "loss": 0.2151,
      "step": 38500
    },
    {
      "epoch": 7.06,
      "eval_loss": 0.6073034405708313,
      "eval_runtime": 139.179,
      "eval_samples_per_second": 58.73,
      "eval_steps_per_second": 2.45,
      "step": 38500
    },
    {
      "epoch": 7.15,
      "learning_rate": 4.5572824104633835e-05,
      "loss": 0.222,
      "step": 39000
    },
    {
      "epoch": 7.15,
      "eval_loss": 0.5468436479568481,
      "eval_runtime": 139.1916,
      "eval_samples_per_second": 58.725,
      "eval_steps_per_second": 2.45,
      "step": 39000
    },
    {
      "epoch": 7.25,
      "learning_rate": 4.656346367280503e-05,
      "loss": 0.236,
      "step": 39500
    },
    {
      "epoch": 7.25,
      "eval_loss": 0.5504103899002075,
      "eval_runtime": 139.2016,
      "eval_samples_per_second": 58.721,
      "eval_steps_per_second": 2.45,
      "step": 39500
    },
    {
      "epoch": 7.34,
      "learning_rate": 4.743688310164889e-05,
      "loss": 0.2031,
      "step": 40000
    },
    {
      "epoch": 7.34,
      "eval_loss": 0.5548919439315796,
      "eval_runtime": 139.2056,
      "eval_samples_per_second": 58.719,
      "eval_steps_per_second": 2.45,
      "step": 40000
    },
    {
      "epoch": 7.43,
      "learning_rate": 4.818833443653748e-05,
      "loss": 0.2251,
      "step": 40500
    },
    {
      "epoch": 7.43,
      "eval_loss": 0.5905419588088989,
      "eval_runtime": 139.1367,
      "eval_samples_per_second": 58.748,
      "eval_steps_per_second": 2.451,
      "step": 40500
    },
    {
      "epoch": 7.52,
      "learning_rate": 4.881373274791077e-05,
      "loss": 0.2251,
      "step": 41000
    },
    {
      "epoch": 7.52,
      "eval_loss": 0.6011632680892944,
      "eval_runtime": 139.1129,
      "eval_samples_per_second": 58.758,
      "eval_steps_per_second": 2.451,
      "step": 41000
    },
    {
      "epoch": 7.61,
      "learning_rate": 4.9309678337171785e-05,
      "loss": 0.2464,
      "step": 41500
    },
    {
      "epoch": 7.61,
      "eval_loss": 0.5931146740913391,
      "eval_runtime": 139.1046,
      "eval_samples_per_second": 58.762,
      "eval_steps_per_second": 2.451,
      "step": 41500
    },
    {
      "epoch": 7.71,
      "learning_rate": 4.9673475217629615e-05,
      "loss": 0.2451,
      "step": 42000
    },
    {
      "epoch": 7.71,
      "eval_loss": 0.6498579978942871,
      "eval_runtime": 139.0889,
      "eval_samples_per_second": 58.768,
      "eval_steps_per_second": 2.452,
      "step": 42000
    },
    {
      "epoch": 7.8,
      "learning_rate": 4.990314577002693e-05,
      "loss": 0.2463,
      "step": 42500
    },
    {
      "epoch": 7.8,
      "eval_loss": 0.5696046948432922,
      "eval_runtime": 139.0384,
      "eval_samples_per_second": 58.79,
      "eval_steps_per_second": 2.453,
      "step": 42500
    },
    {
      "epoch": 7.89,
      "learning_rate": 4.999744149298381e-05,
      "loss": 0.2385,
      "step": 43000
    },
    {
      "epoch": 7.89,
      "eval_loss": 0.5360204577445984,
      "eval_runtime": 139.0432,
      "eval_samples_per_second": 58.787,
      "eval_steps_per_second": 2.452,
      "step": 43000
    },
    {
      "epoch": 7.98,
      "learning_rate": 4.995584978991786e-05,
      "loss": 0.2353,
      "step": 43500
    },
    {
      "epoch": 7.98,
      "eval_loss": 0.5489608645439148,
      "eval_runtime": 139.1804,
      "eval_samples_per_second": 58.73,
      "eval_steps_per_second": 2.45,
      "step": 43500
    },
    {
      "epoch": 8.0,
      "step": 43608,
      "total_flos": 1.5655045448788992e+17,
      "train_loss": 0.21080181559736064,
      "train_runtime": 44575.4239,
      "train_samples_per_second": 11.741,
      "train_steps_per_second": 0.978
    }
  ],
  "max_steps": 43608,
  "num_train_epochs": 8,
  "total_flos": 1.5655045448788992e+17,
  "trial_name": null,
  "trial_params": null
}