{
  "best_metric": 2.0496439933776855,
  "best_model_checkpoint": "./outputs/checkpoint-4100",
  "epoch": 2.987249544626594,
  "eval_steps": 100,
  "global_step": 4100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 0.0002,
      "loss": 2.8179,
      "step": 100
    },
    {
      "epoch": 0.07,
      "eval_loss": 2.723731517791748,
      "eval_runtime": 208.4619,
      "eval_samples_per_second": 30.097,
      "eval_steps_per_second": 3.766,
      "step": 100
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0002,
      "loss": 2.6952,
      "step": 200
    },
    {
      "epoch": 0.15,
      "eval_loss": 2.6728808879852295,
      "eval_runtime": 204.3784,
      "eval_samples_per_second": 30.698,
      "eval_steps_per_second": 3.841,
      "step": 200
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0002,
      "loss": 2.6508,
      "step": 300
    },
    {
      "epoch": 0.22,
      "eval_loss": 2.6387226581573486,
      "eval_runtime": 205.5464,
      "eval_samples_per_second": 30.524,
      "eval_steps_per_second": 3.819,
      "step": 300
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0002,
      "loss": 2.6247,
      "step": 400
    },
    {
      "epoch": 0.29,
      "eval_loss": 2.6044600009918213,
      "eval_runtime": 204.5115,
      "eval_samples_per_second": 30.678,
      "eval_steps_per_second": 3.838,
      "step": 400
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0002,
      "loss": 2.5814,
      "step": 500
    },
    {
      "epoch": 0.36,
      "eval_loss": 2.5784292221069336,
      "eval_runtime": 204.5265,
      "eval_samples_per_second": 30.676,
      "eval_steps_per_second": 3.838,
      "step": 500
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0002,
      "loss": 2.5538,
      "step": 600
    },
    {
      "epoch": 0.44,
      "eval_loss": 2.5523250102996826,
      "eval_runtime": 205.1078,
      "eval_samples_per_second": 30.589,
      "eval_steps_per_second": 3.827,
      "step": 600
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0002,
      "loss": 2.5382,
      "step": 700
    },
    {
      "epoch": 0.51,
      "eval_loss": 2.5274579524993896,
      "eval_runtime": 204.8352,
      "eval_samples_per_second": 30.63,
      "eval_steps_per_second": 3.832,
      "step": 700
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0002,
      "loss": 2.5141,
      "step": 800
    },
    {
      "epoch": 0.58,
      "eval_loss": 2.506831645965576,
      "eval_runtime": 204.4886,
      "eval_samples_per_second": 30.681,
      "eval_steps_per_second": 3.839,
      "step": 800
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.0002,
      "loss": 2.4765,
      "step": 900
    },
    {
      "epoch": 0.66,
      "eval_loss": 2.484386920928955,
      "eval_runtime": 204.734,
      "eval_samples_per_second": 30.645,
      "eval_steps_per_second": 3.834,
      "step": 900
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.0002,
      "loss": 2.4729,
      "step": 1000
    },
    {
      "epoch": 0.73,
      "eval_loss": 2.4648232460021973,
      "eval_runtime": 204.5514,
      "eval_samples_per_second": 30.672,
      "eval_steps_per_second": 3.838,
      "step": 1000
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0002,
      "loss": 2.468,
      "step": 1100
    },
    {
      "epoch": 0.8,
      "eval_loss": 2.442662477493286,
      "eval_runtime": 204.6558,
      "eval_samples_per_second": 30.656,
      "eval_steps_per_second": 3.836,
      "step": 1100
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0002,
      "loss": 2.4198,
      "step": 1200
    },
    {
      "epoch": 0.87,
      "eval_loss": 2.4222779273986816,
      "eval_runtime": 204.5507,
      "eval_samples_per_second": 30.672,
      "eval_steps_per_second": 3.838,
      "step": 1200
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0002,
      "loss": 2.4326,
      "step": 1300
    },
    {
      "epoch": 0.95,
      "eval_loss": 2.405611753463745,
      "eval_runtime": 204.6008,
      "eval_samples_per_second": 30.665,
      "eval_steps_per_second": 3.837,
      "step": 1300
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0002,
      "loss": 2.3782,
      "step": 1400
    },
    {
      "epoch": 1.02,
      "eval_loss": 2.381305694580078,
      "eval_runtime": 204.4744,
      "eval_samples_per_second": 30.684,
      "eval_steps_per_second": 3.839,
      "step": 1400
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0002,
      "loss": 2.3396,
      "step": 1500
    },
    {
      "epoch": 1.09,
      "eval_loss": 2.3646771907806396,
      "eval_runtime": 204.6348,
      "eval_samples_per_second": 30.659,
      "eval_steps_per_second": 3.836,
      "step": 1500
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0002,
      "loss": 2.3327,
      "step": 1600
    },
    {
      "epoch": 1.17,
      "eval_loss": 2.3456737995147705,
      "eval_runtime": 204.4826,
      "eval_samples_per_second": 30.682,
      "eval_steps_per_second": 3.839,
      "step": 1600
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0002,
      "loss": 2.3308,
      "step": 1700
    },
    {
      "epoch": 1.24,
      "eval_loss": 2.329716920852661,
      "eval_runtime": 204.6937,
      "eval_samples_per_second": 30.651,
      "eval_steps_per_second": 3.835,
      "step": 1700
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.0002,
      "loss": 2.2938,
      "step": 1800
    },
    {
      "epoch": 1.31,
      "eval_loss": 2.313258409500122,
      "eval_runtime": 204.5658,
      "eval_samples_per_second": 30.67,
      "eval_steps_per_second": 3.837,
      "step": 1800
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.0002,
      "loss": 2.2756,
      "step": 1900
    },
    {
      "epoch": 1.38,
      "eval_loss": 2.298874855041504,
      "eval_runtime": 204.84,
      "eval_samples_per_second": 30.629,
      "eval_steps_per_second": 3.832,
      "step": 1900
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.0002,
      "loss": 2.2724,
      "step": 2000
    },
    {
      "epoch": 1.46,
      "eval_loss": 2.2830755710601807,
      "eval_runtime": 204.6478,
      "eval_samples_per_second": 30.658,
      "eval_steps_per_second": 3.836,
      "step": 2000
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.0002,
      "loss": 2.252,
      "step": 2100
    },
    {
      "epoch": 1.53,
      "eval_loss": 2.2688376903533936,
      "eval_runtime": 204.8236,
      "eval_samples_per_second": 30.631,
      "eval_steps_per_second": 3.833,
      "step": 2100
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0002,
      "loss": 2.2357,
      "step": 2200
    },
    {
      "epoch": 1.6,
      "eval_loss": 2.2520592212677,
      "eval_runtime": 204.8048,
      "eval_samples_per_second": 30.634,
      "eval_steps_per_second": 3.833,
      "step": 2200
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.0002,
      "loss": 2.2266,
      "step": 2300
    },
    {
      "epoch": 1.68,
      "eval_loss": 2.240290880203247,
      "eval_runtime": 204.7035,
      "eval_samples_per_second": 30.649,
      "eval_steps_per_second": 3.835,
      "step": 2300
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.0002,
      "loss": 2.2316,
      "step": 2400
    },
    {
      "epoch": 1.75,
      "eval_loss": 2.22774338722229,
      "eval_runtime": 204.8482,
      "eval_samples_per_second": 30.628,
      "eval_steps_per_second": 3.832,
      "step": 2400
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.0002,
      "loss": 2.1995,
      "step": 2500
    },
    {
      "epoch": 1.82,
      "eval_loss": 2.2125885486602783,
      "eval_runtime": 204.9544,
      "eval_samples_per_second": 30.612,
      "eval_steps_per_second": 3.83,
      "step": 2500
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.0002,
      "loss": 2.1997,
      "step": 2600
    },
    {
      "epoch": 1.89,
      "eval_loss": 2.201737880706787,
      "eval_runtime": 205.102,
      "eval_samples_per_second": 30.59,
      "eval_steps_per_second": 3.827,
      "step": 2600
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.0002,
      "loss": 2.1825,
      "step": 2700
    },
    {
      "epoch": 1.97,
      "eval_loss": 2.185546636581421,
      "eval_runtime": 204.9463,
      "eval_samples_per_second": 30.613,
      "eval_steps_per_second": 3.83,
      "step": 2700
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.0002,
      "loss": 2.1384,
      "step": 2800
    },
    {
      "epoch": 2.04,
      "eval_loss": 2.1790666580200195,
      "eval_runtime": 204.8473,
      "eval_samples_per_second": 30.628,
      "eval_steps_per_second": 3.832,
      "step": 2800
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0002,
      "loss": 2.1105,
      "step": 2900
    },
    {
      "epoch": 2.11,
      "eval_loss": 2.167008638381958,
      "eval_runtime": 204.8809,
      "eval_samples_per_second": 30.623,
      "eval_steps_per_second": 3.831,
      "step": 2900
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.0002,
      "loss": 2.1284,
      "step": 3000
    },
    {
      "epoch": 2.19,
      "eval_loss": 2.1564178466796875,
      "eval_runtime": 205.2562,
      "eval_samples_per_second": 30.567,
      "eval_steps_per_second": 3.824,
      "step": 3000
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.0002,
      "loss": 2.0972,
      "step": 3100
    },
    {
      "epoch": 2.26,
      "eval_loss": 2.146613359451294,
      "eval_runtime": 205.098,
      "eval_samples_per_second": 30.59,
      "eval_steps_per_second": 3.827,
      "step": 3100
    },
    {
      "epoch": 2.33,
      "learning_rate": 0.0002,
      "loss": 2.1167,
      "step": 3200
    },
    {
      "epoch": 2.33,
      "eval_loss": 2.134009599685669,
      "eval_runtime": 205.3378,
      "eval_samples_per_second": 30.555,
      "eval_steps_per_second": 3.823,
      "step": 3200
    },
    {
      "epoch": 2.4,
      "learning_rate": 0.0002,
      "loss": 2.0774,
      "step": 3300
    },
    {
      "epoch": 2.4,
      "eval_loss": 2.125035285949707,
      "eval_runtime": 204.8829,
      "eval_samples_per_second": 30.622,
      "eval_steps_per_second": 3.831,
      "step": 3300
    },
    {
      "epoch": 2.48,
      "learning_rate": 0.0002,
      "loss": 2.0749,
      "step": 3400
    },
    {
      "epoch": 2.48,
      "eval_loss": 2.1194825172424316,
      "eval_runtime": 208.2795,
      "eval_samples_per_second": 30.123,
      "eval_steps_per_second": 3.769,
      "step": 3400
    },
    {
      "epoch": 2.55,
      "learning_rate": 0.0002,
      "loss": 2.072,
      "step": 3500
    },
    {
      "epoch": 2.55,
      "eval_loss": 2.106606960296631,
      "eval_runtime": 205.5389,
      "eval_samples_per_second": 30.525,
      "eval_steps_per_second": 3.819,
      "step": 3500
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.0002,
      "loss": 2.0712,
      "step": 3600
    },
    {
      "epoch": 2.62,
      "eval_loss": 2.096245765686035,
      "eval_runtime": 205.0401,
      "eval_samples_per_second": 30.599,
      "eval_steps_per_second": 3.829,
      "step": 3600
    },
    {
      "epoch": 2.7,
      "learning_rate": 0.0002,
      "loss": 2.0508,
      "step": 3700
    },
    {
      "epoch": 2.7,
      "eval_loss": 2.086164951324463,
      "eval_runtime": 206.1713,
      "eval_samples_per_second": 30.431,
      "eval_steps_per_second": 3.808,
      "step": 3700
    },
    {
      "epoch": 2.77,
      "learning_rate": 0.0002,
      "loss": 2.0398,
      "step": 3800
    },
    {
      "epoch": 2.77,
      "eval_loss": 2.0767734050750732,
      "eval_runtime": 205.3418,
      "eval_samples_per_second": 30.554,
      "eval_steps_per_second": 3.823,
      "step": 3800
    },
    {
      "epoch": 2.84,
      "learning_rate": 0.0002,
      "loss": 2.0276,
      "step": 3900
    },
    {
      "epoch": 2.84,
      "eval_loss": 2.0676474571228027,
      "eval_runtime": 205.4014,
      "eval_samples_per_second": 30.545,
      "eval_steps_per_second": 3.822,
      "step": 3900
    },
    {
      "epoch": 2.91,
      "learning_rate": 0.0002,
      "loss": 2.0277,
      "step": 4000
    },
    {
      "epoch": 2.91,
      "eval_loss": 2.0607223510742188,
      "eval_runtime": 205.2677,
      "eval_samples_per_second": 30.565,
      "eval_steps_per_second": 3.824,
      "step": 4000
    },
    {
      "epoch": 2.99,
      "learning_rate": 0.0002,
      "loss": 2.0315,
      "step": 4100
    },
    {
      "epoch": 2.99,
      "eval_loss": 2.0496439933776855,
      "eval_runtime": 205.7242,
      "eval_samples_per_second": 30.497,
      "eval_steps_per_second": 3.816,
      "step": 4100
    }
  ],
  "logging_steps": 100,
  "max_steps": 4116,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 1.1671768877819904e+17,
  "trial_name": null,
  "trial_params": null
}