{
  "best_metric": 1.5861479043960571,
  "best_model_checkpoint": "detr-r50-cd45rb-8ah-6l-gelu-corrected\\checkpoint-115150",
  "epoch": 25.0,
  "global_step": 115150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 9.00238818931828e-06,
      "loss": 3.0438,
      "step": 4606
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.9413050413131714,
      "eval_runtime": 201.9196,
      "eval_samples_per_second": 8.82,
      "eval_steps_per_second": 1.104,
      "step": 4606
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.002822405557968e-06,
      "loss": 2.3933,
      "step": 9212
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.8238338232040405,
      "eval_runtime": 202.1836,
      "eval_samples_per_second": 8.809,
      "eval_steps_per_second": 1.103,
      "step": 9212
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.003039513677812e-06,
      "loss": 2.2782,
      "step": 13818
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.771848440170288,
      "eval_runtime": 202.1516,
      "eval_samples_per_second": 8.81,
      "eval_steps_per_second": 1.103,
      "step": 13818
    },
    {
      "epoch": 4.0,
      "learning_rate": 6.003473729917499e-06,
      "loss": 2.2383,
      "step": 18424
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.752848505973816,
      "eval_runtime": 202.2922,
      "eval_samples_per_second": 8.804,
      "eval_steps_per_second": 1.102,
      "step": 18424
    },
    {
      "epoch": 5.0,
      "learning_rate": 5.003690838037343e-06,
      "loss": 2.2046,
      "step": 23030
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.7264593839645386,
      "eval_runtime": 201.788,
      "eval_samples_per_second": 8.826,
      "eval_steps_per_second": 1.105,
      "step": 23030
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.0041250542770305e-06,
      "loss": 2.1659,
      "step": 27636
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.7125098705291748,
      "eval_runtime": 202.0053,
      "eval_samples_per_second": 8.817,
      "eval_steps_per_second": 1.104,
      "step": 27636
    },
    {
      "epoch": 7.0,
      "learning_rate": 3.0045592705167175e-06,
      "loss": 2.1457,
      "step": 32242
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.6760411262512207,
      "eval_runtime": 201.2859,
      "eval_samples_per_second": 8.848,
      "eval_steps_per_second": 1.108,
      "step": 32242
    },
    {
      "epoch": 8.0,
      "learning_rate": 2.0047763786365613e-06,
      "loss": 2.1111,
      "step": 36848
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.6621686220169067,
      "eval_runtime": 201.2613,
      "eval_samples_per_second": 8.849,
      "eval_steps_per_second": 1.108,
      "step": 36848
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.0052105948762484e-06,
      "loss": 2.0959,
      "step": 41454
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.6466784477233887,
      "eval_runtime": 201.3173,
      "eval_samples_per_second": 8.847,
      "eval_steps_per_second": 1.108,
      "step": 41454
    },
    {
      "epoch": 10.0,
      "learning_rate": 5.427702996092055e-09,
      "loss": 2.0826,
      "step": 46060
    },
    {
      "epoch": 10.0,
      "eval_loss": 1.639158010482788,
      "eval_runtime": 201.0423,
      "eval_samples_per_second": 8.859,
      "eval_steps_per_second": 1.109,
      "step": 46060
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.50293095961789e-06,
      "loss": 2.1132,
      "step": 50666
    },
    {
      "epoch": 11.0,
      "eval_loss": 1.687514305114746,
      "eval_runtime": 203.299,
      "eval_samples_per_second": 8.76,
      "eval_steps_per_second": 1.097,
      "step": 50666
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.003039513677812e-06,
      "loss": 2.1145,
      "step": 55272
    },
    {
      "epoch": 12.0,
      "eval_loss": 1.6863107681274414,
      "eval_runtime": 203.0628,
      "eval_samples_per_second": 8.771,
      "eval_steps_per_second": 1.098,
      "step": 55272
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.5032566217976557e-06,
      "loss": 2.0947,
      "step": 59878
    },
    {
      "epoch": 13.0,
      "eval_loss": 1.6527632474899292,
      "eval_runtime": 203.1785,
      "eval_samples_per_second": 8.766,
      "eval_steps_per_second": 1.098,
      "step": 59878
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.003582283977421e-06,
      "loss": 2.0777,
      "step": 64484
    },
    {
      "epoch": 14.0,
      "eval_loss": 1.666866660118103,
      "eval_runtime": 202.7783,
      "eval_samples_per_second": 8.783,
      "eval_steps_per_second": 1.1,
      "step": 64484
    },
    {
      "epoch": 15.0,
      "learning_rate": 2.5036908380373426e-06,
      "loss": 2.0551,
      "step": 69090
    },
    {
      "epoch": 15.0,
      "eval_loss": 1.6240596771240234,
      "eval_runtime": 203.6325,
      "eval_samples_per_second": 8.746,
      "eval_steps_per_second": 1.095,
      "step": 69090
    },
    {
      "epoch": 16.0,
      "learning_rate": 2.0037993920972645e-06,
      "loss": 2.0567,
      "step": 73696
    },
    {
      "epoch": 16.0,
      "eval_loss": 1.6240826845169067,
      "eval_runtime": 203.7113,
      "eval_samples_per_second": 8.743,
      "eval_steps_per_second": 1.095,
      "step": 73696
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.504233608336952e-06,
      "loss": 2.042,
      "step": 78302
    },
    {
      "epoch": 17.0,
      "eval_loss": 1.6171499490737915,
      "eval_runtime": 203.6475,
      "eval_samples_per_second": 8.746,
      "eval_steps_per_second": 1.095,
      "step": 78302
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.004233608336952e-06,
      "loss": 2.0306,
      "step": 82908
    },
    {
      "epoch": 18.0,
      "eval_loss": 1.6062376499176025,
      "eval_runtime": 203.3215,
      "eval_samples_per_second": 8.76,
      "eval_steps_per_second": 1.097,
      "step": 82908
    },
    {
      "epoch": 19.0,
      "learning_rate": 5.043421623968737e-07,
      "loss": 2.015,
      "step": 87514
    },
    {
      "epoch": 19.0,
      "eval_loss": 1.5988867282867432,
      "eval_runtime": 203.4303,
      "eval_samples_per_second": 8.755,
      "eval_steps_per_second": 1.096,
      "step": 87514
    },
    {
      "epoch": 20.0,
      "learning_rate": 2.0037342596613115e-06,
      "loss": 2.0206,
      "step": 92120
    },
    {
      "epoch": 20.0,
      "eval_loss": 1.6168476343154907,
      "eval_runtime": 207.1893,
      "eval_samples_per_second": 8.596,
      "eval_steps_per_second": 1.076,
      "step": 92120
    },
    {
      "epoch": 21.0,
      "learning_rate": 1.6039947894051239e-06,
      "loss": 2.026,
      "step": 96726
    },
    {
      "epoch": 21.0,
      "eval_loss": 1.6022319793701172,
      "eval_runtime": 206.9802,
      "eval_samples_per_second": 8.605,
      "eval_steps_per_second": 1.077,
      "step": 96726
    },
    {
      "epoch": 22.0,
      "learning_rate": 1.2041684759009988e-06,
      "loss": 2.0109,
      "step": 101332
    },
    {
      "epoch": 22.0,
      "eval_loss": 1.5995537042617798,
      "eval_runtime": 206.8836,
      "eval_samples_per_second": 8.609,
      "eval_steps_per_second": 1.078,
      "step": 101332
    },
    {
      "epoch": 23.0,
      "learning_rate": 8.042553191489362e-07,
      "loss": 2.0133,
      "step": 105938
    },
    {
      "epoch": 23.0,
      "eval_loss": 1.5983381271362305,
      "eval_runtime": 206.4758,
      "eval_samples_per_second": 8.626,
      "eval_steps_per_second": 1.08,
      "step": 105938
    },
    {
      "epoch": 24.0,
      "learning_rate": 4.043421623968737e-07,
      "loss": 2.0081,
      "step": 110544
    },
    {
      "epoch": 24.0,
      "eval_loss": 1.5887646675109863,
      "eval_runtime": 203.9191,
      "eval_samples_per_second": 8.734,
      "eval_steps_per_second": 1.094,
      "step": 110544
    },
    {
      "epoch": 25.0,
      "learning_rate": 4.515848892748589e-09,
      "loss": 1.9975,
      "step": 115150
    },
    {
      "epoch": 25.0,
      "eval_loss": 1.5861479043960571,
      "eval_runtime": 204.8681,
      "eval_samples_per_second": 8.693,
      "eval_steps_per_second": 1.089,
      "step": 115150
    },
    {
      "epoch": 25.0,
      "step": 115150,
      "total_flos": 2.2008141195206407e+20,
      "train_loss": 0.4830571452317629,
      "train_runtime": 17891.958,
      "train_samples_per_second": 25.739,
      "train_steps_per_second": 6.436
    }
  ],
  "max_steps": 115150,
  "num_train_epochs": 25,
  "total_flos": 2.2008141195206407e+20,
  "trial_name": null,
  "trial_params": null
}