{
  "best_metric": 1.5988867282867432,
  "best_model_checkpoint": "detr-r50-cd45rb-8ah-6l-gelu-corrected\\checkpoint-87514",
  "epoch": 20.0,
  "global_step": 92120,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 9.00238818931828e-06,
      "loss": 3.0438,
      "step": 4606
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.9413050413131714,
      "eval_runtime": 201.9196,
      "eval_samples_per_second": 8.82,
      "eval_steps_per_second": 1.104,
      "step": 4606
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.002822405557968e-06,
      "loss": 2.3933,
      "step": 9212
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.8238338232040405,
      "eval_runtime": 202.1836,
      "eval_samples_per_second": 8.809,
      "eval_steps_per_second": 1.103,
      "step": 9212
    },
    {
      "epoch": 3.0,
      "learning_rate": 7.003039513677812e-06,
      "loss": 2.2782,
      "step": 13818
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.771848440170288,
      "eval_runtime": 202.1516,
      "eval_samples_per_second": 8.81,
      "eval_steps_per_second": 1.103,
      "step": 13818
    },
    {
      "epoch": 4.0,
      "learning_rate": 6.003473729917499e-06,
      "loss": 2.2383,
      "step": 18424
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.752848505973816,
      "eval_runtime": 202.2922,
      "eval_samples_per_second": 8.804,
      "eval_steps_per_second": 1.102,
      "step": 18424
    },
    {
      "epoch": 5.0,
      "learning_rate": 5.003690838037343e-06,
      "loss": 2.2046,
      "step": 23030
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.7264593839645386,
      "eval_runtime": 201.788,
      "eval_samples_per_second": 8.826,
      "eval_steps_per_second": 1.105,
      "step": 23030
    },
    {
      "epoch": 6.0,
      "learning_rate": 4.0041250542770305e-06,
      "loss": 2.1659,
      "step": 27636
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.7125098705291748,
      "eval_runtime": 202.0053,
      "eval_samples_per_second": 8.817,
      "eval_steps_per_second": 1.104,
      "step": 27636
    },
    {
      "epoch": 7.0,
      "learning_rate": 3.0045592705167175e-06,
      "loss": 2.1457,
      "step": 32242
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.6760411262512207,
      "eval_runtime": 201.2859,
      "eval_samples_per_second": 8.848,
      "eval_steps_per_second": 1.108,
      "step": 32242
    },
    {
      "epoch": 8.0,
      "learning_rate": 2.0047763786365613e-06,
      "loss": 2.1111,
      "step": 36848
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.6621686220169067,
      "eval_runtime": 201.2613,
      "eval_samples_per_second": 8.849,
      "eval_steps_per_second": 1.108,
      "step": 36848
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.0052105948762484e-06,
      "loss": 2.0959,
      "step": 41454
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.6466784477233887,
      "eval_runtime": 201.3173,
      "eval_samples_per_second": 8.847,
      "eval_steps_per_second": 1.108,
      "step": 41454
    },
    {
      "epoch": 10.0,
      "learning_rate": 5.427702996092055e-09,
      "loss": 2.0826,
      "step": 46060
    },
    {
      "epoch": 10.0,
      "eval_loss": 1.639158010482788,
      "eval_runtime": 201.0423,
      "eval_samples_per_second": 8.859,
      "eval_steps_per_second": 1.109,
      "step": 46060
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.50293095961789e-06,
      "loss": 2.1132,
      "step": 50666
    },
    {
      "epoch": 11.0,
      "eval_loss": 1.687514305114746,
      "eval_runtime": 203.299,
      "eval_samples_per_second": 8.76,
      "eval_steps_per_second": 1.097,
      "step": 50666
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.003039513677812e-06,
      "loss": 2.1145,
      "step": 55272
    },
    {
      "epoch": 12.0,
      "eval_loss": 1.6863107681274414,
      "eval_runtime": 203.0628,
      "eval_samples_per_second": 8.771,
      "eval_steps_per_second": 1.098,
      "step": 55272
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.5032566217976557e-06,
      "loss": 2.0947,
      "step": 59878
    },
    {
      "epoch": 13.0,
      "eval_loss": 1.6527632474899292,
      "eval_runtime": 203.1785,
      "eval_samples_per_second": 8.766,
      "eval_steps_per_second": 1.098,
      "step": 59878
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.003582283977421e-06,
      "loss": 2.0777,
      "step": 64484
    },
    {
      "epoch": 14.0,
      "eval_loss": 1.666866660118103,
      "eval_runtime": 202.7783,
      "eval_samples_per_second": 8.783,
      "eval_steps_per_second": 1.1,
      "step": 64484
    },
    {
      "epoch": 15.0,
      "learning_rate": 2.5036908380373426e-06,
      "loss": 2.0551,
      "step": 69090
    },
    {
      "epoch": 15.0,
      "eval_loss": 1.6240596771240234,
      "eval_runtime": 203.6325,
      "eval_samples_per_second": 8.746,
      "eval_steps_per_second": 1.095,
      "step": 69090
    },
    {
      "epoch": 16.0,
      "learning_rate": 2.0037993920972645e-06,
      "loss": 2.0567,
      "step": 73696
    },
    {
      "epoch": 16.0,
      "eval_loss": 1.6240826845169067,
      "eval_runtime": 203.7113,
      "eval_samples_per_second": 8.743,
      "eval_steps_per_second": 1.095,
      "step": 73696
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.504233608336952e-06,
      "loss": 2.042,
      "step": 78302
    },
    {
      "epoch": 17.0,
      "eval_loss": 1.6171499490737915,
      "eval_runtime": 203.6475,
      "eval_samples_per_second": 8.746,
      "eval_steps_per_second": 1.095,
      "step": 78302
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.004233608336952e-06,
      "loss": 2.0306,
      "step": 82908
    },
    {
      "epoch": 18.0,
      "eval_loss": 1.6062376499176025,
      "eval_runtime": 203.3215,
      "eval_samples_per_second": 8.76,
      "eval_steps_per_second": 1.097,
      "step": 82908
    },
    {
      "epoch": 19.0,
      "learning_rate": 5.043421623968737e-07,
      "loss": 2.015,
      "step": 87514
    },
    {
      "epoch": 19.0,
      "eval_loss": 1.5988867282867432,
      "eval_runtime": 203.4303,
      "eval_samples_per_second": 8.755,
      "eval_steps_per_second": 1.096,
      "step": 87514
    },
    {
      "epoch": 20.0,
      "learning_rate": 4.559270516717325e-09,
      "loss": 2.0084,
      "step": 92120
    },
    {
      "epoch": 20.0,
      "eval_loss": 1.599236011505127,
      "eval_runtime": 202.2872,
      "eval_samples_per_second": 8.804,
      "eval_steps_per_second": 1.102,
      "step": 92120
    },
    {
      "epoch": 20.0,
      "step": 92120,
      "total_flos": 1.7606512956165125e+20,
      "train_loss": 1.0303937374484369,
      "train_runtime": 29465.5365,
      "train_samples_per_second": 12.503,
      "train_steps_per_second": 3.126
    }
  ],
  "max_steps": 92120,
  "num_train_epochs": 20,
  "total_flos": 1.7606512956165125e+20,
  "trial_name": null,
  "trial_params": null
}