{
  "best_metric": 0.9851851851851852,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-eurosat/checkpoint-588",
  "epoch": 4.99492385786802,
  "global_step": 735,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 2.2699,
      "step": 10
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 2.0753,
      "step": 20
    },
    {
      "epoch": 0.2,
      "learning_rate": 2.0270270270270273e-05,
      "loss": 1.6254,
      "step": 30
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.702702702702703e-05,
      "loss": 0.9696,
      "step": 40
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.3783783783783784e-05,
      "loss": 0.529,
      "step": 50
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.0540540540540545e-05,
      "loss": 0.358,
      "step": 60
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.72972972972973e-05,
      "loss": 0.24,
      "step": 70
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.9546142208774585e-05,
      "loss": 0.2678,
      "step": 80
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.878971255673222e-05,
      "loss": 0.2537,
      "step": 90
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.803328290468986e-05,
      "loss": 0.1921,
      "step": 100
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.7276853252647505e-05,
      "loss": 0.1691,
      "step": 110
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.652042360060514e-05,
      "loss": 0.1716,
      "step": 120
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.576399394856278e-05,
      "loss": 0.1516,
      "step": 130
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.5007564296520425e-05,
      "loss": 0.1547,
      "step": 140
    },
    {
      "epoch": 0.99,
      "eval_accuracy": 0.9711111111111111,
      "eval_loss": 0.09559287130832672,
      "eval_runtime": 38.4532,
      "eval_samples_per_second": 105.323,
      "eval_steps_per_second": 3.303,
      "step": 147
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.425113464447806e-05,
      "loss": 0.1434,
      "step": 150
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.34947049924357e-05,
      "loss": 0.0951,
      "step": 160
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.2738275340393345e-05,
      "loss": 0.1042,
      "step": 170
    },
    {
      "epoch": 1.22,
      "learning_rate": 4.198184568835098e-05,
      "loss": 0.0972,
      "step": 180
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.122541603630862e-05,
      "loss": 0.0714,
      "step": 190
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.0468986384266265e-05,
      "loss": 0.1095,
      "step": 200
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.97125567322239e-05,
      "loss": 0.0779,
      "step": 210
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.895612708018154e-05,
      "loss": 0.0869,
      "step": 220
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.8199697428139184e-05,
      "loss": 0.0936,
      "step": 230
    },
    {
      "epoch": 1.63,
      "learning_rate": 3.744326777609682e-05,
      "loss": 0.092,
      "step": 240
    },
    {
      "epoch": 1.7,
      "learning_rate": 3.668683812405446e-05,
      "loss": 0.1006,
      "step": 250
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.5930408472012104e-05,
      "loss": 0.0909,
      "step": 260
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.517397881996974e-05,
      "loss": 0.0705,
      "step": 270
    },
    {
      "epoch": 1.9,
      "learning_rate": 3.441754916792738e-05,
      "loss": 0.0935,
      "step": 280
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.3661119515885024e-05,
      "loss": 0.0707,
      "step": 290
    },
    {
      "epoch": 1.99,
      "eval_accuracy": 0.9733333333333334,
      "eval_loss": 0.07587520033121109,
      "eval_runtime": 25.8049,
      "eval_samples_per_second": 156.947,
      "eval_steps_per_second": 4.922,
      "step": 294
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.290468986384266e-05,
      "loss": 0.0665,
      "step": 300
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.21482602118003e-05,
      "loss": 0.0556,
      "step": 310
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.1391830559757944e-05,
      "loss": 0.0517,
      "step": 320
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.063540090771558e-05,
      "loss": 0.0705,
      "step": 330
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.9878971255673223e-05,
      "loss": 0.071,
      "step": 340
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.9122541603630864e-05,
      "loss": 0.0466,
      "step": 350
    },
    {
      "epoch": 2.45,
      "learning_rate": 2.83661119515885e-05,
      "loss": 0.0351,
      "step": 360
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.7609682299546143e-05,
      "loss": 0.0698,
      "step": 370
    },
    {
      "epoch": 2.58,
      "learning_rate": 2.6853252647503784e-05,
      "loss": 0.0423,
      "step": 380
    },
    {
      "epoch": 2.65,
      "learning_rate": 2.609682299546142e-05,
      "loss": 0.0475,
      "step": 390
    },
    {
      "epoch": 2.72,
      "learning_rate": 2.5340393343419063e-05,
      "loss": 0.0491,
      "step": 400
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.45839636913767e-05,
      "loss": 0.0403,
      "step": 410
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.382753403933434e-05,
      "loss": 0.0569,
      "step": 420
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.3071104387291982e-05,
      "loss": 0.0509,
      "step": 430
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.231467473524962e-05,
      "loss": 0.0537,
      "step": 440
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.9767901234567902,
      "eval_loss": 0.06802858412265778,
      "eval_runtime": 35.3363,
      "eval_samples_per_second": 114.613,
      "eval_steps_per_second": 3.594,
      "step": 441
    },
    {
      "epoch": 3.06,
      "learning_rate": 2.155824508320726e-05,
      "loss": 0.0502,
      "step": 450
    },
    {
      "epoch": 3.13,
      "learning_rate": 2.0801815431164902e-05,
      "loss": 0.0452,
      "step": 460
    },
    {
      "epoch": 3.2,
      "learning_rate": 2.004538577912254e-05,
      "loss": 0.0352,
      "step": 470
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.928895612708018e-05,
      "loss": 0.0385,
      "step": 480
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.8532526475037822e-05,
      "loss": 0.0441,
      "step": 490
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.777609682299546e-05,
      "loss": 0.0394,
      "step": 500
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.70196671709531e-05,
      "loss": 0.0351,
      "step": 510
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.6263237518910742e-05,
      "loss": 0.0251,
      "step": 520
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.550680786686838e-05,
      "loss": 0.044,
      "step": 530
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.4750378214826023e-05,
      "loss": 0.0246,
      "step": 540
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.3993948562783662e-05,
      "loss": 0.0252,
      "step": 550
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.3237518910741301e-05,
      "loss": 0.0305,
      "step": 560
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.248108925869894e-05,
      "loss": 0.0219,
      "step": 570
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.172465960665658e-05,
      "loss": 0.0302,
      "step": 580
    },
    {
      "epoch": 3.99,
      "eval_accuracy": 0.9851851851851852,
      "eval_loss": 0.044658709317445755,
      "eval_runtime": 25.6563,
      "eval_samples_per_second": 157.856,
      "eval_steps_per_second": 4.95,
      "step": 588
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.0968229954614221e-05,
      "loss": 0.0154,
      "step": 590
    },
    {
      "epoch": 4.08,
      "learning_rate": 1.021180030257186e-05,
      "loss": 0.0225,
      "step": 600
    },
    {
      "epoch": 4.15,
      "learning_rate": 9.4553706505295e-06,
      "loss": 0.0181,
      "step": 610
    },
    {
      "epoch": 4.22,
      "learning_rate": 8.698940998487141e-06,
      "loss": 0.0199,
      "step": 620
    },
    {
      "epoch": 4.28,
      "learning_rate": 7.94251134644478e-06,
      "loss": 0.0254,
      "step": 630
    },
    {
      "epoch": 4.35,
      "learning_rate": 7.186081694402421e-06,
      "loss": 0.0244,
      "step": 640
    },
    {
      "epoch": 4.42,
      "learning_rate": 6.429652042360061e-06,
      "loss": 0.0181,
      "step": 650
    },
    {
      "epoch": 4.49,
      "learning_rate": 5.6732223903177e-06,
      "loss": 0.0154,
      "step": 660
    },
    {
      "epoch": 4.55,
      "learning_rate": 4.9167927382753406e-06,
      "loss": 0.0164,
      "step": 670
    },
    {
      "epoch": 4.62,
      "learning_rate": 4.16036308623298e-06,
      "loss": 0.0111,
      "step": 680
    },
    {
      "epoch": 4.69,
      "learning_rate": 3.40393343419062e-06,
      "loss": 0.0199,
      "step": 690
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.64750378214826e-06,
      "loss": 0.0191,
      "step": 700
    },
    {
      "epoch": 4.83,
      "learning_rate": 1.8910741301059002e-06,
      "loss": 0.0171,
      "step": 710
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.13464447806354e-06,
      "loss": 0.0164,
      "step": 720
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.7821482602118004e-07,
      "loss": 0.0225,
      "step": 730
    },
    {
      "epoch": 4.99,
      "eval_accuracy": 0.9837037037037037,
      "eval_loss": 0.048928551375865936,
      "eval_runtime": 25.2637,
      "eval_samples_per_second": 160.309,
      "eval_steps_per_second": 5.027,
      "step": 735
    },
    {
      "epoch": 4.99,
      "step": 735,
      "total_flos": 2.347327658721411e+18,
      "train_loss": 0.1689021515602968,
      "train_runtime": 1731.9704,
      "train_samples_per_second": 54.562,
      "train_steps_per_second": 0.424
    }
  ],
  "max_steps": 735,
  "num_train_epochs": 5,
  "total_flos": 2.347327658721411e+18,
  "trial_name": null,
  "trial_params": null
}