{
  "best_metric": 0.9975461431772111,
  "best_model_checkpoint": "PlantDiseaseDetectorSwinv2/checkpoint-879",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 879,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.034129692832764506,
      "grad_norm": 2.5705349445343018,
      "learning_rate": 5.681818181818182e-06,
      "loss": 3.8022,
      "step": 10
    },
    {
      "epoch": 0.06825938566552901,
      "grad_norm": 2.408888816833496,
      "learning_rate": 1.1363636363636365e-05,
      "loss": 3.7314,
      "step": 20
    },
    {
      "epoch": 0.10238907849829351,
      "grad_norm": 3.7745347023010254,
      "learning_rate": 1.7045454545454546e-05,
      "loss": 3.5832,
      "step": 30
    },
    {
      "epoch": 0.13651877133105803,
      "grad_norm": 5.221632480621338,
      "learning_rate": 2.272727272727273e-05,
      "loss": 3.279,
      "step": 40
    },
    {
      "epoch": 0.17064846416382254,
      "grad_norm": 5.78312873840332,
      "learning_rate": 2.8409090909090912e-05,
      "loss": 2.6879,
      "step": 50
    },
    {
      "epoch": 0.20477815699658702,
      "grad_norm": 6.596199989318848,
      "learning_rate": 3.409090909090909e-05,
      "loss": 1.8838,
      "step": 60
    },
    {
      "epoch": 0.23890784982935154,
      "grad_norm": 6.6335601806640625,
      "learning_rate": 3.9772727272727275e-05,
      "loss": 1.2296,
      "step": 70
    },
    {
      "epoch": 0.27303754266211605,
      "grad_norm": 5.116641044616699,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.7926,
      "step": 80
    },
    {
      "epoch": 0.30716723549488056,
      "grad_norm": 5.051211833953857,
      "learning_rate": 4.9873577749683945e-05,
      "loss": 0.5453,
      "step": 90
    },
    {
      "epoch": 0.3412969283276451,
      "grad_norm": 5.704665660858154,
      "learning_rate": 4.924146649810367e-05,
      "loss": 0.427,
      "step": 100
    },
    {
      "epoch": 0.37542662116040953,
      "grad_norm": 4.636778354644775,
      "learning_rate": 4.860935524652339e-05,
      "loss": 0.3498,
      "step": 110
    },
    {
      "epoch": 0.40955631399317405,
      "grad_norm": 4.307668209075928,
      "learning_rate": 4.797724399494311e-05,
      "loss": 0.2841,
      "step": 120
    },
    {
      "epoch": 0.44368600682593856,
      "grad_norm": 3.284649610519409,
      "learning_rate": 4.734513274336283e-05,
      "loss": 0.2521,
      "step": 130
    },
    {
      "epoch": 0.4778156996587031,
      "grad_norm": 3.6471190452575684,
      "learning_rate": 4.6713021491782554e-05,
      "loss": 0.2052,
      "step": 140
    },
    {
      "epoch": 0.5119453924914675,
      "grad_norm": 3.599177837371826,
      "learning_rate": 4.608091024020228e-05,
      "loss": 0.2033,
      "step": 150
    },
    {
      "epoch": 0.5460750853242321,
      "grad_norm": 3.1162948608398438,
      "learning_rate": 4.5448798988622e-05,
      "loss": 0.1772,
      "step": 160
    },
    {
      "epoch": 0.5802047781569966,
      "grad_norm": 3.1850969791412354,
      "learning_rate": 4.4816687737041726e-05,
      "loss": 0.1589,
      "step": 170
    },
    {
      "epoch": 0.6143344709897611,
      "grad_norm": 3.5049831867218018,
      "learning_rate": 4.418457648546144e-05,
      "loss": 0.147,
      "step": 180
    },
    {
      "epoch": 0.6484641638225256,
      "grad_norm": 3.1032307147979736,
      "learning_rate": 4.355246523388117e-05,
      "loss": 0.158,
      "step": 190
    },
    {
      "epoch": 0.6825938566552902,
      "grad_norm": 4.088365077972412,
      "learning_rate": 4.2920353982300885e-05,
      "loss": 0.1373,
      "step": 200
    },
    {
      "epoch": 0.7167235494880546,
      "grad_norm": 3.2553369998931885,
      "learning_rate": 4.2288242730720607e-05,
      "loss": 0.137,
      "step": 210
    },
    {
      "epoch": 0.7508532423208191,
      "grad_norm": 3.4594857692718506,
      "learning_rate": 4.165613147914033e-05,
      "loss": 0.1241,
      "step": 220
    },
    {
      "epoch": 0.7849829351535836,
      "grad_norm": 3.20019793510437,
      "learning_rate": 4.102402022756005e-05,
      "loss": 0.1049,
      "step": 230
    },
    {
      "epoch": 0.8191126279863481,
      "grad_norm": 2.817979335784912,
      "learning_rate": 4.039190897597978e-05,
      "loss": 0.1055,
      "step": 240
    },
    {
      "epoch": 0.8532423208191127,
      "grad_norm": 2.961674451828003,
      "learning_rate": 3.9759797724399494e-05,
      "loss": 0.1116,
      "step": 250
    },
    {
      "epoch": 0.8873720136518771,
      "grad_norm": 1.5384647846221924,
      "learning_rate": 3.912768647281922e-05,
      "loss": 0.1147,
      "step": 260
    },
    {
      "epoch": 0.9215017064846417,
      "grad_norm": 2.3770840167999268,
      "learning_rate": 3.849557522123894e-05,
      "loss": 0.0816,
      "step": 270
    },
    {
      "epoch": 0.9556313993174061,
      "grad_norm": 1.9292172193527222,
      "learning_rate": 3.7863463969658666e-05,
      "loss": 0.1042,
      "step": 280
    },
    {
      "epoch": 0.9897610921501706,
      "grad_norm": 1.9912846088409424,
      "learning_rate": 3.723135271807838e-05,
      "loss": 0.082,
      "step": 290
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.989971193854689,
      "eval_loss": 0.030839812010526657,
      "eval_runtime": 85.4261,
      "eval_samples_per_second": 109.721,
      "eval_steps_per_second": 1.721,
      "step": 293
    },
    {
      "epoch": 1.023890784982935,
      "grad_norm": 2.500840902328491,
      "learning_rate": 3.659924146649811e-05,
      "loss": 0.0819,
      "step": 300
    },
    {
      "epoch": 1.0580204778156996,
      "grad_norm": 2.5703957080841064,
      "learning_rate": 3.5967130214917824e-05,
      "loss": 0.0824,
      "step": 310
    },
    {
      "epoch": 1.0921501706484642,
      "grad_norm": 2.538381338119507,
      "learning_rate": 3.533501896333755e-05,
      "loss": 0.0781,
      "step": 320
    },
    {
      "epoch": 1.1262798634812285,
      "grad_norm": 2.861043691635132,
      "learning_rate": 3.470290771175727e-05,
      "loss": 0.088,
      "step": 330
    },
    {
      "epoch": 1.1604095563139931,
      "grad_norm": 1.385380744934082,
      "learning_rate": 3.407079646017699e-05,
      "loss": 0.0597,
      "step": 340
    },
    {
      "epoch": 1.1945392491467577,
      "grad_norm": 2.4261908531188965,
      "learning_rate": 3.343868520859672e-05,
      "loss": 0.0553,
      "step": 350
    },
    {
      "epoch": 1.2286689419795223,
      "grad_norm": 2.731848955154419,
      "learning_rate": 3.280657395701643e-05,
      "loss": 0.0626,
      "step": 360
    },
    {
      "epoch": 1.2627986348122868,
      "grad_norm": 2.3727986812591553,
      "learning_rate": 3.217446270543616e-05,
      "loss": 0.0557,
      "step": 370
    },
    {
      "epoch": 1.2969283276450512,
      "grad_norm": 2.083387851715088,
      "learning_rate": 3.1542351453855877e-05,
      "loss": 0.0613,
      "step": 380
    },
    {
      "epoch": 1.3310580204778157,
      "grad_norm": 2.408942699432373,
      "learning_rate": 3.0910240202275605e-05,
      "loss": 0.0563,
      "step": 390
    },
    {
      "epoch": 1.36518771331058,
      "grad_norm": 3.069532632827759,
      "learning_rate": 3.0278128950695323e-05,
      "loss": 0.0562,
      "step": 400
    },
    {
      "epoch": 1.3993174061433447,
      "grad_norm": 1.5006905794143677,
      "learning_rate": 2.964601769911505e-05,
      "loss": 0.058,
      "step": 410
    },
    {
      "epoch": 1.4334470989761092,
      "grad_norm": 1.9760173559188843,
      "learning_rate": 2.9013906447534767e-05,
      "loss": 0.0506,
      "step": 420
    },
    {
      "epoch": 1.4675767918088738,
      "grad_norm": 1.7346736192703247,
      "learning_rate": 2.8381795195954492e-05,
      "loss": 0.0583,
      "step": 430
    },
    {
      "epoch": 1.5017064846416384,
      "grad_norm": 1.4648014307022095,
      "learning_rate": 2.774968394437421e-05,
      "loss": 0.0543,
      "step": 440
    },
    {
      "epoch": 1.5358361774744027,
      "grad_norm": 0.9175078272819519,
      "learning_rate": 2.7117572692793936e-05,
      "loss": 0.0524,
      "step": 450
    },
    {
      "epoch": 1.5699658703071673,
      "grad_norm": 2.2401745319366455,
      "learning_rate": 2.6485461441213654e-05,
      "loss": 0.0509,
      "step": 460
    },
    {
      "epoch": 1.6040955631399316,
      "grad_norm": 1.0729519128799438,
      "learning_rate": 2.5853350189633372e-05,
      "loss": 0.0465,
      "step": 470
    },
    {
      "epoch": 1.6382252559726962,
      "grad_norm": 1.1426705121994019,
      "learning_rate": 2.5221238938053098e-05,
      "loss": 0.0492,
      "step": 480
    },
    {
      "epoch": 1.6723549488054608,
      "grad_norm": 2.0277466773986816,
      "learning_rate": 2.458912768647282e-05,
      "loss": 0.0408,
      "step": 490
    },
    {
      "epoch": 1.7064846416382253,
      "grad_norm": 1.5704808235168457,
      "learning_rate": 2.3957016434892544e-05,
      "loss": 0.0504,
      "step": 500
    },
    {
      "epoch": 1.74061433447099,
      "grad_norm": 1.7388916015625,
      "learning_rate": 2.3324905183312266e-05,
      "loss": 0.0516,
      "step": 510
    },
    {
      "epoch": 1.7747440273037542,
      "grad_norm": 1.4998822212219238,
      "learning_rate": 2.2692793931731988e-05,
      "loss": 0.038,
      "step": 520
    },
    {
      "epoch": 1.8088737201365188,
      "grad_norm": 1.8269169330596924,
      "learning_rate": 2.206068268015171e-05,
      "loss": 0.0419,
      "step": 530
    },
    {
      "epoch": 1.8430034129692832,
      "grad_norm": 1.9325296878814697,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.0422,
      "step": 540
    },
    {
      "epoch": 1.8771331058020477,
      "grad_norm": 2.244913101196289,
      "learning_rate": 2.079646017699115e-05,
      "loss": 0.0464,
      "step": 550
    },
    {
      "epoch": 1.9112627986348123,
      "grad_norm": 1.4655271768569946,
      "learning_rate": 2.016434892541087e-05,
      "loss": 0.035,
      "step": 560
    },
    {
      "epoch": 1.9453924914675769,
      "grad_norm": 1.888059377670288,
      "learning_rate": 1.9532237673830593e-05,
      "loss": 0.0479,
      "step": 570
    },
    {
      "epoch": 1.9795221843003414,
      "grad_norm": 2.545102596282959,
      "learning_rate": 1.8900126422250315e-05,
      "loss": 0.0427,
      "step": 580
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9955190440627334,
      "eval_loss": 0.011397087946534157,
      "eval_runtime": 82.3517,
      "eval_samples_per_second": 113.817,
      "eval_steps_per_second": 1.785,
      "step": 586
    },
    {
      "epoch": 2.013651877133106,
      "grad_norm": 1.9391770362854004,
      "learning_rate": 1.8268015170670037e-05,
      "loss": 0.0418,
      "step": 590
    },
    {
      "epoch": 2.04778156996587,
      "grad_norm": 1.7138864994049072,
      "learning_rate": 1.7635903919089762e-05,
      "loss": 0.0354,
      "step": 600
    },
    {
      "epoch": 2.0819112627986347,
      "grad_norm": 1.8861690759658813,
      "learning_rate": 1.7003792667509484e-05,
      "loss": 0.035,
      "step": 610
    },
    {
      "epoch": 2.1160409556313993,
      "grad_norm": 0.7711435556411743,
      "learning_rate": 1.6371681415929206e-05,
      "loss": 0.0304,
      "step": 620
    },
    {
      "epoch": 2.150170648464164,
      "grad_norm": 1.925860047340393,
      "learning_rate": 1.5739570164348927e-05,
      "loss": 0.0319,
      "step": 630
    },
    {
      "epoch": 2.1843003412969284,
      "grad_norm": 1.1292275190353394,
      "learning_rate": 1.510745891276865e-05,
      "loss": 0.0319,
      "step": 640
    },
    {
      "epoch": 2.218430034129693,
      "grad_norm": 1.2774521112442017,
      "learning_rate": 1.4475347661188371e-05,
      "loss": 0.0301,
      "step": 650
    },
    {
      "epoch": 2.252559726962457,
      "grad_norm": 3.394732713699341,
      "learning_rate": 1.3843236409608093e-05,
      "loss": 0.034,
      "step": 660
    },
    {
      "epoch": 2.2866894197952217,
      "grad_norm": 0.9372479319572449,
      "learning_rate": 1.3211125158027813e-05,
      "loss": 0.0345,
      "step": 670
    },
    {
      "epoch": 2.3208191126279862,
      "grad_norm": 1.1731668710708618,
      "learning_rate": 1.2579013906447535e-05,
      "loss": 0.0305,
      "step": 680
    },
    {
      "epoch": 2.354948805460751,
      "grad_norm": 1.8510732650756836,
      "learning_rate": 1.1946902654867258e-05,
      "loss": 0.0246,
      "step": 690
    },
    {
      "epoch": 2.3890784982935154,
      "grad_norm": 1.2375292778015137,
      "learning_rate": 1.1314791403286978e-05,
      "loss": 0.0276,
      "step": 700
    },
    {
      "epoch": 2.42320819112628,
      "grad_norm": 1.7051323652267456,
      "learning_rate": 1.06826801517067e-05,
      "loss": 0.0343,
      "step": 710
    },
    {
      "epoch": 2.4573378839590445,
      "grad_norm": 1.9090622663497925,
      "learning_rate": 1.0050568900126423e-05,
      "loss": 0.0295,
      "step": 720
    },
    {
      "epoch": 2.491467576791809,
      "grad_norm": 2.412919759750366,
      "learning_rate": 9.418457648546145e-06,
      "loss": 0.0261,
      "step": 730
    },
    {
      "epoch": 2.5255972696245736,
      "grad_norm": 1.2371612787246704,
      "learning_rate": 8.786346396965867e-06,
      "loss": 0.0264,
      "step": 740
    },
    {
      "epoch": 2.5597269624573378,
      "grad_norm": 1.0463935136795044,
      "learning_rate": 8.154235145385589e-06,
      "loss": 0.021,
      "step": 750
    },
    {
      "epoch": 2.5938566552901023,
      "grad_norm": 0.7903943061828613,
      "learning_rate": 7.52212389380531e-06,
      "loss": 0.0245,
      "step": 760
    },
    {
      "epoch": 2.627986348122867,
      "grad_norm": 0.9663693308830261,
      "learning_rate": 6.890012642225031e-06,
      "loss": 0.0247,
      "step": 770
    },
    {
      "epoch": 2.6621160409556315,
      "grad_norm": 0.8555452227592468,
      "learning_rate": 6.257901390644753e-06,
      "loss": 0.0219,
      "step": 780
    },
    {
      "epoch": 2.696245733788396,
      "grad_norm": 2.052835702896118,
      "learning_rate": 5.625790139064476e-06,
      "loss": 0.0294,
      "step": 790
    },
    {
      "epoch": 2.73037542662116,
      "grad_norm": 3.152604579925537,
      "learning_rate": 4.993678887484197e-06,
      "loss": 0.03,
      "step": 800
    },
    {
      "epoch": 2.7645051194539247,
      "grad_norm": 1.7257556915283203,
      "learning_rate": 4.361567635903919e-06,
      "loss": 0.0278,
      "step": 810
    },
    {
      "epoch": 2.7986348122866893,
      "grad_norm": 1.7202658653259277,
      "learning_rate": 3.729456384323641e-06,
      "loss": 0.0274,
      "step": 820
    },
    {
      "epoch": 2.832764505119454,
      "grad_norm": 1.6560910940170288,
      "learning_rate": 3.097345132743363e-06,
      "loss": 0.0297,
      "step": 830
    },
    {
      "epoch": 2.8668941979522184,
      "grad_norm": 2.1152303218841553,
      "learning_rate": 2.465233881163085e-06,
      "loss": 0.0254,
      "step": 840
    },
    {
      "epoch": 2.901023890784983,
      "grad_norm": 1.8419415950775146,
      "learning_rate": 1.8331226295828066e-06,
      "loss": 0.0257,
      "step": 850
    },
    {
      "epoch": 2.9351535836177476,
      "grad_norm": 1.0680209398269653,
      "learning_rate": 1.2010113780025286e-06,
      "loss": 0.024,
      "step": 860
    },
    {
      "epoch": 2.969283276450512,
      "grad_norm": 2.186582565307617,
      "learning_rate": 5.689001264222504e-07,
      "loss": 0.0341,
      "step": 870
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9975461431772111,
      "eval_loss": 0.007486232556402683,
      "eval_runtime": 82.1973,
      "eval_samples_per_second": 114.031,
      "eval_steps_per_second": 1.788,
      "step": 879
    },
    {
      "epoch": 3.0,
      "step": 879,
      "total_flos": 7.327526150669599e+18,
      "train_loss": 0.31240319909021447,
      "train_runtime": 6548.0515,
      "train_samples_per_second": 34.355,
      "train_steps_per_second": 0.134
    }
  ],
  "logging_steps": 10,
  "max_steps": 879,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.327526150669599e+18,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}