{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.7711654268508679,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017711654268508677,
      "grad_norm": 2915.373291015625,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 152.2409,
      "step": 100
    },
    {
      "epoch": 0.035423308537017355,
      "grad_norm": 1864.051025390625,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 221.198,
      "step": 200
    },
    {
      "epoch": 0.053134962805526036,
      "grad_norm": 1426.2816162109375,
      "learning_rate": 3e-06,
      "loss": 363.9534,
      "step": 300
    },
    {
      "epoch": 0.07084661707403471,
      "grad_norm": 573.576904296875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 157.6679,
      "step": 400
    },
    {
      "epoch": 0.08855827134254339,
      "grad_norm": 23269.814453125,
      "learning_rate": 5e-06,
      "loss": 145.8333,
      "step": 500
    },
    {
      "epoch": 0.10626992561105207,
      "grad_norm": 378.4466247558594,
      "learning_rate": 6e-06,
      "loss": 225.834,
      "step": 600
    },
    {
      "epoch": 0.12398157987956075,
      "grad_norm": 560.156494140625,
      "learning_rate": 7e-06,
      "loss": 177.84,
      "step": 700
    },
    {
      "epoch": 0.14169323414806942,
      "grad_norm": 389.28936767578125,
      "learning_rate": 8.000000000000001e-06,
      "loss": 172.5718,
      "step": 800
    },
    {
      "epoch": 0.1594048884165781,
      "grad_norm": 290.60308837890625,
      "learning_rate": 9e-06,
      "loss": 118.532,
      "step": 900
    },
    {
      "epoch": 0.17711654268508678,
      "grad_norm": 708.8328857421875,
      "learning_rate": 1e-05,
      "loss": 102.5411,
      "step": 1000
    },
    {
      "epoch": 0.19482819695359546,
      "grad_norm": 733.8017578125,
      "learning_rate": 9.99695413509548e-06,
      "loss": 203.6356,
      "step": 1100
    },
    {
      "epoch": 0.21253985122210414,
      "grad_norm": 1235.48583984375,
      "learning_rate": 9.987820251299121e-06,
      "loss": 148.6089,
      "step": 1200
    },
    {
      "epoch": 0.23025150549061282,
      "grad_norm": 612.6553955078125,
      "learning_rate": 9.972609476841368e-06,
      "loss": 120.5858,
      "step": 1300
    },
    {
      "epoch": 0.2479631597591215,
      "grad_norm": 539.2703247070312,
      "learning_rate": 9.951340343707852e-06,
      "loss": 127.0271,
      "step": 1400
    },
    {
      "epoch": 0.26567481402763016,
      "grad_norm": 362.9602966308594,
      "learning_rate": 9.924038765061042e-06,
      "loss": 69.1379,
      "step": 1500
    },
    {
      "epoch": 0.28338646829613884,
      "grad_norm": 1124.81591796875,
      "learning_rate": 9.890738003669029e-06,
      "loss": 251.2696,
      "step": 1600
    },
    {
      "epoch": 0.3010981225646475,
      "grad_norm": 2207.07421875,
      "learning_rate": 9.851478631379982e-06,
      "loss": 138.321,
      "step": 1700
    },
    {
      "epoch": 0.3188097768331562,
      "grad_norm": 341.0426940917969,
      "learning_rate": 9.806308479691595e-06,
      "loss": 56.6384,
      "step": 1800
    },
    {
      "epoch": 0.3365214311016649,
      "grad_norm": 735.7184448242188,
      "learning_rate": 9.755282581475769e-06,
      "loss": 130.0931,
      "step": 1900
    },
    {
      "epoch": 0.35423308537017356,
      "grad_norm": 549.526611328125,
      "learning_rate": 9.698463103929542e-06,
      "loss": 231.677,
      "step": 2000
    },
    {
      "epoch": 0.37194473963868224,
      "grad_norm": 607.9448852539062,
      "learning_rate": 9.635919272833938e-06,
      "loss": 97.9832,
      "step": 2100
    },
    {
      "epoch": 0.3896563939071909,
      "grad_norm": 666.7014770507812,
      "learning_rate": 9.567727288213005e-06,
      "loss": 138.039,
      "step": 2200
    },
    {
      "epoch": 0.4073680481756996,
      "grad_norm": 442.1828918457031,
      "learning_rate": 9.493970231495836e-06,
      "loss": 109.4226,
      "step": 2300
    },
    {
      "epoch": 0.4250797024442083,
      "grad_norm": 174.03724670410156,
      "learning_rate": 9.414737964294636e-06,
      "loss": 70.5078,
      "step": 2400
    },
    {
      "epoch": 0.44279135671271697,
      "grad_norm": 456.3837585449219,
      "learning_rate": 9.330127018922195e-06,
      "loss": 88.6896,
      "step": 2500
    },
    {
      "epoch": 0.46050301098122565,
      "grad_norm": 354.64373779296875,
      "learning_rate": 9.24024048078213e-06,
      "loss": 106.9264,
      "step": 2600
    },
    {
      "epoch": 0.4782146652497343,
      "grad_norm": 144.5575408935547,
      "learning_rate": 9.145187862775208e-06,
      "loss": 75.4347,
      "step": 2700
    },
    {
      "epoch": 0.495926319518243,
      "grad_norm": 458.1535339355469,
      "learning_rate": 9.045084971874738e-06,
      "loss": 108.9999,
      "step": 2800
    },
    {
      "epoch": 0.5136379737867517,
      "grad_norm": 1575.434814453125,
      "learning_rate": 8.94005376803361e-06,
      "loss": 111.8511,
      "step": 2900
    },
    {
      "epoch": 0.5313496280552603,
      "grad_norm": 302.79241943359375,
      "learning_rate": 8.83022221559489e-06,
      "loss": 172.8595,
      "step": 3000
    },
    {
      "epoch": 0.549061282323769,
      "grad_norm": 1860.42919921875,
      "learning_rate": 8.715724127386971e-06,
      "loss": 143.5497,
      "step": 3100
    },
    {
      "epoch": 0.5667729365922777,
      "grad_norm": 297.4872131347656,
      "learning_rate": 8.596699001693257e-06,
      "loss": 126.711,
      "step": 3200
    },
    {
      "epoch": 0.5844845908607864,
      "grad_norm": 2128.590087890625,
      "learning_rate": 8.473291852294986e-06,
      "loss": 72.2636,
      "step": 3300
    },
    {
      "epoch": 0.602196245129295,
      "grad_norm": 436.14593505859375,
      "learning_rate": 8.345653031794292e-06,
      "loss": 169.4072,
      "step": 3400
    },
    {
      "epoch": 0.6199078993978038,
      "grad_norm": 1767.817138671875,
      "learning_rate": 8.213938048432697e-06,
      "loss": 134.9774,
      "step": 3500
    },
    {
      "epoch": 0.6376195536663124,
      "grad_norm": 298.466796875,
      "learning_rate": 8.078307376628292e-06,
      "loss": 91.2486,
      "step": 3600
    },
    {
      "epoch": 0.6553312079348211,
      "grad_norm": 641.1651000976562,
      "learning_rate": 7.938926261462366e-06,
      "loss": 145.7362,
      "step": 3700
    },
    {
      "epoch": 0.6730428622033298,
      "grad_norm": 14466.2060546875,
      "learning_rate": 7.795964517353734e-06,
      "loss": 169.7186,
      "step": 3800
    },
    {
      "epoch": 0.6907545164718385,
      "grad_norm": 161.69337463378906,
      "learning_rate": 7.649596321166024e-06,
      "loss": 81.239,
      "step": 3900
    },
    {
      "epoch": 0.7084661707403471,
      "grad_norm": 474.57958984375,
      "learning_rate": 7.500000000000001e-06,
      "loss": 114.6176,
      "step": 4000
    },
    {
      "epoch": 0.7261778250088559,
      "grad_norm": 1847.68359375,
      "learning_rate": 7.347357813929455e-06,
      "loss": 111.0047,
      "step": 4100
    },
    {
      "epoch": 0.7438894792773645,
      "grad_norm": 1322.276611328125,
      "learning_rate": 7.191855733945388e-06,
      "loss": 91.6528,
      "step": 4200
    },
    {
      "epoch": 0.7616011335458732,
      "grad_norm": 1325.048583984375,
      "learning_rate": 7.033683215379002e-06,
      "loss": 94.0535,
      "step": 4300
    },
    {
      "epoch": 0.7793127878143818,
      "grad_norm": 382.36859130859375,
      "learning_rate": 6.873032967079562e-06,
      "loss": 74.0244,
      "step": 4400
    },
    {
      "epoch": 0.7970244420828906,
      "grad_norm": 876.57080078125,
      "learning_rate": 6.710100716628345e-06,
      "loss": 96.6452,
      "step": 4500
    },
    {
      "epoch": 0.8147360963513992,
      "grad_norm": 1393.6138916015625,
      "learning_rate": 6.545084971874738e-06,
      "loss": 118.802,
      "step": 4600
    },
    {
      "epoch": 0.832447750619908,
      "grad_norm": 406.4674377441406,
      "learning_rate": 6.378186779084996e-06,
      "loss": 55.8678,
      "step": 4700
    },
    {
      "epoch": 0.8501594048884166,
      "grad_norm": 1019.0061645507812,
      "learning_rate": 6.209609477998339e-06,
      "loss": 123.8752,
      "step": 4800
    },
    {
      "epoch": 0.8678710591569253,
      "grad_norm": 987.013916015625,
      "learning_rate": 6.039558454088796e-06,
      "loss": 78.0407,
      "step": 4900
    },
    {
      "epoch": 0.8855827134254339,
      "grad_norm": 849.7422485351562,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 93.2876,
      "step": 5000
    },
    {
      "epoch": 0.9032943676939427,
      "grad_norm": 451.16192626953125,
      "learning_rate": 5.695865504800328e-06,
      "loss": 99.8985,
      "step": 5100
    },
    {
      "epoch": 0.9210060219624513,
      "grad_norm": 1357.7943115234375,
      "learning_rate": 5.522642316338268e-06,
      "loss": 106.1713,
      "step": 5200
    },
    {
      "epoch": 0.9387176762309599,
      "grad_norm": 452.63299560546875,
      "learning_rate": 5.348782368720627e-06,
      "loss": 72.7826,
      "step": 5300
    },
    {
      "epoch": 0.9564293304994687,
      "grad_norm": 382.8103942871094,
      "learning_rate": 5.174497483512506e-06,
      "loss": 108.6426,
      "step": 5400
    },
    {
      "epoch": 0.9741409847679773,
      "grad_norm": 589.746826171875,
      "learning_rate": 5e-06,
      "loss": 93.5595,
      "step": 5500
    },
    {
      "epoch": 0.991852639036486,
      "grad_norm": 719.3682250976562,
      "learning_rate": 4.825502516487497e-06,
      "loss": 79.8001,
      "step": 5600
    },
    {
      "epoch": 1.0,
      "eval_loss": 260.0443420410156,
      "eval_runtime": 57.7368,
      "eval_samples_per_second": 43.473,
      "eval_steps_per_second": 10.877,
      "step": 5646
    },
    {
      "epoch": 1.0095642933049946,
      "grad_norm": 344.79461669921875,
      "learning_rate": 4.651217631279374e-06,
      "loss": 87.4869,
      "step": 5700
    },
    {
      "epoch": 1.0272759475735034,
      "grad_norm": 387.0537414550781,
      "learning_rate": 4.477357683661734e-06,
      "loss": 70.2096,
      "step": 5800
    },
    {
      "epoch": 1.0449876018420121,
      "grad_norm": 1872.5281982421875,
      "learning_rate": 4.304134495199675e-06,
      "loss": 70.9989,
      "step": 5900
    },
    {
      "epoch": 1.0626992561105206,
      "grad_norm": 941.2898559570312,
      "learning_rate": 4.131759111665349e-06,
      "loss": 83.4253,
      "step": 6000
    },
    {
      "epoch": 1.0804109103790294,
      "grad_norm": 2921.8916015625,
      "learning_rate": 3.960441545911205e-06,
      "loss": 74.6986,
      "step": 6100
    },
    {
      "epoch": 1.098122564647538,
      "grad_norm": 317.5779113769531,
      "learning_rate": 3.790390522001662e-06,
      "loss": 56.6294,
      "step": 6200
    },
    {
      "epoch": 1.1158342189160468,
      "grad_norm": 610.6322021484375,
      "learning_rate": 3.6218132209150047e-06,
      "loss": 96.5304,
      "step": 6300
    },
    {
      "epoch": 1.1335458731845554,
      "grad_norm": 536.9466552734375,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 88.2391,
      "step": 6400
    },
    {
      "epoch": 1.151257527453064,
      "grad_norm": 707.88330078125,
      "learning_rate": 3.289899283371657e-06,
      "loss": 88.9819,
      "step": 6500
    },
    {
      "epoch": 1.1689691817215728,
      "grad_norm": 2059.515380859375,
      "learning_rate": 3.12696703292044e-06,
      "loss": 57.1338,
      "step": 6600
    },
    {
      "epoch": 1.1866808359900816,
      "grad_norm": 310.9707946777344,
      "learning_rate": 2.966316784621e-06,
      "loss": 107.5339,
      "step": 6700
    },
    {
      "epoch": 1.20439249025859,
      "grad_norm": 511.1593933105469,
      "learning_rate": 2.8081442660546126e-06,
      "loss": 71.3359,
      "step": 6800
    },
    {
      "epoch": 1.2221041445270988,
      "grad_norm": 1355.5145263671875,
      "learning_rate": 2.6526421860705474e-06,
      "loss": 62.7688,
      "step": 6900
    },
    {
      "epoch": 1.2398157987956075,
      "grad_norm": 180.27313232421875,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 138.8191,
      "step": 7000
    },
    {
      "epoch": 1.257527453064116,
      "grad_norm": 477.2627868652344,
      "learning_rate": 2.3504036788339763e-06,
      "loss": 68.2528,
      "step": 7100
    },
    {
      "epoch": 1.2752391073326248,
      "grad_norm": 62.73986053466797,
      "learning_rate": 2.204035482646267e-06,
      "loss": 54.5104,
      "step": 7200
    },
    {
      "epoch": 1.2929507616011335,
      "grad_norm": 164.81520080566406,
      "learning_rate": 2.061073738537635e-06,
      "loss": 103.5922,
      "step": 7300
    },
    {
      "epoch": 1.3106624158696423,
      "grad_norm": 962.6326293945312,
      "learning_rate": 1.9216926233717087e-06,
      "loss": 61.3437,
      "step": 7400
    },
    {
      "epoch": 1.328374070138151,
      "grad_norm": 413.64935302734375,
      "learning_rate": 1.7860619515673034e-06,
      "loss": 102.4006,
      "step": 7500
    },
    {
      "epoch": 1.3460857244066595,
      "grad_norm": 271.7372741699219,
      "learning_rate": 1.6543469682057105e-06,
      "loss": 92.6463,
      "step": 7600
    },
    {
      "epoch": 1.3637973786751683,
      "grad_norm": 776.993896484375,
      "learning_rate": 1.5267081477050132e-06,
      "loss": 131.4324,
      "step": 7700
    },
    {
      "epoch": 1.381509032943677,
      "grad_norm": 412.1511535644531,
      "learning_rate": 1.4033009983067454e-06,
      "loss": 55.6395,
      "step": 7800
    },
    {
      "epoch": 1.3992206872121855,
      "grad_norm": 664.7413940429688,
      "learning_rate": 1.2842758726130283e-06,
      "loss": 137.3694,
      "step": 7900
    },
    {
      "epoch": 1.4169323414806942,
      "grad_norm": 350.5196228027344,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 107.8133,
      "step": 8000
    },
    {
      "epoch": 1.434643995749203,
      "grad_norm": 445.6170349121094,
      "learning_rate": 1.0599462319663906e-06,
      "loss": 51.7356,
      "step": 8100
    },
    {
      "epoch": 1.4523556500177117,
      "grad_norm": 953.5679931640625,
      "learning_rate": 9.549150281252633e-07,
      "loss": 84.7836,
      "step": 8200
    },
    {
      "epoch": 1.4700673042862205,
      "grad_norm": 952.9735107421875,
      "learning_rate": 8.54812137224792e-07,
      "loss": 92.5589,
      "step": 8300
    },
    {
      "epoch": 1.487778958554729,
      "grad_norm": 492.47198486328125,
      "learning_rate": 7.597595192178702e-07,
      "loss": 55.5006,
      "step": 8400
    },
    {
      "epoch": 1.5054906128232377,
      "grad_norm": 1805.4322509765625,
      "learning_rate": 6.698729810778065e-07,
      "loss": 96.7545,
      "step": 8500
    },
    {
      "epoch": 1.5232022670917464,
      "grad_norm": 300.4207458496094,
      "learning_rate": 5.852620357053651e-07,
      "loss": 86.5161,
      "step": 8600
    },
    {
      "epoch": 1.540913921360255,
      "grad_norm": 1440.614990234375,
      "learning_rate": 5.06029768504166e-07,
      "loss": 78.0504,
      "step": 8700
    },
    {
      "epoch": 1.5586255756287637,
      "grad_norm": 405.5276184082031,
      "learning_rate": 4.322727117869951e-07,
      "loss": 76.5312,
      "step": 8800
    },
    {
      "epoch": 1.5763372298972724,
      "grad_norm": 950.4790649414062,
      "learning_rate": 3.6408072716606346e-07,
      "loss": 89.3738,
      "step": 8900
    },
    {
      "epoch": 1.594048884165781,
      "grad_norm": 64.55994415283203,
      "learning_rate": 3.015368960704584e-07,
      "loss": 97.9065,
      "step": 9000
    },
    {
      "epoch": 1.61176053843429,
      "grad_norm": 259.1815490722656,
      "learning_rate": 2.447174185242324e-07,
      "loss": 93.1754,
      "step": 9100
    },
    {
      "epoch": 1.6294721927027984,
      "grad_norm": 366.2008056640625,
      "learning_rate": 1.9369152030840553e-07,
      "loss": 73.6778,
      "step": 9200
    },
    {
      "epoch": 1.6471838469713072,
      "grad_norm": 383.7484436035156,
      "learning_rate": 1.4852136862001766e-07,
      "loss": 48.6865,
      "step": 9300
    },
    {
      "epoch": 1.664895501239816,
      "grad_norm": 392.061767578125,
      "learning_rate": 1.0926199633097156e-07,
      "loss": 118.0324,
      "step": 9400
    },
    {
      "epoch": 1.6826071555083244,
      "grad_norm": 8477.83984375,
      "learning_rate": 7.59612349389599e-08,
      "loss": 109.5857,
      "step": 9500
    },
    {
      "epoch": 1.7003188097768331,
      "grad_norm": 452.080078125,
      "learning_rate": 4.865965629214819e-08,
      "loss": 95.1972,
      "step": 9600
    },
    {
      "epoch": 1.7180304640453419,
      "grad_norm": 142.1215057373047,
      "learning_rate": 2.7390523158633552e-08,
      "loss": 59.5104,
      "step": 9700
    },
    {
      "epoch": 1.7357421183138504,
      "grad_norm": 450.3463134765625,
      "learning_rate": 1.2179748700879013e-08,
      "loss": 79.6601,
      "step": 9800
    },
    {
      "epoch": 1.7534537725823593,
      "grad_norm": 964.9075927734375,
      "learning_rate": 3.0458649045211897e-09,
      "loss": 82.6603,
      "step": 9900
    },
    {
      "epoch": 1.7711654268508679,
      "grad_norm": 203.1498260498047,
      "learning_rate": 0.0,
      "loss": 94.6798,
      "step": 10000
    }
  ],
  "logging_steps": 100,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}