{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.7226567019922543,
  "eval_steps": 500,
  "global_step": 50000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 5.4451772745526446e-05,
      "grad_norm": 182.4078369140625,
      "learning_rate": 2.7225701061802342e-08,
      "loss": 13.9417,
      "step": 1
    },
    {
      "epoch": 0.027225886372763224,
      "grad_norm": 6.484030246734619,
      "learning_rate": 1.3612850530901172e-05,
      "loss": 8.9538,
      "step": 500
    },
    {
      "epoch": 0.05445177274552645,
      "grad_norm": 8.51893138885498,
      "learning_rate": 2.7225701061802345e-05,
      "loss": 6.3731,
      "step": 1000
    },
    {
      "epoch": 0.08167765911828967,
      "grad_norm": 5.699058532714844,
      "learning_rate": 4.083855159270352e-05,
      "loss": 5.4332,
      "step": 1500
    },
    {
      "epoch": 0.1089035454910529,
      "grad_norm": 11.347241401672363,
      "learning_rate": 5.445140212360469e-05,
      "loss": 4.6858,
      "step": 2000
    },
    {
      "epoch": 0.13612943186381612,
      "grad_norm": 2.8045220375061035,
      "learning_rate": 6.806425265450586e-05,
      "loss": 4.1909,
      "step": 2500
    },
    {
      "epoch": 0.16335531823657934,
      "grad_norm": 2.2558796405792236,
      "learning_rate": 8.167710318540703e-05,
      "loss": 3.929,
      "step": 3000
    },
    {
      "epoch": 0.19058120460934255,
      "grad_norm": 1.9897191524505615,
      "learning_rate": 9.52899537163082e-05,
      "loss": 3.7863,
      "step": 3500
    },
    {
      "epoch": 0.2178070909821058,
      "grad_norm": 1.875031590461731,
      "learning_rate": 9.999458213543955e-05,
      "loss": 3.6905,
      "step": 4000
    },
    {
      "epoch": 0.245032977354869,
      "grad_norm": 1.7890335321426392,
      "learning_rate": 9.996535015198725e-05,
      "loss": 3.6033,
      "step": 4500
    },
    {
      "epoch": 0.27225886372763225,
      "grad_norm": 1.7135872840881348,
      "learning_rate": 9.99108024021366e-05,
      "loss": 3.5396,
      "step": 5000
    },
    {
      "epoch": 0.29948475010039544,
      "grad_norm": 1.6878308057785034,
      "learning_rate": 9.983096652340218e-05,
      "loss": 3.4888,
      "step": 5500
    },
    {
      "epoch": 0.3267106364731587,
      "grad_norm": 1.6131703853607178,
      "learning_rate": 9.972588296594496e-05,
      "loss": 3.4457,
      "step": 6000
    },
    {
      "epoch": 0.3539365228459219,
      "grad_norm": 1.5502527952194214,
      "learning_rate": 9.959560497207756e-05,
      "loss": 3.414,
      "step": 6500
    },
    {
      "epoch": 0.3811624092186851,
      "grad_norm": 1.5919506549835205,
      "learning_rate": 9.944019854928815e-05,
      "loss": 3.3923,
      "step": 7000
    },
    {
      "epoch": 0.40838829559144835,
      "grad_norm": 1.558945655822754,
      "learning_rate": 9.925974243679667e-05,
      "loss": 3.3694,
      "step": 7500
    },
    {
      "epoch": 0.4356141819642116,
      "grad_norm": 1.5559173822402954,
      "learning_rate": 9.905432806566039e-05,
      "loss": 3.3422,
      "step": 8000
    },
    {
      "epoch": 0.4628400683369748,
      "grad_norm": 1.5621517896652222,
      "learning_rate": 9.882405951244874e-05,
      "loss": 3.3287,
      "step": 8500
    },
    {
      "epoch": 0.490065954709738,
      "grad_norm": 1.4939193725585938,
      "learning_rate": 9.856905344651121e-05,
      "loss": 3.3063,
      "step": 9000
    },
    {
      "epoch": 0.5172918410825013,
      "grad_norm": 1.4603230953216553,
      "learning_rate": 9.828943907086507e-05,
      "loss": 3.291,
      "step": 9500
    },
    {
      "epoch": 0.5445177274552645,
      "grad_norm": 1.4974786043167114,
      "learning_rate": 9.798535805673234e-05,
      "loss": 3.2838,
      "step": 10000
    },
    {
      "epoch": 0.5717436138280277,
      "grad_norm": 1.5649632215499878,
      "learning_rate": 9.765696447175982e-05,
      "loss": 3.2695,
      "step": 10500
    },
    {
      "epoch": 0.5989695002007909,
      "grad_norm": 1.523169994354248,
      "learning_rate": 9.73044247019582e-05,
      "loss": 3.2554,
      "step": 11000
    },
    {
      "epoch": 0.6261953865735541,
      "grad_norm": 1.4261916875839233,
      "learning_rate": 9.692791736739979e-05,
      "loss": 3.242,
      "step": 11500
    },
    {
      "epoch": 0.6534212729463174,
      "grad_norm": 1.4566397666931152,
      "learning_rate": 9.65276332317178e-05,
      "loss": 3.2229,
      "step": 12000
    },
    {
      "epoch": 0.6806471593190806,
      "grad_norm": 1.4012161493301392,
      "learning_rate": 9.610377510545277e-05,
      "loss": 3.223,
      "step": 12500
    },
    {
      "epoch": 0.7078730456918438,
      "grad_norm": 1.4838244915008545,
      "learning_rate": 9.565655774329525e-05,
      "loss": 3.2162,
      "step": 13000
    },
    {
      "epoch": 0.7350989320646071,
      "grad_norm": 1.4344415664672852,
      "learning_rate": 9.518620773527684e-05,
      "loss": 3.2148,
      "step": 13500
    },
    {
      "epoch": 0.7623248184373702,
      "grad_norm": 1.4415775537490845,
      "learning_rate": 9.469296339196459e-05,
      "loss": 3.206,
      "step": 14000
    },
    {
      "epoch": 0.7895507048101335,
      "grad_norm": 1.469138264656067,
      "learning_rate": 9.417707462371708e-05,
      "loss": 3.1997,
      "step": 14500
    },
    {
      "epoch": 0.8167765911828967,
      "grad_norm": 1.4184919595718384,
      "learning_rate": 9.36388028140632e-05,
      "loss": 3.1877,
      "step": 15000
    },
    {
      "epoch": 0.8440024775556599,
      "grad_norm": 1.4781111478805542,
      "learning_rate": 9.307842068726786e-05,
      "loss": 3.1893,
      "step": 15500
    },
    {
      "epoch": 0.8712283639284232,
      "grad_norm": 1.4547381401062012,
      "learning_rate": 9.249621217015181e-05,
      "loss": 3.1728,
      "step": 16000
    },
    {
      "epoch": 0.8984542503011864,
      "grad_norm": 1.4702293872833252,
      "learning_rate": 9.189247224823542e-05,
      "loss": 3.1727,
      "step": 16500
    },
    {
      "epoch": 0.9256801366739495,
      "grad_norm": 1.5003573894500732,
      "learning_rate": 9.126750681627938e-05,
      "loss": 3.1648,
      "step": 17000
    },
    {
      "epoch": 0.9529060230467128,
      "grad_norm": 1.43069589138031,
      "learning_rate": 9.062163252329815e-05,
      "loss": 3.1589,
      "step": 17500
    },
    {
      "epoch": 0.980131909419476,
      "grad_norm": 1.443028450012207,
      "learning_rate": 8.99551766121245e-05,
      "loss": 3.1447,
      "step": 18000
    },
    {
      "epoch": 1.0073918281502052,
      "grad_norm": 1.4987826347351074,
      "learning_rate": 8.926847675360655e-05,
      "loss": 3.1253,
      "step": 18500
    },
    {
      "epoch": 1.0346177145229685,
      "grad_norm": 1.514776349067688,
      "learning_rate": 8.856188087552123e-05,
      "loss": 3.0292,
      "step": 19000
    },
    {
      "epoch": 1.0618436008957317,
      "grad_norm": 1.464104175567627,
      "learning_rate": 8.783574698629109e-05,
      "loss": 3.0248,
      "step": 19500
    },
    {
      "epoch": 1.089069487268495,
      "grad_norm": 1.4995203018188477,
      "learning_rate": 8.709044299359327e-05,
      "loss": 3.0316,
      "step": 20000
    },
    {
      "epoch": 1.1162953736412582,
      "grad_norm": 1.5172643661499023,
      "learning_rate": 8.632634651795318e-05,
      "loss": 3.0303,
      "step": 20500
    },
    {
      "epoch": 1.1435212600140212,
      "grad_norm": 1.5371851921081543,
      "learning_rate": 8.554384470141671e-05,
      "loss": 3.0278,
      "step": 21000
    },
    {
      "epoch": 1.1707471463867845,
      "grad_norm": 1.4607304334640503,
      "learning_rate": 8.474333401139847e-05,
      "loss": 3.0206,
      "step": 21500
    },
    {
      "epoch": 1.1979730327595477,
      "grad_norm": 1.486690878868103,
      "learning_rate": 8.392522003980496e-05,
      "loss": 3.0251,
      "step": 22000
    },
    {
      "epoch": 1.225198919132311,
      "grad_norm": 1.4945247173309326,
      "learning_rate": 8.308991729753484e-05,
      "loss": 3.03,
      "step": 22500
    },
    {
      "epoch": 1.2524248055050742,
      "grad_norm": 1.4576785564422607,
      "learning_rate": 8.223784900446006e-05,
      "loss": 3.0219,
      "step": 23000
    },
    {
      "epoch": 1.2796506918778374,
      "grad_norm": 1.5146127939224243,
      "learning_rate": 8.136944687499468e-05,
      "loss": 3.015,
      "step": 23500
    },
    {
      "epoch": 1.3068765782506007,
      "grad_norm": 1.5249332189559937,
      "learning_rate": 8.048515089935956e-05,
      "loss": 3.0185,
      "step": 24000
    },
    {
      "epoch": 1.334102464623364,
      "grad_norm": 1.4822596311569214,
      "learning_rate": 7.958540912065415e-05,
      "loss": 3.0271,
      "step": 24500
    },
    {
      "epoch": 1.3613283509961271,
      "grad_norm": 1.5259217023849487,
      "learning_rate": 7.867067740784801e-05,
      "loss": 3.0188,
      "step": 25000
    },
    {
      "epoch": 1.3885542373688904,
      "grad_norm": 1.5163941383361816,
      "learning_rate": 7.774141922480738e-05,
      "loss": 3.0165,
      "step": 25500
    },
    {
      "epoch": 1.4157801237416536,
      "grad_norm": 1.6021302938461304,
      "learning_rate": 7.679810539547351e-05,
      "loss": 3.0178,
      "step": 26000
    },
    {
      "epoch": 1.4430060101144169,
      "grad_norm": 1.4801899194717407,
      "learning_rate": 7.584121386531205e-05,
      "loss": 3.0167,
      "step": 26500
    },
    {
      "epoch": 1.4702318964871801,
      "grad_norm": 1.478881597518921,
      "learning_rate": 7.487122945915404e-05,
      "loss": 3.0134,
      "step": 27000
    },
    {
      "epoch": 1.4974577828599434,
      "grad_norm": 1.5363502502441406,
      "learning_rate": 7.388864363555151e-05,
      "loss": 3.0162,
      "step": 27500
    },
    {
      "epoch": 1.5246836692327066,
      "grad_norm": 1.5000182390213013,
      "learning_rate": 7.289395423777183e-05,
      "loss": 3.0088,
      "step": 28000
    },
    {
      "epoch": 1.5519095556054698,
      "grad_norm": 1.4747732877731323,
      "learning_rate": 7.188766524155734e-05,
      "loss": 3.0004,
      "step": 28500
    },
    {
      "epoch": 1.5791354419782329,
      "grad_norm": 1.5123411417007446,
      "learning_rate": 7.08702864997777e-05,
      "loss": 3.0021,
      "step": 29000
    },
    {
      "epoch": 1.606361328350996,
      "grad_norm": 1.5053893327713013,
      "learning_rate": 6.984233348410447e-05,
      "loss": 3.0063,
      "step": 29500
    },
    {
      "epoch": 1.6335872147237593,
      "grad_norm": 1.4705040454864502,
      "learning_rate": 6.88043270238391e-05,
      "loss": 2.992,
      "step": 30000
    },
    {
      "epoch": 1.6608131010965226,
      "grad_norm": 1.4783118963241577,
      "learning_rate": 6.775679304202598e-05,
      "loss": 2.9957,
      "step": 30500
    },
    {
      "epoch": 1.6880389874692858,
      "grad_norm": 1.4604405164718628,
      "learning_rate": 6.670026228898501e-05,
      "loss": 3.0012,
      "step": 31000
    },
    {
      "epoch": 1.715264873842049,
      "grad_norm": 1.4742037057876587,
      "learning_rate": 6.563527007339827e-05,
      "loss": 2.9946,
      "step": 31500
    },
    {
      "epoch": 1.742490760214812,
      "grad_norm": 1.5092357397079468,
      "learning_rate": 6.456235599108689e-05,
      "loss": 2.9953,
      "step": 32000
    },
    {
      "epoch": 1.7697166465875753,
      "grad_norm": 1.4896336793899536,
      "learning_rate": 6.348206365161614e-05,
      "loss": 2.982,
      "step": 32500
    },
    {
      "epoch": 1.7969425329603386,
      "grad_norm": 1.5187143087387085,
      "learning_rate": 6.239494040286657e-05,
      "loss": 2.9954,
      "step": 33000
    },
    {
      "epoch": 1.8241684193331018,
      "grad_norm": 1.5126278400421143,
      "learning_rate": 6.13015370537113e-05,
      "loss": 2.9847,
      "step": 33500
    },
    {
      "epoch": 1.851394305705865,
      "grad_norm": 1.4799253940582275,
      "learning_rate": 6.0202407594939615e-05,
      "loss": 2.9843,
      "step": 34000
    },
    {
      "epoch": 1.8786201920786283,
      "grad_norm": 1.5327789783477783,
      "learning_rate": 5.9098108918568354e-05,
      "loss": 2.9804,
      "step": 34500
    },
    {
      "epoch": 1.9058460784513915,
      "grad_norm": 1.544638991355896,
      "learning_rate": 5.798920053568351e-05,
      "loss": 2.977,
      "step": 35000
    },
    {
      "epoch": 1.9330719648241548,
      "grad_norm": 1.5565699338912964,
      "learning_rate": 5.687624429295468e-05,
      "loss": 2.9804,
      "step": 35500
    },
    {
      "epoch": 1.960297851196918,
      "grad_norm": 1.4633305072784424,
      "learning_rate": 5.575980408796623e-05,
      "loss": 2.9758,
      "step": 36000
    },
    {
      "epoch": 1.9875237375696813,
      "grad_norm": 1.519126534461975,
      "learning_rate": 5.464044558350929e-05,
      "loss": 2.9697,
      "step": 36500
    },
    {
      "epoch": 2.0147836563004105,
      "grad_norm": 1.5512584447860718,
      "learning_rate": 5.3518735920979434e-05,
      "loss": 2.8931,
      "step": 37000
    },
    {
      "epoch": 2.0420095426731737,
      "grad_norm": 1.5433844327926636,
      "learning_rate": 5.239524343302511e-05,
      "loss": 2.8394,
      "step": 37500
    },
    {
      "epoch": 2.069235429045937,
      "grad_norm": 1.5760645866394043,
      "learning_rate": 5.127053735559263e-05,
      "loss": 2.8356,
      "step": 38000
    },
    {
      "epoch": 2.0964613154187,
      "grad_norm": 1.6160222291946411,
      "learning_rate": 5.0145187539513326e-05,
      "loss": 2.8333,
      "step": 38500
    },
    {
      "epoch": 2.1236872017914634,
      "grad_norm": 1.5705721378326416,
      "learning_rate": 4.901976416177933e-05,
      "loss": 2.8355,
      "step": 39000
    },
    {
      "epoch": 2.1509130881642267,
      "grad_norm": 1.5703595876693726,
      "learning_rate": 4.7894837436654e-05,
      "loss": 2.8399,
      "step": 39500
    },
    {
      "epoch": 2.17813897453699,
      "grad_norm": 1.5459660291671753,
      "learning_rate": 4.677097732676345e-05,
      "loss": 2.8474,
      "step": 40000
    },
    {
      "epoch": 2.205364860909753,
      "grad_norm": 1.6205192804336548,
      "learning_rate": 4.56487532543157e-05,
      "loss": 2.8455,
      "step": 40500
    },
    {
      "epoch": 2.2325907472825164,
      "grad_norm": 1.546263337135315,
      "learning_rate": 4.4528733812593465e-05,
      "loss": 2.8378,
      "step": 41000
    },
    {
      "epoch": 2.2598166336552796,
      "grad_norm": 1.641790509223938,
      "learning_rate": 4.341148647786699e-05,
      "loss": 2.8439,
      "step": 41500
    },
    {
      "epoch": 2.2870425200280424,
      "grad_norm": 1.5860189199447632,
      "learning_rate": 4.2297577321873025e-05,
      "loss": 2.8485,
      "step": 42000
    },
    {
      "epoch": 2.3142684064008057,
      "grad_norm": 1.578778624534607,
      "learning_rate": 4.118757072500509e-05,
      "loss": 2.8428,
      "step": 42500
    },
    {
      "epoch": 2.341494292773569,
      "grad_norm": 1.6200742721557617,
      "learning_rate": 4.008202909036093e-05,
      "loss": 2.8439,
      "step": 43000
    },
    {
      "epoch": 2.368720179146332,
      "grad_norm": 1.5700623989105225,
      "learning_rate": 3.898151255879183e-05,
      "loss": 2.8421,
      "step": 43500
    },
    {
      "epoch": 2.3959460655190954,
      "grad_norm": 1.608221173286438,
      "learning_rate": 3.788657872509786e-05,
      "loss": 2.8472,
      "step": 44000
    },
    {
      "epoch": 2.4231719518918586,
      "grad_norm": 1.5805708169937134,
      "learning_rate": 3.679778235551349e-05,
      "loss": 2.8437,
      "step": 44500
    },
    {
      "epoch": 2.450397838264622,
      "grad_norm": 1.5437690019607544,
      "learning_rate": 3.571567510662597e-05,
      "loss": 2.839,
      "step": 45000
    },
    {
      "epoch": 2.477623724637385,
      "grad_norm": 1.5659681558609009,
      "learning_rate": 3.464080524586959e-05,
      "loss": 2.8338,
      "step": 45500
    },
    {
      "epoch": 2.5048496110101484,
      "grad_norm": 1.5578417778015137,
      "learning_rate": 3.357371737373694e-05,
      "loss": 2.8417,
      "step": 46000
    },
    {
      "epoch": 2.5320754973829116,
      "grad_norm": 1.6232887506484985,
      "learning_rate": 3.2514952147848014e-05,
      "loss": 2.8424,
      "step": 46500
    },
    {
      "epoch": 2.559301383755675,
      "grad_norm": 1.580432415008545,
      "learning_rate": 3.146504600901725e-05,
      "loss": 2.8325,
      "step": 47000
    },
    {
      "epoch": 2.586527270128438,
      "grad_norm": 1.589545488357544,
      "learning_rate": 3.0424530909456973e-05,
      "loss": 2.8342,
      "step": 47500
    },
    {
      "epoch": 2.6137531565012013,
      "grad_norm": 1.5450959205627441,
      "learning_rate": 2.9393934043254943e-05,
      "loss": 2.838,
      "step": 48000
    },
    {
      "epoch": 2.6409790428739646,
      "grad_norm": 1.59015953540802,
      "learning_rate": 2.8373777579262905e-05,
      "loss": 2.8379,
      "step": 48500
    },
    {
      "epoch": 2.668204929246728,
      "grad_norm": 1.586267113685608,
      "learning_rate": 2.7364578396531094e-05,
      "loss": 2.8316,
      "step": 49000
    },
    {
      "epoch": 2.695430815619491,
      "grad_norm": 1.6080377101898193,
      "learning_rate": 2.636684782242303e-05,
      "loss": 2.8333,
      "step": 49500
    },
    {
      "epoch": 2.7226567019922543,
      "grad_norm": 1.6002167463302612,
      "learning_rate": 2.5381091373543046e-05,
      "loss": 2.8373,
      "step": 50000
    }
  ],
  "logging_steps": 500,
  "max_steps": 73456,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.074111485812618e+19,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}