{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.496543551254197,
  "eval_steps": 16,
  "global_step": 395,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.006320363420896702, "grad_norm": 0.48046875, "learning_rate": 2e-05, "loss": 0.6497, "step": 1 },
    { "epoch": 0.006320363420896702, "eval_loss": 0.5998793244361877, "eval_runtime": 56.2331, "eval_samples_per_second": 18.957, "eval_steps_per_second": 18.957, "step": 1 },
    { "epoch": 0.012640726841793404, "grad_norm": 0.470703125, "learning_rate": 4e-05, "loss": 0.6349, "step": 2 },
    { "epoch": 0.018961090262690106, "grad_norm": 0.46875, "learning_rate": 6e-05, "loss": 0.5832, "step": 3 },
    { "epoch": 0.025281453683586808, "grad_norm": 0.47265625, "learning_rate": 8e-05, "loss": 0.557, "step": 4 },
    { "epoch": 0.03160181710448351, "grad_norm": 0.35546875, "learning_rate": 0.0001, "loss": 0.4966, "step": 5 },
    { "epoch": 0.03792218052538021, "grad_norm": 0.31640625, "learning_rate": 0.00012, "loss": 0.3771, "step": 6 },
    { "epoch": 0.04424254394627691, "grad_norm": 0.298828125, "learning_rate": 0.00014, "loss": 0.318, "step": 7 },
    { "epoch": 0.050562907367173615, "grad_norm": 0.36328125, "learning_rate": 0.00016, "loss": 0.296, "step": 8 },
    { "epoch": 0.05688327078807032, "grad_norm": 0.3046875, "learning_rate": 0.00018, "loss": 0.2682, "step": 9 },
    { "epoch": 0.06320363420896702, "grad_norm": 0.29296875, "learning_rate": 0.0002, "loss": 0.3039, "step": 10 },
    { "epoch": 0.06952399762986372, "grad_norm": 0.1845703125, "learning_rate": 0.00019999770790755575, "loss": 0.3096, "step": 11 },
    { "epoch": 0.07584436105076042, "grad_norm": 0.16796875, "learning_rate": 0.00019999083173529673, "loss": 0.2575, "step": 12 },
    { "epoch": 0.08216472447165712, "grad_norm": 0.1689453125, "learning_rate": 0.00019997937179843937, "loss": 0.289, "step": 13 },
    { "epoch": 0.08848508789255383, "grad_norm": 0.1572265625, "learning_rate": 0.0001999633286223284, "loss": 0.2878, "step": 14 },
    { "epoch": 0.09480545131345053, "grad_norm": 0.12353515625, "learning_rate": 0.00019994270294241266, "loss": 0.2274, "step": 15 },
    { "epoch": 0.10112581473434723, "grad_norm": 0.11572265625, "learning_rate": 0.00019991749570421146, "loss": 0.2252, "step": 16 },
    { "epoch": 0.10112581473434723, "eval_loss": 0.25023654103279114, "eval_runtime": 56.1603, "eval_samples_per_second": 18.981, "eval_steps_per_second": 18.981, "step": 16 },
    { "epoch": 0.10744617815524393, "grad_norm": 0.1298828125, "learning_rate": 0.0001998877080632712, "loss": 0.2512, "step": 17 },
    { "epoch": 0.11376654157614063, "grad_norm": 0.193359375, "learning_rate": 0.00019985334138511237, "loss": 0.2659, "step": 18 },
    { "epoch": 0.12008690499703734, "grad_norm": 0.142578125, "learning_rate": 0.00019981439724516716, "loss": 0.2415, "step": 19 },
    { "epoch": 0.12640726841793404, "grad_norm": 0.126953125, "learning_rate": 0.0001997708774287068, "loss": 0.2661, "step": 20 },
    { "epoch": 0.13272763183883074, "grad_norm": 0.1142578125, "learning_rate": 0.00019972278393076023, "loss": 0.2046, "step": 21 },
    { "epoch": 0.13904799525972744, "grad_norm": 0.11865234375, "learning_rate": 0.0001996701189560223, "loss": 0.2087, "step": 22 },
    { "epoch": 0.14536835868062414, "grad_norm": 0.1435546875, "learning_rate": 0.00019961288491875278, "loss": 0.2246, "step": 23 },
    { "epoch": 0.15168872210152085, "grad_norm": 0.119140625, "learning_rate": 0.00019955108444266585, "loss": 0.1831, "step": 24 },
    { "epoch": 0.15800908552241755, "grad_norm": 0.1376953125, "learning_rate": 0.00019948472036080949, "loss": 0.2398, "step": 25 },
    { "epoch": 0.16432944894331425, "grad_norm": 0.1201171875, "learning_rate": 0.00019941379571543596, "loss": 0.189, "step": 26 },
    { "epoch": 0.17064981236421095, "grad_norm": 0.12890625, "learning_rate": 0.00019933831375786216, "loss": 0.2156, "step": 27 },
    { "epoch": 0.17697017578510765, "grad_norm": 0.1259765625, "learning_rate": 0.00019925827794832056, "loss": 0.2012, "step": 28 },
    { "epoch": 0.18329053920600435, "grad_norm": 0.11572265625, "learning_rate": 0.00019917369195580063, "loss": 0.1602, "step": 29 },
    { "epoch": 0.18961090262690106, "grad_norm": 0.12890625, "learning_rate": 0.00019908455965788067, "loss": 0.1976, "step": 30 },
    { "epoch": 0.19593126604779776, "grad_norm": 0.11279296875, "learning_rate": 0.00019899088514055004, "loss": 0.1874, "step": 31 },
    { "epoch": 0.20225162946869446, "grad_norm": 0.1357421875, "learning_rate": 0.00019889267269802176, "loss": 0.2024, "step": 32 },
    { "epoch": 0.20225162946869446, "eval_loss": 0.2020280808210373, "eval_runtime": 56.2737, "eval_samples_per_second": 18.943, "eval_steps_per_second": 18.943, "step": 32 },
    { "epoch": 0.20857199288959116, "grad_norm": 0.1123046875, "learning_rate": 0.00019878992683253582, "loss": 0.1819, "step": 33 },
    { "epoch": 0.21489235631048786, "grad_norm": 0.1123046875, "learning_rate": 0.00019868265225415265, "loss": 0.1794, "step": 34 },
    { "epoch": 0.22121271973138457, "grad_norm": 0.12060546875, "learning_rate": 0.00019857085388053723, "loss": 0.1943, "step": 35 },
    { "epoch": 0.22753308315228127, "grad_norm": 0.134765625, "learning_rate": 0.00019845453683673368, "loss": 0.2265, "step": 36 },
    { "epoch": 0.23385344657317797, "grad_norm": 0.115234375, "learning_rate": 0.00019833370645493047, "loss": 0.181, "step": 37 },
    { "epoch": 0.24017380999407467, "grad_norm": 0.12353515625, "learning_rate": 0.0001982083682742156, "loss": 0.1946, "step": 38 },
    { "epoch": 0.24649417341497137, "grad_norm": 0.107421875, "learning_rate": 0.00019807852804032305, "loss": 0.1418, "step": 39 },
    { "epoch": 0.2528145368358681, "grad_norm": 0.11572265625, "learning_rate": 0.00019794419170536916, "loss": 0.1651, "step": 40 },
    { "epoch": 0.2591349002567648, "grad_norm": 0.1337890625, "learning_rate": 0.00019780536542758, "loss": 0.1821, "step": 41 },
    { "epoch": 0.2654552636776615, "grad_norm": 0.12353515625, "learning_rate": 0.00019766205557100868, "loss": 0.1913, "step": 42 },
    { "epoch": 0.2717756270985582, "grad_norm": 0.12451171875, "learning_rate": 0.00019751426870524407, "loss": 0.194, "step": 43 },
    { "epoch": 0.2780959905194549, "grad_norm": 0.1162109375, "learning_rate": 0.00019736201160510931, "loss": 0.1832, "step": 44 },
    { "epoch": 0.2844163539403516, "grad_norm": 0.12353515625, "learning_rate": 0.0001972052912503514, "loss": 0.19, "step": 45 },
    { "epoch": 0.2907367173612483, "grad_norm": 0.11474609375, "learning_rate": 0.00019704411482532116, "loss": 0.1875, "step": 46 },
    { "epoch": 0.297057080782145, "grad_norm": 0.1123046875, "learning_rate": 0.00019687848971864389, "loss": 0.184, "step": 47 },
    { "epoch": 0.3033774442030417, "grad_norm": 0.107421875, "learning_rate": 0.0001967084235228807, "loss": 0.1581, "step": 48 },
    { "epoch": 0.3033774442030417, "eval_loss": 0.18037649989128113, "eval_runtime": 56.5509, "eval_samples_per_second": 18.85, "eval_steps_per_second": 18.85, "step": 48 },
    { "epoch": 0.3096978076239384, "grad_norm": 0.10302734375, "learning_rate": 0.00019653392403418043, "loss": 0.1766, "step": 49 },
    { "epoch": 0.3160181710448351, "grad_norm": 0.11669921875, "learning_rate": 0.0001963549992519223, "loss": 0.1888, "step": 50 },
    { "epoch": 0.3223385344657318, "grad_norm": 0.10009765625, "learning_rate": 0.00019617165737834916, "loss": 0.139, "step": 51 },
    { "epoch": 0.3286588978866285, "grad_norm": 0.12451171875, "learning_rate": 0.0001959839068181914, "loss": 0.1845, "step": 52 },
    { "epoch": 0.3349792613075252, "grad_norm": 0.12060546875, "learning_rate": 0.00019579175617828187, "loss": 0.2043, "step": 53 },
    { "epoch": 0.3412996247284219, "grad_norm": 0.1142578125, "learning_rate": 0.00019559521426716118, "loss": 0.1678, "step": 54 },
    { "epoch": 0.3476199881493186, "grad_norm": 0.11669921875, "learning_rate": 0.0001953942900946739, "loss": 0.1671, "step": 55 },
    { "epoch": 0.3539403515702153, "grad_norm": 0.115234375, "learning_rate": 0.00019518899287155556, "loss": 0.1724, "step": 56 },
    { "epoch": 0.360260714991112, "grad_norm": 0.126953125, "learning_rate": 0.0001949793320090105, "loss": 0.177, "step": 57 },
    { "epoch": 0.3665810784120087, "grad_norm": 0.1259765625, "learning_rate": 0.00019476531711828027, "loss": 0.1644, "step": 58 },
    { "epoch": 0.3729014418329054, "grad_norm": 0.1142578125, "learning_rate": 0.0001945469580102031, "loss": 0.1564, "step": 59 },
    { "epoch": 0.3792218052538021, "grad_norm": 0.10302734375, "learning_rate": 0.0001943242646947643, "loss": 0.1353, "step": 60 },
    { "epoch": 0.3855421686746988, "grad_norm": 0.10791015625, "learning_rate": 0.00019409724738063714, "loss": 0.1622, "step": 61 },
    { "epoch": 0.3918625320955955, "grad_norm": 0.10791015625, "learning_rate": 0.00019386591647471506, "loss": 0.1564, "step": 62 },
    { "epoch": 0.3981828955164922, "grad_norm": 0.11376953125, "learning_rate": 0.00019363028258163447, "loss": 0.176, "step": 63 },
    { "epoch": 0.4045032589373889, "grad_norm": 0.1162109375, "learning_rate": 0.00019339035650328869, "loss": 0.1912, "step": 64 },
    { "epoch": 0.4045032589373889, "eval_loss": 0.1681559830904007, "eval_runtime": 56.4912, "eval_samples_per_second": 18.87, "eval_steps_per_second": 18.87, "step": 64 },
    { "epoch": 0.4108236223582856, "grad_norm": 0.11572265625, "learning_rate": 0.0001931461492383327, "loss": 0.1959, "step": 65 },
    { "epoch": 0.4171439857791823, "grad_norm": 0.0966796875, "learning_rate": 0.00019289767198167916, "loss": 0.1379, "step": 66 },
    { "epoch": 0.423464349200079, "grad_norm": 0.1142578125, "learning_rate": 0.00019264493612398481, "loss": 0.1669, "step": 67 },
    { "epoch": 0.42978471262097573, "grad_norm": 0.09619140625, "learning_rate": 0.0001923879532511287, "loss": 0.1279, "step": 68 },
    { "epoch": 0.43610507604187243, "grad_norm": 0.10986328125, "learning_rate": 0.0001921267351436808, "loss": 0.1535, "step": 69 },
    { "epoch": 0.44242543946276913, "grad_norm": 0.11962890625, "learning_rate": 0.0001918612937763622, "loss": 0.1697, "step": 70 },
    { "epoch": 0.44874580288366583, "grad_norm": 0.107421875, "learning_rate": 0.00019159164131749587, "loss": 0.166, "step": 71 },
    { "epoch": 0.45506616630456254, "grad_norm": 0.10986328125, "learning_rate": 0.00019131779012844912, "loss": 0.1508, "step": 72 },
    { "epoch": 0.46138652972545924, "grad_norm": 0.1201171875, "learning_rate": 0.00019103975276306678, "loss": 0.1617, "step": 73 },
    { "epoch": 0.46770689314635594, "grad_norm": 0.1103515625, "learning_rate": 0.00019075754196709572, "loss": 0.1436, "step": 74 },
    { "epoch": 0.47402725656725264, "grad_norm": 0.1025390625, "learning_rate": 0.0001904711706776006, "loss": 0.1408, "step": 75 },
    { "epoch": 0.48034761998814934, "grad_norm": 0.1171875, "learning_rate": 0.00019018065202237083, "loss": 0.1594, "step": 76 },
    { "epoch": 0.48666798340904605, "grad_norm": 0.10546875, "learning_rate": 0.00018988599931931866, "loss": 0.1394, "step": 77 },
    { "epoch": 0.49298834682994275, "grad_norm": 0.111328125, "learning_rate": 0.0001895872260758688, "loss": 0.1448, "step": 78 },
    { "epoch": 0.49930871025083945, "grad_norm": 0.111328125, "learning_rate": 0.00018928434598833912, "loss": 0.156, "step": 79 },
    { "epoch": 0.5056290736717362, "grad_norm": 0.125, "learning_rate": 0.00018897737294131284, "loss": 0.1692, "step": 80 },
    { "epoch": 0.5056290736717362, "eval_loss": 0.15801414847373962, "eval_runtime": 57.0109, "eval_samples_per_second": 18.698, "eval_steps_per_second": 18.698, "step": 80 },
    { "epoch": 0.5119494370926329, "grad_norm": 0.10400390625, "learning_rate": 0.00018866632100700197, "loss": 0.1318, "step": 81 },
    { "epoch": 0.5182698005135296, "grad_norm": 0.1357421875, "learning_rate": 0.0001883512044446023, "loss": 0.203, "step": 82 },
    { "epoch": 0.5245901639344263, "grad_norm": 0.126953125, "learning_rate": 0.00018803203769963967, "loss": 0.1968, "step": 83 },
    { "epoch": 0.530910527355323, "grad_norm": 0.1025390625, "learning_rate": 0.0001877088354033077, "loss": 0.139, "step": 84 },
    { "epoch": 0.5372308907762197, "grad_norm": 0.1044921875, "learning_rate": 0.0001873816123717973, "loss": 0.1301, "step": 85 },
    { "epoch": 0.5435512541971164, "grad_norm": 0.107421875, "learning_rate": 0.0001870503836056172, "loss": 0.1253, "step": 86 },
    { "epoch": 0.5498716176180131, "grad_norm": 0.11474609375, "learning_rate": 0.00018671516428890648, "loss": 0.1575, "step": 87 },
    { "epoch": 0.5561919810389098, "grad_norm": 0.12353515625, "learning_rate": 0.00018637596978873835, "loss": 0.1627, "step": 88 },
    { "epoch": 0.5625123444598065, "grad_norm": 0.1279296875, "learning_rate": 0.00018603281565441585, "loss": 0.1762, "step": 89 },
    { "epoch": 0.5688327078807032, "grad_norm": 0.1259765625, "learning_rate": 0.00018568571761675893, "loss": 0.1537, "step": 90 },
    { "epoch": 0.5751530713015999, "grad_norm": 0.1259765625, "learning_rate": 0.00018533469158738344, "loss": 0.1752, "step": 91 },
    { "epoch": 0.5814734347224966, "grad_norm": 0.11865234375, "learning_rate": 0.0001849797536579715, "loss": 0.1441, "step": 92 },
    { "epoch": 0.5877937981433933, "grad_norm": 0.1083984375, "learning_rate": 0.00018462092009953408, "loss": 0.1471, "step": 93 },
    { "epoch": 0.59411416156429, "grad_norm": 0.1201171875, "learning_rate": 0.0001842582073616649, "loss": 0.1584, "step": 94 },
    { "epoch": 0.6004345249851867, "grad_norm": 0.1005859375, "learning_rate": 0.00018389163207178656, "loss": 0.1255, "step": 95 },
    { "epoch": 0.6067548884060834, "grad_norm": 0.10595703125, "learning_rate": 0.000183521211034388, "loss": 0.1401, "step": 96 },
    { "epoch": 0.6067548884060834, "eval_loss": 0.1516103893518448, "eval_runtime": 57.9856, "eval_samples_per_second": 18.384, "eval_steps_per_second": 18.384, "step": 96 },
    { "epoch": 0.6130752518269801, "grad_norm": 0.0966796875, "learning_rate": 0.00018314696123025454, "loss": 0.1359, "step": 97 },
    { "epoch": 0.6193956152478768, "grad_norm": 0.107421875, "learning_rate": 0.00018276889981568906, "loss": 0.1354, "step": 98 },
    { "epoch": 0.6257159786687735, "grad_norm": 0.10986328125, "learning_rate": 0.00018238704412172586, "loss": 0.1315, "step": 99 },
    { "epoch": 0.6320363420896702, "grad_norm": 0.1083984375, "learning_rate": 0.0001820014116533359, "loss": 0.1522, "step": 100 },
    { "epoch": 0.6383567055105669, "grad_norm": 0.11767578125, "learning_rate": 0.00018161202008862458, "loss": 0.1754, "step": 101 },
    { "epoch": 0.6446770689314636, "grad_norm": 0.1025390625, "learning_rate": 0.00018121888727802113, "loss": 0.1368, "step": 102 },
    { "epoch": 0.6509974323523603, "grad_norm": 0.09814453125, "learning_rate": 0.00018082203124346045, "loss": 0.14, "step": 103 },
    { "epoch": 0.657317795773257, "grad_norm": 0.115234375, "learning_rate": 0.0001804214701775569, "loss": 0.157, "step": 104 },
    { "epoch": 0.6636381591941537, "grad_norm": 0.11376953125, "learning_rate": 0.00018001722244277035, "loss": 0.1575, "step": 105 },
    { "epoch": 0.6699585226150504, "grad_norm": 0.09765625, "learning_rate": 0.00017960930657056438, "loss": 0.1229, "step": 106 },
    { "epoch": 0.6762788860359471, "grad_norm": 0.099609375, "learning_rate": 0.00017919774126055673, "loss": 0.1294, "step": 107 },
    { "epoch": 0.6825992494568438, "grad_norm": 0.09716796875, "learning_rate": 0.00017878254537966216, "loss": 0.1381, "step": 108 },
    { "epoch": 0.6889196128777405, "grad_norm": 0.11083984375, "learning_rate": 0.0001783637379612275, "loss": 0.1494, "step": 109 },
    { "epoch": 0.6952399762986372, "grad_norm": 0.107421875, "learning_rate": 0.00017794133820415916, "loss": 0.1527, "step": 110 },
    { "epoch": 0.7015603397195339, "grad_norm": 0.111328125, "learning_rate": 0.00017751536547204295, "loss": 0.1335, "step": 111 },
    { "epoch": 0.7078807031404306, "grad_norm": 0.10400390625, "learning_rate": 0.0001770858392922565, "loss": 0.1204, "step": 112 },
    { "epoch": 0.7078807031404306, "eval_loss": 0.14627417922019958, "eval_runtime": 56.8973, "eval_samples_per_second": 18.736, "eval_steps_per_second": 18.736, "step": 112 },
    { "epoch": 0.7142010665613273, "grad_norm": 0.09814453125, "learning_rate": 0.00017665277935507398, "loss": 0.122, "step": 113 },
    { "epoch": 0.720521429982224, "grad_norm": 0.0966796875, "learning_rate": 0.00017621620551276366, "loss": 0.134, "step": 114 },
    { "epoch": 0.7268417934031207, "grad_norm": 0.09765625, "learning_rate": 0.00017577613777867762, "loss": 0.1185, "step": 115 },
    { "epoch": 0.7331621568240174, "grad_norm": 0.1064453125, "learning_rate": 0.00017533259632633442, "loss": 0.1337, "step": 116 },
    { "epoch": 0.7394825202449141, "grad_norm": 0.130859375, "learning_rate": 0.00017488560148849427, "loss": 0.1503, "step": 117 },
    { "epoch": 0.7458028836658108, "grad_norm": 0.10791015625, "learning_rate": 0.00017443517375622704, "loss": 0.1594, "step": 118 },
    { "epoch": 0.7521232470867075, "grad_norm": 0.1181640625, "learning_rate": 0.0001739813337779727, "loss": 0.1397, "step": 119 },
    { "epoch": 0.7584436105076042, "grad_norm": 0.10302734375, "learning_rate": 0.00017352410235859503, "loss": 0.1314, "step": 120 },
    { "epoch": 0.7647639739285009, "grad_norm": 0.115234375, "learning_rate": 0.0001730635004584276, "loss": 0.1466, "step": 121 },
    { "epoch": 0.7710843373493976, "grad_norm": 0.0986328125, "learning_rate": 0.0001725995491923131, "loss": 0.1147, "step": 122 },
    { "epoch": 0.7774047007702943, "grad_norm": 0.134765625, "learning_rate": 0.0001721322698286354, "loss": 0.1637, "step": 123 },
    { "epoch": 0.783725064191191, "grad_norm": 0.11279296875, "learning_rate": 0.00017166168378834448, "loss": 0.138, "step": 124 },
    { "epoch": 0.7900454276120877, "grad_norm": 0.11767578125, "learning_rate": 0.00017118781264397446, "loss": 0.1511, "step": 125 },
    { "epoch": 0.7963657910329844, "grad_norm": 0.115234375, "learning_rate": 0.00017071067811865476, "loss": 0.1407, "step": 126 },
    { "epoch": 0.8026861544538811, "grad_norm": 0.1064453125, "learning_rate": 0.0001702303020851142, "loss": 0.1342, "step": 127 },
    { "epoch": 0.8090065178747778, "grad_norm": 0.11865234375, "learning_rate": 0.00016974670656467824, "loss": 0.1336, "step": 128 },
    { "epoch": 0.8090065178747778, "eval_loss": 0.14203742146492004, "eval_runtime": 58.4188, "eval_samples_per_second": 18.248, "eval_steps_per_second": 18.248, "step": 128 },
    { "epoch": 0.8153268812956745, "grad_norm": 0.1083984375, "learning_rate": 0.0001692599137262597, "loss": 0.1543, "step": 129 },
    { "epoch": 0.8216472447165712, "grad_norm": 0.0888671875, "learning_rate": 0.00016876994588534234, "loss": 0.1116, "step": 130 },
    { "epoch": 0.827967608137468, "grad_norm": 0.1044921875, "learning_rate": 0.00016827682550295785, "loss": 0.1267, "step": 131 },
    { "epoch": 0.8342879715583646, "grad_norm": 0.10400390625, "learning_rate": 0.0001677805751846563, "loss": 0.1361, "step": 132 },
    { "epoch": 0.8406083349792614, "grad_norm": 0.11376953125, "learning_rate": 0.00016728121767946977, "loss": 0.1372, "step": 133 },
    { "epoch": 0.846928698400158, "grad_norm": 0.10498046875, "learning_rate": 0.00016677877587886956, "loss": 0.1258, "step": 134 },
    { "epoch": 0.8532490618210548, "grad_norm": 0.11669921875, "learning_rate": 0.00016627327281571678, "loss": 0.1427, "step": 135 },
    { "epoch": 0.8595694252419515, "grad_norm": 0.1328125, "learning_rate": 0.00016576473166320644, "loss": 0.187, "step": 136 },
    { "epoch": 0.8658897886628482, "grad_norm": 0.1044921875, "learning_rate": 0.00016525317573380525, "loss": 0.1417, "step": 137 },
    { "epoch": 0.8722101520837449, "grad_norm": 0.10107421875, "learning_rate": 0.00016473862847818277, "loss": 0.1399, "step": 138 },
    { "epoch": 0.8785305155046416, "grad_norm": 0.1025390625, "learning_rate": 0.00016422111348413657, "loss": 0.141, "step": 139 },
    { "epoch": 0.8848508789255383, "grad_norm": 0.1005859375, "learning_rate": 0.00016370065447551078, "loss": 0.1236, "step": 140 },
    { "epoch": 0.891171242346435, "grad_norm": 0.10205078125, "learning_rate": 0.0001631772753111086, "loss": 0.1236, "step": 141 },
    { "epoch": 0.8974916057673317, "grad_norm": 0.10302734375, "learning_rate": 0.00016265099998359866, "loss": 0.128, "step": 142 },
    { "epoch": 0.9038119691882284, "grad_norm": 0.09814453125, "learning_rate": 0.00016212185261841499, "loss": 0.1227, "step": 143 },
    { "epoch": 0.9101323326091251, "grad_norm": 0.1064453125, "learning_rate": 0.00016158985747265108, "loss": 0.1339, "step": 144 },
    { "epoch": 0.9101323326091251, "eval_loss": 0.1379576325416565, "eval_runtime": 61.2763, "eval_samples_per_second": 17.397, "eval_steps_per_second": 17.397, "step": 144 },
    { "epoch": 0.9164526960300218, "grad_norm": 0.10009765625, "learning_rate": 0.00016105503893394806, "loss": 0.1255, "step": 145 },
    { "epoch": 0.9227730594509185, "grad_norm": 0.103515625, "learning_rate": 0.00016051742151937655, "loss": 0.1237, "step": 146 },
    { "epoch": 0.9290934228718152, "grad_norm": 0.10009765625, "learning_rate": 0.0001599770298743128, "loss": 0.1212, "step": 147 },
    { "epoch": 0.9354137862927119, "grad_norm": 0.1123046875, "learning_rate": 0.000159433888771309, "loss": 0.1402, "step": 148 },
    { "epoch": 0.9417341497136086, "grad_norm": 0.11865234375, "learning_rate": 0.00015888802310895742, "loss": 0.1475, "step": 149 },
    { "epoch": 0.9480545131345053, "grad_norm": 0.1064453125, "learning_rate": 0.00015833945791074943, "loss": 0.1133, "step": 150 },
    { "epoch": 0.954374876555402, "grad_norm": 0.10986328125, "learning_rate": 0.00015778821832392777, "loss": 0.1336, "step": 151 },
    { "epoch": 0.9606952399762987, "grad_norm": 0.1162109375, "learning_rate": 0.0001572343296183344, "loss": 0.1479, "step": 152 },
    { "epoch": 0.9670156033971954, "grad_norm": 0.10546875, "learning_rate": 0.00015667781718525157, "loss": 0.1291, "step": 153 },
    { "epoch": 0.9733359668180921, "grad_norm": 0.11181640625, "learning_rate": 0.00015611870653623825, "loss": 0.1337, "step": 154 },
    { "epoch": 0.9796563302389888, "grad_norm": 0.09228515625, "learning_rate": 0.00015555702330196023, "loss": 0.1024, "step": 155 },
    { "epoch": 0.9859766936598855, "grad_norm": 0.10791015625, "learning_rate": 0.0001549927932310155, "loss": 0.1565, "step": 156 },
    { "epoch": 0.9922970570807822, "grad_norm": 0.10595703125, "learning_rate": 0.0001544260421887537, "loss": 0.1328, "step": 157 },
    { "epoch": 0.9986174205016789, "grad_norm": 0.1142578125, "learning_rate": 0.00015385679615609042, "loss": 0.164, "step": 158 },
    { "epoch": 1.0049377839225755, "grad_norm": 0.09814453125, "learning_rate": 0.00015328508122831636, "loss": 0.1184, "step": 159 },
    { "epoch": 1.0112581473434723, "grad_norm": 0.08935546875, "learning_rate": 0.00015271092361390077, "loss": 0.101, "step": 160 },
    { "epoch": 1.0112581473434723, "eval_loss": 0.13463501632213593, "eval_runtime": 57.8932, "eval_samples_per_second": 18.413, "eval_steps_per_second": 18.413, "step": 160 },
    { "epoch": 1.017578510764369, "grad_norm": 0.1005859375, "learning_rate": 0.0001521343496332903, "loss": 0.118, "step": 161 },
    { "epoch": 1.0238988741852657, "grad_norm": 0.087890625, "learning_rate": 0.00015155538571770218, "loss": 0.0956, "step": 162 },
    { "epoch": 1.0302192376061623, "grad_norm": 0.09912109375, "learning_rate": 0.00015097405840791276, "loss": 0.1089, "step": 163 },
    { "epoch": 1.036539601027059, "grad_norm": 0.0927734375, "learning_rate": 0.00015039039435304078, "loss": 0.093, "step": 164 },
    { "epoch": 1.0428599644479557, "grad_norm": 0.126953125, "learning_rate": 0.00014980442030932558, "loss": 0.1443, "step": 165 },
    { "epoch": 1.0491803278688525, "grad_norm": 0.10009765625, "learning_rate": 0.00014921616313890072, "loss": 0.1039, "step": 166 },
    { "epoch": 1.055500691289749, "grad_norm": 0.09619140625, "learning_rate": 0.00014862564980856258, "loss": 0.0798, "step": 167 },
    { "epoch": 1.061821054710646, "grad_norm": 0.12060546875, "learning_rate": 0.00014803290738853395, "loss": 0.1199, "step": 168 },
    { "epoch": 1.0681414181315425, "grad_norm": 0.11572265625, "learning_rate": 0.00014743796305122331, "loss": 0.1028, "step": 169 },
    { "epoch": 1.0744617815524393, "grad_norm": 0.10498046875, "learning_rate": 0.00014684084406997903, "loss": 0.097, "step": 170 },
    { "epoch": 1.080782144973336, "grad_norm": 0.10302734375, "learning_rate": 0.00014624157781783926, "loss": 0.0943, "step": 171 },
    { "epoch": 1.0871025083942327, "grad_norm": 0.1123046875, "learning_rate": 0.0001456401917662769, "loss": 0.0981, "step": 172 },
    { "epoch": 1.0934228718151293, "grad_norm": 0.10595703125, "learning_rate": 0.00014503671348394057, "loss": 0.1008, "step": 173 },
    { "epoch": 1.0997432352360261, "grad_norm": 0.12060546875, "learning_rate": 0.00014443117063539038, "loss": 0.1239, "step": 174 },
    { "epoch": 1.1060635986569227, "grad_norm": 0.11474609375, "learning_rate": 0.00014382359097983013, "loss": 0.1035, "step": 175 },
    { "epoch": 1.1123839620778195, "grad_norm": 0.0966796875, "learning_rate": 0.00014321400236983457, "loss": 0.0871, "step": 176 },
    { "epoch": 1.1123839620778195, "eval_loss": 0.13297832012176514, "eval_runtime": 57.7156, "eval_samples_per_second": 18.47, "eval_steps_per_second": 18.47, "step": 176 },
    { "epoch": 1.1187043254987161, "grad_norm": 0.10693359375, "learning_rate": 0.00014260243275007265, "loss": 0.1073, "step": 177 },
    { "epoch": 1.125024688919613, "grad_norm": 0.1083984375, "learning_rate": 0.00014198891015602646, "loss": 0.1054, "step": 178 },
    { "epoch": 1.1313450523405095, "grad_norm": 0.0947265625, "learning_rate": 0.00014137346271270604, "loss": 0.0977, "step": 179 },
    { "epoch": 1.1376654157614063, "grad_norm": 0.08837890625, "learning_rate": 0.0001407561186333601, "loss": 0.0839, "step": 180 },
    { "epoch": 1.143985779182303, "grad_norm": 0.11181640625, "learning_rate": 0.00014013690621818262, "loss": 0.1173, "step": 181 },
    { "epoch": 1.1503061426031997, "grad_norm": 0.10302734375, "learning_rate": 0.00013951585385301555, "loss": 0.113, "step": 182 },
    { "epoch": 1.1566265060240963, "grad_norm": 0.10498046875, "learning_rate": 0.0001388929900080476, "loss": 0.0938, "step": 183 },
    { "epoch": 1.1629468694449931, "grad_norm": 0.10205078125, "learning_rate": 0.000138268343236509, "loss": 0.0953, "step": 184 },
    { "epoch": 1.1692672328658897, "grad_norm": 0.099609375, "learning_rate": 0.00013764194217336264, "loss": 0.0954, "step": 185 },
    { "epoch": 1.1755875962867866, "grad_norm": 0.10400390625, "learning_rate": 0.00013701381553399145, "loss": 0.0971, "step": 186 },
    { "epoch": 1.1819079597076831, "grad_norm": 0.107421875, "learning_rate": 0.00013638399211288188, "loss": 0.0995, "step": 187 },
    { "epoch": 1.18822832312858, "grad_norm": 0.11328125, "learning_rate": 0.000135752500782304, "loss": 0.1199, "step": 188 },
    { "epoch": 1.1945486865494765, "grad_norm": 0.10498046875, "learning_rate": 0.00013511937049098805, "loss": 0.0885, "step": 189 },
    { "epoch": 1.2008690499703734, "grad_norm": 0.1005859375, "learning_rate": 0.00013448463026279704, "loss": 0.0957, "step": 190 },
    { "epoch": 1.20718941339127, "grad_norm": 0.08740234375, "learning_rate": 0.0001338483091953967, "loss": 0.0753, "step": 191 },
    { "epoch": 1.2135097768121668, "grad_norm": 0.12353515625, "learning_rate": 0.0001332104364589212, "loss": 0.1035, "step": 192 },
    { "epoch": 1.2135097768121668, "eval_loss": 0.13200590014457703, "eval_runtime": 58.9558, "eval_samples_per_second": 18.081, "eval_steps_per_second": 18.081, "step": 192 },
    { "epoch": 1.2198301402330634, "grad_norm": 0.115234375, "learning_rate": 0.00013257104129463614, "loss": 0.104, "step": 193 },
    { "epoch": 1.2261505036539602, "grad_norm": 0.107421875, "learning_rate": 0.000131930153013598, "loss": 0.0966, "step": 194 },
    { "epoch": 1.2324708670748568, "grad_norm": 0.10791015625, "learning_rate": 0.00013128780099531056, "loss": 0.1062, "step": 195 },
    { "epoch": 1.2387912304957536, "grad_norm": 0.10888671875, "learning_rate": 0.00013064401468637792, "loss": 0.1027, "step": 196 },
    { "epoch": 1.2451115939166502, "grad_norm": 0.10546875, "learning_rate": 0.0001299988235991548, "loss": 0.0927, "step": 197 },
    { "epoch": 1.251431957337547, "grad_norm": 0.11865234375, "learning_rate": 0.00012935225731039348, "loss": 0.1031, "step": 198 },
    { "epoch": 1.2577523207584436, "grad_norm": 0.10986328125, "learning_rate": 0.00012870434545988812, "loss": 0.109, "step": 199 },
    { "epoch": 1.2640726841793404, "grad_norm": 0.0888671875, "learning_rate": 0.00012805511774911584, "loss": 0.0835, "step": 200 },
    { "epoch": 1.270393047600237, "grad_norm": 0.09228515625, "learning_rate": 0.00012740460393987526, "loss": 0.0902, "step": 201 },
    { "epoch": 1.2767134110211338, "grad_norm": 0.10986328125, "learning_rate": 0.00012675283385292212, "loss": 0.114, "step": 202 },
    { "epoch": 1.2830337744420304, "grad_norm": 0.095703125, "learning_rate": 0.0001260998373666022, "loss": 0.0887, "step": 203 },
    { "epoch": 1.2893541378629272, "grad_norm": 0.1103515625, "learning_rate": 0.00012544564441548182, "loss": 0.095, "step": 204 },
    { "epoch": 1.2956745012838238, "grad_norm": 0.099609375, "learning_rate": 0.00012479028498897535, "loss": 0.0883, "step": 205 },
    { "epoch": 1.3019948647047206, "grad_norm": 0.10498046875, "learning_rate": 0.00012413378912997058, "loss": 0.0824, "step": 206 },
    { "epoch": 1.3083152281256172, "grad_norm": 0.091796875, "learning_rate": 0.0001234761869334515, "loss": 0.0736, "step": 207 },
    { "epoch": 1.314635591546514, "grad_norm": 0.1103515625, "learning_rate": 0.0001228175085451186, "loss": 0.1025, "step": 208 },
    { "epoch": 1.314635591546514, "eval_loss": 0.13002794981002808, "eval_runtime": 57.2937, "eval_samples_per_second": 18.606, "eval_steps_per_second": 18.606, "step": 208 },
    { "epoch": 1.3209559549674106, "grad_norm": 0.1220703125, "learning_rate": 0.00012215778416000707, "loss": 0.1188, "step": 209 },
    { "epoch": 1.3272763183883074, "grad_norm": 0.1123046875, "learning_rate": 0.00012149704402110243, "loss": 0.101, "step": 210 },
    { "epoch": 1.333596681809204, "grad_norm": 0.1083984375, "learning_rate": 0.00012083531841795425, "loss": 0.1031, "step": 211 },
    { "epoch": 1.3399170452301008, "grad_norm": 0.1083984375, "learning_rate": 0.00012017263768528775, "loss": 0.0974, "step": 212 },
    { "epoch": 1.3462374086509974, "grad_norm": 0.11181640625, "learning_rate": 0.00011950903220161285, "loss": 0.1009, "step": 213 },
    { "epoch": 1.3525577720718942, "grad_norm": 0.10498046875, "learning_rate": 0.00011884453238783185, "loss": 0.1017, "step": 214 },
    { "epoch": 1.3588781354927908, "grad_norm": 0.1005859375, "learning_rate": 0.00011817916870584482, "loss": 0.095, "step": 215 },
    { "epoch": 1.3651984989136876, "grad_norm": 0.103515625, "learning_rate": 0.00011751297165715309, "loss": 0.1064, "step": 216 },
    { "epoch": 1.3715188623345842, "grad_norm": 0.11181640625, "learning_rate": 0.00011684597178146115, "loss": 0.106, "step": 217 },
    { "epoch": 1.377839225755481, "grad_norm": 0.1015625, "learning_rate": 0.0001161781996552765, "loss": 0.0836, "step": 218 },
    { "epoch": 1.3841595891763776, "grad_norm": 0.10107421875, "learning_rate": 0.00011550968589050799, "loss": 0.0933, "step": 219 },
    { "epoch": 1.3904799525972744, "grad_norm": 0.09619140625, "learning_rate": 0.00011484046113306262, "loss": 0.0874, "step": 220 },
    { "epoch": 1.396800316018171, "grad_norm": 0.11669921875, "learning_rate": 0.0001141705560614406, "loss": 0.1175, "step": 221 },
    { "epoch": 1.4031206794390678, "grad_norm": 0.10302734375, "learning_rate": 0.00011350000138532902, "loss": 0.1018, "step": 222 },
    { "epoch": 1.4094410428599644, "grad_norm": 0.11865234375, "learning_rate": 0.00011282882784419398, "loss": 0.1207, "step": 223 },
    { "epoch": 1.4157614062808612, "grad_norm": 0.103515625, "learning_rate": 0.00011215706620587149, "loss": 0.0936, "step": 224 },
    { "epoch": 1.4157614062808612, "eval_loss": 0.12633191049098969, "eval_runtime": 57.953, "eval_samples_per_second": 18.394, "eval_steps_per_second": 18.394, "step": 224 },
    { "epoch": 1.4220817697017578, "grad_norm": 0.10986328125, "learning_rate": 0.00011148474726515716, "loss": 0.0982, "step": 225 },
    { "epoch": 1.4284021331226546, "grad_norm": 0.1005859375, "learning_rate": 0.00011081190184239419, "loss": 0.0912, "step": 226 },
    { "epoch": 1.4347224965435512, "grad_norm": 0.11083984375, "learning_rate": 0.0001101385607820608, "loss": 0.1165, "step": 227 },
    { "epoch": 1.441042859964448, "grad_norm": 0.1103515625, "learning_rate": 0.0001094647549513561, "loss": 0.0996, "step": 228 },
    { "epoch": 1.4473632233853446, "grad_norm": 0.10986328125, "learning_rate": 0.00010879051523878522, "loss": 0.0946, "step": 229 },
    { "epoch": 1.4536835868062414, "grad_norm": 0.1083984375, "learning_rate": 0.00010811587255274313, "loss": 0.1126, "step": 230 },
    { "epoch": 1.460003950227138, "grad_norm": 0.09765625, "learning_rate": 0.00010744085782009792, "loss": 0.0875, "step": 231 },
    { "epoch": 1.4663243136480348, "grad_norm": 0.109375, "learning_rate": 0.00010676550198477293, "loss": 0.0989, "step": 232 },
    { "epoch": 1.4726446770689314, "grad_norm": 0.11376953125, "learning_rate": 0.00010608983600632831, "loss": 0.1057, "step": 233 },
    { "epoch": 1.4789650404898282, "grad_norm": 0.10791015625, "learning_rate": 0.00010541389085854176, "loss": 0.0906, "step": 234 },
    { "epoch": 1.4852854039107248, "grad_norm": 0.10400390625, "learning_rate": 0.00010473769752798859, "loss": 0.0872, "step": 235 },
    { "epoch": 1.4916057673316216, "grad_norm": 0.109375, "learning_rate": 0.00010406128701262128, "loss": 0.1047, "step": 236 },
    { "epoch": 1.4979261307525182, "grad_norm": 0.11083984375, "learning_rate": 0.00010338469032034845, "loss": 0.0926, "step": 237 },
    { "epoch": 1.5042464941734148, "grad_norm": 0.10498046875, "learning_rate": 0.00010270793846761347, "loss": 0.0905, "step": 238 },
    { "epoch": 1.5105668575943116, "grad_norm": 0.11279296875, "learning_rate": 0.00010203106247797243, "loss": 0.0996, "step": 239 },
    { "epoch": 1.5168872210152085, "grad_norm": 0.09326171875, "learning_rate": 0.00010135409338067219, "loss": 0.0797, "step": 240 },
    { "epoch": 1.5168872210152085, "eval_loss": 0.1241099089384079, "eval_runtime": 60.1424, "eval_samples_per_second": 17.725, "eval_steps_per_second": 17.725, "step": 240 },
    { "epoch": 1.523207584436105, "grad_norm": 0.11083984375, "learning_rate": 0.00010067706220922785, "loss": 0.0839, "step": 241 },
    { "epoch": 1.5295279478570016, "grad_norm": 0.123046875, "learning_rate": 0.0001, "loss": 0.0984, "step": 242 },
    { "epoch": 1.5358483112778984, "grad_norm": 0.1015625, "learning_rate": 9.932293779077216e-05, "loss": 0.0893, "step": 243 },
    { "epoch": 1.5421686746987953, "grad_norm": 0.1103515625, "learning_rate": 9.864590661932783e-05, "loss": 0.0972, "step": 244 },
    { "epoch": 1.5484890381196919, "grad_norm": 0.10693359375, "learning_rate": 9.796893752202758e-05, "loss": 0.0886, "step": 245 },
    { "epoch": 1.5548094015405884, "grad_norm": 0.1044921875, "learning_rate": 9.729206153238657e-05, "loss": 0.0971, "step": 246 },
    { "epoch": 1.5611297649614853, "grad_norm": 0.11083984375, "learning_rate": 9.661530967965156e-05, "loss": 0.108, "step": 247 },
    { "epoch": 1.567450128382382, "grad_norm": 0.09521484375, "learning_rate": 9.59387129873787e-05, "loss": 0.0826, "step": 248 },
    { "epoch": 1.5737704918032787, "grad_norm": 0.09326171875, "learning_rate": 9.526230247201142e-05, "loss": 0.0875, "step": 249 },
    { "epoch": 1.5800908552241753, "grad_norm": 0.078125, "learning_rate": 9.458610914145826e-05, "loss": 0.0742, "step": 250 },
    { "epoch": 1.586411218645072, "grad_norm": 0.1064453125, "learning_rate": 9.391016399367172e-05, "loss": 0.0981, "step": 251 },
    { "epoch": 1.5927315820659689, "grad_norm": 0.1083984375, "learning_rate": 9.323449801522709e-05, "loss": 0.1035, "step": 252 },
    { "epoch": 1.5990519454868655, "grad_norm": 0.119140625, "learning_rate": 9.255914217990211e-05, "loss": 0.111, "step": 253 },
    { "epoch": 1.605372308907762, "grad_norm": 0.10205078125, "learning_rate": 9.18841274472569e-05, "loss": 0.0855, "step": 254 },
    { "epoch": 1.6116926723286589, "grad_norm": 0.09423828125, "learning_rate": 9.120948476121479e-05, "loss": 0.0889, "step": 255 },
    { "epoch": 1.6180130357495557, "grad_norm": 0.11328125, "learning_rate": 9.05352450486439e-05, "loss": 0.1014, "step": 256 },
    { "epoch": 1.6180130357495557, "eval_loss": 0.12204406410455704, "eval_runtime": 58.4358, "eval_samples_per_second": 18.242, "eval_steps_per_second": 18.242, "step": 256 },
    { "epoch": 1.6243333991704523, "grad_norm": 0.10205078125, "learning_rate": 8.986143921793923e-05, "loss": 0.0858, "step": 257 },
    { "epoch": 1.6306537625913489, "grad_norm": 0.11474609375, "learning_rate": 8.918809815760585e-05, "loss": 0.1131, "step": 258 },
    { "epoch": 1.6369741260122457, "grad_norm": 0.1103515625, "learning_rate": 8.851525273484286e-05, "loss": 0.1018, "step": 259 },
    { "epoch": 1.6432944894331425, "grad_norm": 0.126953125, "learning_rate": 8.78429337941285e-05, "loss": 0.1183, "step": 260 },
    { "epoch": 1.649614852854039, "grad_norm": 0.123046875, "learning_rate": 8.717117215580606e-05, "loss": 0.1097, "step": 261 },
    { "epoch": 1.6559352162749357, "grad_norm": 0.107421875, "learning_rate": 8.649999861467099e-05, "loss": 0.095, "step": 262 },
    { "epoch": 1.6622555796958325, "grad_norm": 0.099609375, "learning_rate": 8.582944393855941e-05, "loss": 0.0839, "step": 263 },
    { "epoch": 1.6685759431167293, "grad_norm": 0.10791015625, "learning_rate": 8.515953886693739e-05, "loss": 0.0958, "step": 264 },
    { "epoch": 1.674896306537626, "grad_norm": 0.1005859375, "learning_rate": 8.449031410949206e-05, "loss": 0.0951, "step": 265 },
    { "epoch": 1.6812166699585225, "grad_norm": 0.103515625, "learning_rate": 8.382180034472353e-05, "loss": 0.0948, "step": 266 },
    { "epoch": 1.6875370333794193, "grad_norm": 0.10302734375, "learning_rate": 8.315402821853886e-05, "loss": 0.0828, "step": 267 },
    { "epoch": 1.693857396800316, "grad_norm": 0.10546875, "learning_rate": 8.248702834284693e-05, "loss": 0.0984, "step": 268 },
    { "epoch": 1.7001777602212127, "grad_norm": 0.10400390625, "learning_rate": 8.18208312941552e-05, "loss": 0.0949, "step": 269 },
    { "epoch": 1.7064981236421093, "grad_norm": 0.1240234375, "learning_rate": 8.115546761216822e-05, "loss": 0.1149, "step": 270 },
    { "epoch": 1.712818487063006, "grad_norm": 0.11474609375, "learning_rate": 8.049096779838719e-05, "loss": 0.1048, "step": 271 },
    { "epoch": 1.719138850483903, "grad_norm": 0.109375, "learning_rate": 7.982736231471224e-05, "loss": 0.0984, "step": 272 },
    { "epoch": 1.719138850483903, "eval_loss": 0.11962020397186279, "eval_runtime": 57.5297, "eval_samples_per_second": 18.53, "eval_steps_per_second": 18.53, "step": 272 },
    { "epoch": 1.7254592139047995, "grad_norm": 0.10546875, "learning_rate": 7.916468158204576e-05, "loss": 0.0984, "step": 273 },
    { "epoch": 1.731779577325696, "grad_norm": 0.10791015625, "learning_rate": 7.85029559788976e-05, "loss": 0.1061, "step": 274 },
    { "epoch": 1.738099940746593, "grad_norm": 0.099609375, "learning_rate": 7.784221583999298e-05, "loss": 0.087, "step": 275 },
    { "epoch": 1.7444203041674897, "grad_norm": 0.0966796875, "learning_rate": 7.718249145488142e-05, "loss": 0.0869, "step": 276 },
    { "epoch": 1.7507406675883863, "grad_norm": 0.107421875, "learning_rate": 7.652381306654851e-05, "loss": 0.112, "step": 277 },
    { "epoch": 1.757061031009283, "grad_norm": 0.0849609375, "learning_rate": 7.586621087002945e-05, "loss": 0.0796, "step": 278 },
    { "epoch": 1.7633813944301797, "grad_norm": 0.09375, "learning_rate": 7.520971501102469e-05, "loss": 0.0835, "step": 279 },
    { "epoch": 1.7697017578510765, "grad_norm": 0.1103515625, "learning_rate": 7.455435558451823e-05, "loss": 0.1032, "step": 280 },
    { "epoch": 1.7760221212719731, "grad_norm": 0.1064453125, "learning_rate": 7.390016263339782e-05, "loss": 0.0877, "step": 281 },
    { "epoch": 1.7823424846928697, "grad_norm": 0.0986328125, "learning_rate": 7.324716614707793e-05, "loss": 0.086, "step": 282 },
    { "epoch": 1.7886628481137665, "grad_norm": 0.09619140625, "learning_rate": 7.259539606012478e-05, "loss": 0.0813, "step": 283 },
    { "epoch": 1.7949832115346633, "grad_norm": 0.10595703125, "learning_rate": 7.194488225088417e-05, "loss": 0.0899, "step": 284 },
    { "epoch": 1.80130357495556, "grad_norm": 0.1171875, "learning_rate": 7.129565454011189e-05, "loss": 0.1086, "step": 285 },
    { "epoch": 1.8076239383764565, "grad_norm": 0.1142578125, "learning_rate": 7.064774268960653e-05, "loss": 0.1115, "step": 286 },
    { "epoch": 1.8139443017973533, "grad_norm": 0.1103515625, "learning_rate": 7.000117640084526e-05, "loss": 0.0908, "step": 287 },
    { "epoch": 1.8202646652182501, "grad_norm": 0.11181640625, "learning_rate": 6.93559853136221e-05, "loss": 0.1078, "step": 288 },
    { "epoch": 1.8202646652182501, "eval_loss": 0.11839400231838226, "eval_runtime": 58.7653, "eval_samples_per_second": 18.14, "eval_steps_per_second": 18.14, "step": 288 },
    { "epoch": 1.8265850286391467, "grad_norm": 0.10400390625, "learning_rate": 6.871219900468947e-05, "loss": 0.0937, "step": 289 },
    { "epoch": 1.8329053920600433, "grad_norm": 0.1025390625, "learning_rate": 6.806984698640202e-05, "loss": 0.0832, "step": 290 },
    { "epoch": 1.8392257554809401, "grad_norm": 0.10302734375, "learning_rate": 6.742895870536388e-05, "loss": 0.0926, "step": 291 },
    { "epoch": 1.845546118901837, "grad_norm": 0.10693359375, "learning_rate": 6.678956354107882e-05, "loss": 0.0926, "step": 292 },
    { "epoch": 1.8518664823227335, "grad_norm": 0.10986328125, "learning_rate": 6.615169080460331e-05, "loss": 0.0896, "step": 293 },
    { "epoch": 1.8581868457436301, "grad_norm": 0.109375, "learning_rate": 6.551536973720298e-05, "loss": 0.0857, "step": 294 },
    { "epoch": 1.864507209164527, "grad_norm": 0.09765625, "learning_rate": 6.488062950901198e-05, "loss": 0.0735, "step": 295 },
    { "epoch": 1.8708275725854238, "grad_norm": 0.1083984375, "learning_rate": 6.4247499217696e-05, "loss": 0.0998, "step": 296 },
    { "epoch": 1.8771479360063204, "grad_norm": 0.10205078125, "learning_rate": 6.361600788711816e-05, "loss": 0.0926, "step": 297 },
    { "epoch": 1.883468299427217, "grad_norm": 0.10595703125, "learning_rate": 6.298618446600856e-05, "loss": 0.0913, "step": 298 },
    { "epoch": 1.8897886628481138, "grad_norm": 0.09033203125, "learning_rate": 6.23580578266374e-05, "loss": 0.0759, "step": 299 },
    { "epoch": 1.8961090262690106, "grad_norm": 0.1083984375, "learning_rate": 6.173165676349103e-05, "loss": 0.0897, "step": 300 },
    { "epoch": 1.9024293896899072, "grad_norm": 0.10546875, "learning_rate": 6.11070099919524e-05, "loss": 0.0881, "step": 301 },
    { "epoch": 1.9087497531108037, "grad_norm": 0.1181640625, "learning_rate": 6.048414614698448e-05, "loss": 0.1177, "step": 302 },
    { "epoch": 1.9150701165317006, "grad_norm": 0.1171875, "learning_rate": 5.9863093781817394e-05, "loss": 0.1063, "step": 303 },
    { "epoch": 1.9213904799525974, "grad_norm": 0.09912109375, "learning_rate": 5.924388136663992e-05, "loss": 0.0803, "step": 304 },
    { "epoch": 1.9213904799525974, "eval_loss": 0.1170881986618042, "eval_runtime": 57.9811, "eval_samples_per_second": 18.385, "eval_steps_per_second": 18.385, "step": 304 },
    { "epoch": 1.927710843373494, "grad_norm": 0.10791015625, "learning_rate": 5.862653728729397e-05, "loss": 0.0821, "step": 305 },
    { "epoch": 1.9340312067943906, "grad_norm": 0.09716796875, "learning_rate": 5.801108984397354e-05, "loss": 0.0752, "step": 306 },
    { "epoch": 1.9403515702152874, "grad_norm": 0.0947265625, "learning_rate": 5.739756724992736e-05, "loss": 0.0751, "step": 307 },
    { "epoch": 1.9466719336361842, "grad_norm": 0.10302734375, "learning_rate": 5.6785997630165435e-05, "loss": 0.1088, "step": 308 },
    { "epoch": 1.9529922970570808, "grad_norm": 0.1083984375, "learning_rate": 5.61764090201699e-05, "loss": 0.0907, "step": 309 },
    { "epoch": 1.9593126604779774, "grad_norm": 0.12158203125, "learning_rate": 5.5568829364609664e-05, "loss": 0.0985, "step": 310 },
    { "epoch": 1.9656330238988742, "grad_norm": 0.10498046875, "learning_rate": 5.4963286516059496e-05, "loss": 0.0851, "step": 311 },
    { "epoch": 1.971953387319771, "grad_norm": 0.1083984375, "learning_rate": 5.435980823372311e-05, "loss": 0.0865, "step": 312 },
    { "epoch": 1.9782737507406676, "grad_norm": 0.107421875, "learning_rate": 5.375842218216076e-05, "loss": 0.0911, "step": 313 },
    { "epoch": 1.9845941141615642, "grad_norm": 0.1025390625, "learning_rate": 5.3159155930021e-05, "loss": 0.0886, "step": 314 },
    { "epoch": 1.990914477582461, "grad_norm": 0.10107421875, "learning_rate": 5.25620369487767e-05, "loss": 0.0853, "step": 315 },
    { "epoch": 1.9972348410033578, "grad_norm": 0.09423828125, "learning_rate": 5.196709261146606e-05, "loss": 0.0692, "step": 316 },
    { "epoch": 2.0035552044242544, "grad_norm": 0.087890625, "learning_rate": 5.1374350191437446e-05, "loss": 0.0688, "step": 317 },
    { "epoch": 2.009875567845151, "grad_norm": 0.08251953125, "learning_rate": 5.078383686109926e-05, "loss": 0.0725, "step": 318 },
    { "epoch": 2.016195931266048, "grad_norm": 0.08447265625, "learning_rate": 5.0195579690674447e-05, "loss": 0.0642, "step": 319 },
    { "epoch": 2.0225162946869446, "grad_norm": 0.0869140625, "learning_rate": 4.9609605646959226e-05, "loss": 0.0658, "step": 320 },
    { "epoch": 2.0225162946869446, "eval_loss": 0.1164059117436409, "eval_runtime": 59.182, "eval_samples_per_second": 18.012, "eval_steps_per_second": 18.012, "step": 320 },
    { "epoch": 2.028836658107841, "grad_norm": 0.0751953125, "learning_rate": 4.902594159208723e-05, "loss": 0.0557, "step": 321 },
    { "epoch": 2.035157021528738, "grad_norm": 0.083984375, "learning_rate": 4.844461428229782e-05, "loss": 0.0746, "step": 322 },
    { "epoch": 2.041477384949635, "grad_norm": 0.08447265625, "learning_rate": 4.786565036670972e-05, "loss": 0.0609, "step": 323 },
    { "epoch": 2.0477977483705314, "grad_norm": 0.08837890625, "learning_rate": 4.728907638609925e-05, "loss": 0.0649, "step": 324 },
    { "epoch": 2.054118111791428, "grad_norm": 0.0888671875, "learning_rate": 4.6714918771683646e-05, "loss": 0.0517, "step": 325 },
    { "epoch": 2.0604384752123246, "grad_norm": 0.09326171875, "learning_rate": 4.614320384390959e-05, "loss": 0.0733, "step": 326 },
    { "epoch": 2.0667588386332216, "grad_norm": 0.0947265625, "learning_rate": 4.557395781124632e-05, "loss": 0.0671, "step": 327 },
    { "epoch": 2.073079202054118, "grad_norm": 0.0908203125, "learning_rate": 4.500720676898452e-05, "loss": 0.0593, "step": 328 },
    { "epoch": 2.079399565475015, "grad_norm": 0.09765625, "learning_rate": 4.444297669803981e-05, "loss": 0.0666, "step": 329 },
    { "epoch": 2.0857199288959114, "grad_norm": 0.1083984375, "learning_rate": 4.388129346376178e-05, "loss": 0.0788, "step": 330 },
    { "epoch": 2.092040292316808, "grad_norm": 0.0869140625, "learning_rate": 4.3322182814748436e-05, "loss": 0.0504, "step": 331 },
    { "epoch": 2.098360655737705, "grad_norm": 0.08837890625, "learning_rate": 4.276567038166563e-05, "loss": 0.0472, "step": 332 },
    { "epoch": 2.1046810191586016, "grad_norm": 0.10205078125, "learning_rate": 4.221178167607226e-05, "loss": 0.0598, "step": 333 },
    { "epoch": 2.111001382579498, "grad_norm": 0.087890625, "learning_rate": 4.16605420892506e-05, "loss": 0.0528, "step": 334 },
    { "epoch": 2.1173217460003952, "grad_norm": 0.10595703125, "learning_rate": 4.111197689104258e-05, "loss": 0.0621, "step": 335 },
    { "epoch": 2.123642109421292, "grad_norm": 0.0966796875, "learning_rate": 4.0566111228691064e-05, "loss": 0.0517, "step": 336 },
    { "epoch": 2.123642109421292, "eval_loss": 0.12141799181699753, "eval_runtime": 58.3864, "eval_samples_per_second": 18.258, "eval_steps_per_second": 18.258, "step": 336 },
    { "epoch": 2.1299624728421884, "grad_norm": 0.10205078125, "learning_rate": 4.002297012568722e-05, "loss": 0.0682, "step": 337 },
    { "epoch": 2.136282836263085, "grad_norm": 0.10498046875, "learning_rate": 3.948257848062351e-05, "loss": 0.0599, "step": 338 },
    { "epoch": 2.1426031996839816, "grad_norm": 0.09326171875, "learning_rate": 3.894496106605197e-05, "loss": 0.0565, "step": 339 },
    { "epoch": 2.1489235631048786, "grad_norm": 0.10791015625, "learning_rate": 3.841014252734896e-05, "loss": 0.0807, "step": 340 },
    { "epoch": 2.1552439265257752, "grad_norm": 0.09375, "learning_rate": 3.787814738158504e-05, "loss": 0.0604, "step": 341 },
    { "epoch": 2.161564289946672, "grad_norm": 0.0966796875, "learning_rate": 3.734900001640135e-05, "loss": 0.0576, "step": 342 },
    { "epoch": 2.167884653367569, "grad_norm": 0.10693359375, "learning_rate": 3.6822724688891416e-05, "loss": 0.0784, "step": 343 },
    { "epoch": 2.1742050167884654, "grad_norm": 0.1103515625, "learning_rate": 3.629934552448925e-05, "loss": 0.0648, "step": 344 },
    { "epoch": 2.180525380209362, "grad_norm": 0.10791015625, "learning_rate": 3.5778886515863474e-05, "loss": 0.0664, "step": 345 },
    { "epoch": 2.1868457436302586, "grad_norm": 0.10498046875, "learning_rate": 3.5261371521817244e-05, "loss": 0.0655, "step": 346 },
    { "epoch": 2.1931661070511552, "grad_norm": 0.10400390625, "learning_rate": 3.4746824266194744e-05, "loss": 0.0628, "step": 347 },
    { "epoch": 2.1994864704720523, "grad_norm": 0.099609375, "learning_rate": 3.423526833679355e-05, "loss": 0.064, "step": 348 },
    { "epoch": 2.205806833892949, "grad_norm": 0.111328125, "learning_rate": 3.3726727184283236e-05, "loss": 0.0692, "step": 349 },
    { "epoch": 2.2121271973138454, "grad_norm": 0.09619140625, "learning_rate": 3.322122412113047e-05, "loss": 0.0604, "step": 350 },
    { "epoch": 2.2184475607347425, "grad_norm": 0.1064453125, "learning_rate": 3.271878232053025e-05, "loss": 0.0802, "step": 351 },
    { "epoch": 2.224767924155639, "grad_norm": 0.1005859375, "learning_rate": 3.2219424815343735e-05, "loss": 0.0598, "step": 352 },
    { "epoch": 2.224767924155639, "eval_loss": 0.12026113271713257, "eval_runtime": 59.3025, "eval_samples_per_second": 17.976, "eval_steps_per_second": 17.976, "step": 352 },
{ |
|
"epoch": 2.2310882875765357, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 3.172317449704216e-05, |
|
"loss": 0.0664, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 2.2374086509974322, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 3.123005411465766e-05, |
|
"loss": 0.0519, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.243729014418329, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 3.0740086273740295e-05, |
|
"loss": 0.0685, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.250049377839226, |
|
"grad_norm": 0.09912109375, |
|
"learning_rate": 3.0253293435321793e-05, |
|
"loss": 0.0597, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.2563697412601225, |
|
"grad_norm": 0.107421875, |
|
"learning_rate": 2.9769697914885862e-05, |
|
"loss": 0.0543, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 2.262690104681019, |
|
"grad_norm": 0.1123046875, |
|
"learning_rate": 2.9289321881345254e-05, |
|
"loss": 0.0827, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.269010468101916, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 2.881218735602553e-05, |
|
"loss": 0.0663, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 2.2753308315228127, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 2.8338316211655536e-05, |
|
"loss": 0.0665, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.2816511949437093, |
|
"grad_norm": 0.10205078125, |
|
"learning_rate": 2.7867730171364624e-05, |
|
"loss": 0.0671, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 2.287971558364606, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 2.7400450807686938e-05, |
|
"loss": 0.0564, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.2942919217855025, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 2.6936499541572445e-05, |
|
"loss": 0.058, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 2.3006122852063995, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 2.647589764140499e-05, |
|
"loss": 0.0643, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.306932648627296, |
|
"grad_norm": 0.1044921875, |
|
"learning_rate": 2.60186662220273e-05, |
|
"loss": 0.0642, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.3132530120481927, |
|
"grad_norm": 0.09765625, |
|
"learning_rate": 2.5564826243772966e-05, |
|
"loss": 0.0612, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.3195733754690897, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 2.5114398511505734e-05, |
|
"loss": 0.0729, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.3258937388899863, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 2.4667403673665623e-05, |
|
"loss": 0.0704, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.3258937388899863, |
|
"eval_loss": 0.11980412155389786, |
|
"eval_runtime": 58.0487, |
|
"eval_samples_per_second": 18.364, |
|
"eval_steps_per_second": 18.364, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.332214102310883, |
|
"grad_norm": 0.09326171875, |
|
"learning_rate": 2.4223862221322424e-05, |
|
"loss": 0.0616, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.3385344657317795, |
|
"grad_norm": 0.111328125, |
|
"learning_rate": 2.3783794487236365e-05, |
|
"loss": 0.0584, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.344854829152676, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 2.3347220644926028e-05, |
|
"loss": 0.0618, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.351175192573573, |
|
"grad_norm": 0.10986328125, |
|
"learning_rate": 2.2914160707743538e-05, |
|
"loss": 0.0775, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.3574955559944697, |
|
"grad_norm": 0.09375, |
|
"learning_rate": 2.248463452795705e-05, |
|
"loss": 0.0481, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.3638159194153663, |
|
"grad_norm": 0.11376953125, |
|
"learning_rate": 2.205866179584084e-05, |
|
"loss": 0.0654, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.3701362828362633, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 2.1636262038772504e-05, |
|
"loss": 0.0733, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.37645664625716, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 2.121745462033784e-05, |
|
"loss": 0.0737, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.3827770096780565, |
|
"grad_norm": 0.10888671875, |
|
"learning_rate": 2.080225873944328e-05, |
|
"loss": 0.0506, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.389097373098953, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 2.0390693429435627e-05, |
|
"loss": 0.0737, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.3954177365198497, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 1.998277755722965e-05, |
|
"loss": 0.0542, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.4017380999407467, |
|
"grad_norm": 0.0966796875, |
|
"learning_rate": 1.957852982244309e-05, |
|
"loss": 0.0603, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.4080584633616433, |
|
"grad_norm": 0.10546875, |
|
"learning_rate": 1.9177968756539567e-05, |
|
"loss": 0.0624, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.41437882678254, |
|
"grad_norm": 0.13671875, |
|
"learning_rate": 1.87811127219789e-05, |
|
"loss": 0.0809, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.420699190203437, |
|
"grad_norm": 0.10693359375, |
|
"learning_rate": 1.838797991137543e-05, |
|
"loss": 0.068, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.4270195536243335, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 1.7998588346664115e-05, |
|
"loss": 0.0787, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.4270195536243335, |
|
"eval_loss": 0.11915146559476852, |
|
"eval_runtime": 58.9409, |
|
"eval_samples_per_second": 18.086, |
|
"eval_steps_per_second": 18.086, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.43333991704523, |
|
"grad_norm": 0.109375, |
|
"learning_rate": 1.761295587827416e-05, |
|
"loss": 0.064, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.4396602804661267, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 1.7231100184310956e-05, |
|
"loss": 0.067, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.4459806438870233, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 1.6853038769745467e-05, |
|
"loss": 0.0582, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.4523010073079203, |
|
"grad_norm": 0.099609375, |
|
"learning_rate": 1.6478788965611993e-05, |
|
"loss": 0.0672, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.458621370728817, |
|
"grad_norm": 0.0986328125, |
|
"learning_rate": 1.6108367928213476e-05, |
|
"loss": 0.0514, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.4649417341497135, |
|
"grad_norm": 0.09375, |
|
"learning_rate": 1.5741792638335095e-05, |
|
"loss": 0.0551, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.4712620975706105, |
|
"grad_norm": 0.09619140625, |
|
"learning_rate": 1.5379079900465953e-05, |
|
"loss": 0.0482, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.477582460991507, |
|
"grad_norm": 0.1083984375, |
|
"learning_rate": 1.502024634202851e-05, |
|
"loss": 0.0622, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.4839028244124037, |
|
"grad_norm": 0.0947265625, |
|
"learning_rate": 1.4665308412616596e-05, |
|
"loss": 0.0605, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.4902231878333003, |
|
"grad_norm": 0.1005859375, |
|
"learning_rate": 1.4314282383241096e-05, |
|
"loss": 0.0622, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.496543551254197, |
|
"grad_norm": 0.10107421875, |
|
"learning_rate": 1.3967184345584173e-05, |
|
"loss": 0.0605, |
|
"step": 395 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 474, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 79, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.92788670068949e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |