|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.946107784431138,
  "eval_steps": 500,
  "global_step": 123,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "grad_norm": 3.5533928871154785,
      "learning_rate": 5e-06,
      "loss": 0.9102,
      "step": 1
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.562413454055786,
      "learning_rate": 1e-05,
      "loss": 0.8938,
      "step": 2
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.4130544662475586,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.8939,
      "step": 3
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.9644792079925537,
      "learning_rate": 2e-05,
      "loss": 0.8619,
      "step": 4
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8332546949386597,
      "learning_rate": 1.9996515418688493e-05,
      "loss": 0.7327,
      "step": 5
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.0436848402023315,
      "learning_rate": 1.998606410321534e-05,
      "loss": 0.5738,
      "step": 6
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.0107005834579468,
      "learning_rate": 1.9968653337272262e-05,
      "loss": 0.5469,
      "step": 7
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.7438812851905823,
      "learning_rate": 1.9944295254705187e-05,
      "loss": 0.4875,
      "step": 8
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.5902363061904907,
      "learning_rate": 1.9913006831057967e-05,
      "loss": 0.4739,
      "step": 9
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.46615442633628845,
      "learning_rate": 1.9874809871741877e-05,
      "loss": 0.4424,
      "step": 10
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.5380907654762268,
      "learning_rate": 1.982973099683902e-05,
      "loss": 0.4509,
      "step": 11
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.47004133462905884,
      "learning_rate": 1.977780162255041e-05,
      "loss": 0.4342,
      "step": 12
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.40097948908805847,
      "learning_rate": 1.9719057939301477e-05,
      "loss": 0.4242,
      "step": 13
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.34752586483955383,
      "learning_rate": 1.9653540886520387e-05,
      "loss": 0.3988,
      "step": 14
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.3399445712566376,
      "learning_rate": 1.9581296124106682e-05,
      "loss": 0.3897,
      "step": 15
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.33792683482170105,
      "learning_rate": 1.9502374000610152e-05,
      "loss": 0.4071,
      "step": 16
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.3393308222293854,
      "learning_rate": 1.941682951814212e-05,
      "loss": 0.4146,
      "step": 17
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.317430704832077,
      "learning_rate": 1.932472229404356e-05,
      "loss": 0.3902,
      "step": 18
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.31500697135925293,
      "learning_rate": 1.922611651933683e-05,
      "loss": 0.3921,
      "step": 19
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.2933339476585388,
      "learning_rate": 1.912108091398988e-05,
      "loss": 0.3684,
      "step": 20
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.2962222695350647,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.3622,
      "step": 21
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.28932732343673706,
      "learning_rate": 1.8892017445499812e-05,
      "loss": 0.3745,
      "step": 22
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.29461416602134705,
      "learning_rate": 1.876814922041299e-05,
      "loss": 0.3853,
      "step": 23
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.28936246037483215,
      "learning_rate": 1.8638170329544164e-05,
      "loss": 0.3924,
      "step": 24
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2772411108016968,
      "learning_rate": 1.8502171357296144e-05,
      "loss": 0.3611,
      "step": 25
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.42885950207710266,
      "learning_rate": 1.8360247083564343e-05,
      "loss": 0.3865,
      "step": 26
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.26259008049964905,
      "learning_rate": 1.8212496417683135e-05,
      "loss": 0.3817,
      "step": 27
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.27198928594589233,
      "learning_rate": 1.805902232949435e-05,
      "loss": 0.3696,
      "step": 28
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.288399338722229,
      "learning_rate": 1.789993177758588e-05,
      "loss": 0.364,
      "step": 29
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.26658785343170166,
      "learning_rate": 1.773533563475053e-05,
      "loss": 0.3552,
      "step": 30
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.29077863693237305,
      "learning_rate": 1.7565348610716963e-05,
      "loss": 0.3662,
      "step": 31
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.27203258872032166,
      "learning_rate": 1.7390089172206594e-05,
      "loss": 0.3743,
      "step": 32
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.28761982917785645,
      "learning_rate": 1.720967946037225e-05,
      "loss": 0.3666,
      "step": 33
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.2662566304206848,
      "learning_rate": 1.7024245205675986e-05,
      "loss": 0.3663,
      "step": 34
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.2690587341785431,
      "learning_rate": 1.6833915640265485e-05,
      "loss": 0.3469,
      "step": 35
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.2774984836578369,
      "learning_rate": 1.6638823407910085e-05,
      "loss": 0.3595,
      "step": 36
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.2686326801776886,
      "learning_rate": 1.6439104471559157e-05,
      "loss": 0.3503,
      "step": 37
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.26562628149986267,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.3634,
      "step": 38
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.2668813467025757,
      "learning_rate": 1.6026346363792565e-05,
      "loss": 0.3594,
      "step": 39
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.265369713306427,
      "learning_rate": 1.58135948502146e-05,
      "loss": 0.3506,
      "step": 40
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.2864444851875305,
      "learning_rate": 1.5596791747843083e-05,
      "loss": 0.3611,
      "step": 41
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.2715173065662384,
      "learning_rate": 1.5376088150285777e-05,
      "loss": 0.3526,
      "step": 42
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.2655029892921448,
      "learning_rate": 1.515163786946896e-05,
      "loss": 0.3097,
      "step": 43
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.2665596604347229,
      "learning_rate": 1.4923597328443423e-05,
      "loss": 0.3064,
      "step": 44
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.26131537556648254,
      "learning_rate": 1.4692125452370664e-05,
      "loss": 0.2972,
      "step": 45
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.2644971013069153,
      "learning_rate": 1.4457383557765385e-05,
      "loss": 0.2917,
      "step": 46
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.26761534810066223,
      "learning_rate": 1.4219535240071378e-05,
      "loss": 0.3019,
      "step": 47
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.24523305892944336,
      "learning_rate": 1.397874625964921e-05,
      "loss": 0.2933,
      "step": 48
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.26386815309524536,
      "learning_rate": 1.3735184426255117e-05,
      "loss": 0.299,
      "step": 49
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.26587986946105957,
      "learning_rate": 1.348901948209167e-05,
      "loss": 0.304,
      "step": 50
    },
    {
      "epoch": 1.22,
      "grad_norm": 0.2641456723213196,
      "learning_rate": 1.324042298351166e-05,
      "loss": 0.3009,
      "step": 51
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.30507728457450867,
      "learning_rate": 1.2989568181457704e-05,
      "loss": 0.297,
      "step": 52
    },
    {
      "epoch": 1.27,
      "grad_norm": 0.29200825095176697,
      "learning_rate": 1.2736629900720832e-05,
      "loss": 0.2976,
      "step": 53
    },
    {
      "epoch": 1.29,
      "grad_norm": 0.2573084235191345,
      "learning_rate": 1.248178441810224e-05,
      "loss": 0.286,
      "step": 54
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.2846490144729614,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.2859,
      "step": 55
    },
    {
      "epoch": 1.34,
      "grad_norm": 0.29042041301727295,
      "learning_rate": 1.1967083476448282e-05,
      "loss": 0.2852,
      "step": 56
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.2833590805530548,
      "learning_rate": 1.1707586720869375e-05,
      "loss": 0.2949,
      "step": 57
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.2739700675010681,
      "learning_rate": 1.1446899920335407e-05,
      "loss": 0.2878,
      "step": 58
    },
    {
      "epoch": 1.41,
      "grad_norm": 0.26602038741111755,
      "learning_rate": 1.118520475171703e-05,
      "loss": 0.2812,
      "step": 59
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.2630026042461395,
      "learning_rate": 1.092268359463302e-05,
      "loss": 0.2766,
      "step": 60
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.2863544225692749,
      "learning_rate": 1.0659519404346955e-05,
      "loss": 0.2794,
      "step": 61
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.3084492087364197,
      "learning_rate": 1.0395895584262696e-05,
      "loss": 0.2816,
      "step": 62
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.27484026551246643,
      "learning_rate": 1.013199585810759e-05,
      "loss": 0.2805,
      "step": 63
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.2850649952888489,
      "learning_rate": 9.868004141892412e-06,
      "loss": 0.2842,
      "step": 64
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.27223044633865356,
      "learning_rate": 9.604104415737309e-06,
      "loss": 0.2728,
      "step": 65
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.28590163588523865,
      "learning_rate": 9.340480595653047e-06,
      "loss": 0.291,
      "step": 66
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.41988322138786316,
      "learning_rate": 9.07731640536698e-06,
      "loss": 0.2809,
      "step": 67
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.33170831203460693,
      "learning_rate": 8.814795248282974e-06,
      "loss": 0.3017,
      "step": 68
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.27779626846313477,
      "learning_rate": 8.553100079664598e-06,
      "loss": 0.272,
      "step": 69
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.2752010226249695,
      "learning_rate": 8.292413279130625e-06,
      "loss": 0.2926,
      "step": 70
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.28389397263526917,
      "learning_rate": 8.03291652355172e-06,
      "loss": 0.2984,
      "step": 71
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.2841671407222748,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.2966,
      "step": 72
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.2771681249141693,
      "learning_rate": 7.518215581897763e-06,
      "loss": 0.2879,
      "step": 73
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.2740575075149536,
      "learning_rate": 7.263370099279173e-06,
      "loss": 0.2868,
      "step": 74
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.2688918709754944,
      "learning_rate": 7.010431818542298e-06,
      "loss": 0.2799,
      "step": 75
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.28831538558006287,
      "learning_rate": 6.759577016488343e-06,
      "loss": 0.2852,
      "step": 76
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.28444015979766846,
      "learning_rate": 6.510980517908334e-06,
      "loss": 0.296,
      "step": 77
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.2855989634990692,
      "learning_rate": 6.264815573744884e-06,
      "loss": 0.3021,
      "step": 78
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.3444156348705292,
      "learning_rate": 6.021253740350793e-06,
      "loss": 0.2756,
      "step": 79
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.292364239692688,
      "learning_rate": 5.780464759928623e-06,
      "loss": 0.2872,
      "step": 80
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.28317102789878845,
      "learning_rate": 5.542616442234618e-06,
      "loss": 0.2889,
      "step": 81
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.28034478425979614,
      "learning_rate": 5.307874547629339e-06,
      "loss": 0.2829,
      "step": 82
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.27621331810951233,
      "learning_rate": 5.076402671556578e-06,
      "loss": 0.2806,
      "step": 83
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.26098138093948364,
      "learning_rate": 4.848362130531039e-06,
      "loss": 0.2667,
      "step": 84
    },
    {
      "epoch": 2.04,
      "grad_norm": 0.2581479549407959,
      "learning_rate": 4.623911849714226e-06,
      "loss": 0.2336,
      "step": 85
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.2613183856010437,
      "learning_rate": 4.403208252156921e-06,
      "loss": 0.2484,
      "step": 86
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.26595166325569153,
      "learning_rate": 4.186405149785403e-06,
      "loss": 0.2485,
      "step": 87
    },
    {
      "epoch": 2.11,
      "grad_norm": 0.2694242596626282,
      "learning_rate": 3.973653636207437e-06,
      "loss": 0.2327,
      "step": 88
    },
    {
      "epoch": 2.13,
      "grad_norm": 0.27296847105026245,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.2476,
      "step": 89
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.27051979303359985,
      "learning_rate": 3.560895528440844e-06,
      "loss": 0.247,
      "step": 90
    },
    {
      "epoch": 2.18,
      "grad_norm": 0.2605610191822052,
      "learning_rate": 3.361176592089919e-06,
      "loss": 0.2512,
      "step": 91
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.2650965750217438,
      "learning_rate": 3.1660843597345137e-06,
      "loss": 0.2497,
      "step": 92
    },
    {
      "epoch": 2.23,
      "grad_norm": 0.2880423963069916,
      "learning_rate": 2.975754794324015e-06,
      "loss": 0.2342,
      "step": 93
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.2823397219181061,
      "learning_rate": 2.7903205396277546e-06,
      "loss": 0.2507,
      "step": 94
    },
    {
      "epoch": 2.28,
      "grad_norm": 0.28528815507888794,
      "learning_rate": 2.6099108277934105e-06,
      "loss": 0.262,
      "step": 95
    },
    {
      "epoch": 2.3,
      "grad_norm": 0.27001848816871643,
      "learning_rate": 2.4346513892830427e-06,
      "loss": 0.247,
      "step": 96
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.2607811689376831,
      "learning_rate": 2.2646643652494693e-06,
      "loss": 0.2348,
      "step": 97
    },
    {
      "epoch": 2.35,
      "grad_norm": 0.26551198959350586,
      "learning_rate": 2.100068222414121e-06,
      "loss": 0.2413,
      "step": 98
    },
    {
      "epoch": 2.37,
      "grad_norm": 0.277545690536499,
      "learning_rate": 1.9409776705056514e-06,
      "loss": 0.2557,
      "step": 99
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.26287129521369934,
      "learning_rate": 1.7875035823168641e-06,
      "loss": 0.2422,
      "step": 100
    },
    {
      "epoch": 2.42,
      "grad_norm": 0.2761671245098114,
      "learning_rate": 1.6397529164356606e-06,
      "loss": 0.2465,
      "step": 101
    },
    {
      "epoch": 2.44,
      "grad_norm": 0.2713651657104492,
      "learning_rate": 1.4978286427038602e-06,
      "loss": 0.2394,
      "step": 102
    },
    {
      "epoch": 2.47,
      "grad_norm": 0.25854483246803284,
      "learning_rate": 1.3618296704558364e-06,
      "loss": 0.2301,
      "step": 103
    },
    {
      "epoch": 2.49,
      "grad_norm": 0.2666904330253601,
      "learning_rate": 1.2318507795870138e-06,
      "loss": 0.2445,
      "step": 104
    },
    {
      "epoch": 2.51,
      "grad_norm": 0.29085126519203186,
      "learning_rate": 1.1079825545001887e-06,
      "loss": 0.2379,
      "step": 105
    },
    {
      "epoch": 2.54,
      "grad_norm": 0.27726316452026367,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.2473,
      "step": 106
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.2835868000984192,
      "learning_rate": 8.789190860101226e-07,
      "loss": 0.2472,
      "step": 107
    },
    {
      "epoch": 2.59,
      "grad_norm": 0.26035404205322266,
      "learning_rate": 7.738834806631712e-07,
      "loss": 0.2296,
      "step": 108
    },
    {
      "epoch": 2.61,
      "grad_norm": 0.30347537994384766,
      "learning_rate": 6.752777059564431e-07,
      "loss": 0.2668,
      "step": 109
    },
    {
      "epoch": 2.63,
      "grad_norm": 0.6443763971328735,
      "learning_rate": 5.831704818578842e-07,
      "loss": 0.2407,
      "step": 110
    },
    {
      "epoch": 2.66,
      "grad_norm": 0.2779424488544464,
      "learning_rate": 4.976259993898503e-07,
      "loss": 0.2385,
      "step": 111
    },
    {
      "epoch": 2.68,
      "grad_norm": 0.26803234219551086,
      "learning_rate": 4.187038758933204e-07,
      "loss": 0.243,
      "step": 112
    },
    {
      "epoch": 2.71,
      "grad_norm": 0.2761984169483185,
      "learning_rate": 3.4645911347961357e-07,
      "loss": 0.2367,
      "step": 113
    },
    {
      "epoch": 2.73,
      "grad_norm": 0.2672320306301117,
      "learning_rate": 2.809420606985236e-07,
      "loss": 0.2425,
      "step": 114
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.25563764572143555,
      "learning_rate": 2.2219837744959284e-07,
      "loss": 0.2351,
      "step": 115
    },
    {
      "epoch": 2.78,
      "grad_norm": 0.2778139114379883,
      "learning_rate": 1.7026900316098217e-07,
      "loss": 0.2384,
      "step": 116
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.31073540449142456,
      "learning_rate": 1.2519012825812804e-07,
      "loss": 0.2444,
      "step": 117
    },
    {
      "epoch": 2.83,
      "grad_norm": 0.2570035457611084,
      "learning_rate": 8.699316894203225e-08,
      "loss": 0.237,
      "step": 118
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.3120572865009308,
      "learning_rate": 5.5704745294815624e-08,
      "loss": 0.2413,
      "step": 119
    },
    {
      "epoch": 2.87,
      "grad_norm": 0.2754634618759155,
      "learning_rate": 3.134666272774034e-08,
      "loss": 0.235,
      "step": 120
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.2690906524658203,
      "learning_rate": 1.3935896784663671e-08,
      "loss": 0.2361,
      "step": 121
    },
    {
      "epoch": 2.92,
      "grad_norm": 0.26298609375953674,
      "learning_rate": 3.4845813115114147e-09,
      "loss": 0.2336,
      "step": 122
    },
    {
      "epoch": 2.95,
      "grad_norm": 0.2775000333786011,
      "learning_rate": 0.0,
      "loss": 0.2431,
      "step": 123
    },
    {
      "epoch": 2.95,
      "step": 123,
      "total_flos": 3.803134263348429e+16,
      "train_loss": 0.3296439991733892,
      "train_runtime": 519.019,
      "train_samples_per_second": 30.797,
      "train_steps_per_second": 0.237
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 123,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 1000,
  "total_flos": 3.803134263348429e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|