Yi-34b-alpaca-cot-lora / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 2337,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 1.9999096463326385e-06,
"loss": 1.9904,
"step": 10
},
{
"epoch": 0.03,
"learning_rate": 1.9996386016581256e-06,
"loss": 1.9027,
"step": 20
},
{
"epoch": 0.04,
"learning_rate": 1.9991869149562214e-06,
"loss": 1.8885,
"step": 30
},
{
"epoch": 0.05,
"learning_rate": 1.9985546678500257e-06,
"loss": 1.9943,
"step": 40
},
{
"epoch": 0.06,
"learning_rate": 1.997741974591229e-06,
"loss": 1.9145,
"step": 50
},
{
"epoch": 0.08,
"learning_rate": 1.9967489820394627e-06,
"loss": 1.9079,
"step": 60
},
{
"epoch": 0.09,
"learning_rate": 1.995575869635765e-06,
"loss": 1.8246,
"step": 70
},
{
"epoch": 0.1,
"learning_rate": 1.994222849370152e-06,
"loss": 1.759,
"step": 80
},
{
"epoch": 0.12,
"learning_rate": 1.9926901657433085e-06,
"loss": 1.6747,
"step": 90
},
{
"epoch": 0.13,
"learning_rate": 1.990978095722409e-06,
"loss": 1.5992,
"step": 100
},
{
"epoch": 0.14,
"learning_rate": 1.9890869486910627e-06,
"loss": 1.5288,
"step": 110
},
{
"epoch": 0.15,
"learning_rate": 1.9870170663934104e-06,
"loss": 1.5143,
"step": 120
},
{
"epoch": 0.17,
"learning_rate": 1.9847688228723645e-06,
"loss": 1.4542,
"step": 130
},
{
"epoch": 0.18,
"learning_rate": 1.9823426244020197e-06,
"loss": 1.4081,
"step": 140
},
{
"epoch": 0.19,
"learning_rate": 1.979738909414235e-06,
"loss": 1.377,
"step": 150
},
{
"epoch": 0.21,
"learning_rate": 1.9769581484194063e-06,
"loss": 1.3373,
"step": 160
},
{
"epoch": 0.22,
"learning_rate": 1.9740008439214417e-06,
"loss": 1.3031,
"step": 170
},
{
"epoch": 0.23,
"learning_rate": 1.9708675303269545e-06,
"loss": 1.3004,
"step": 180
},
{
"epoch": 0.24,
"learning_rate": 1.9675587738486934e-06,
"loss": 1.2555,
"step": 190
},
{
"epoch": 0.26,
"learning_rate": 1.9640751724032234e-06,
"loss": 1.277,
"step": 200
},
{
"epoch": 0.27,
"learning_rate": 1.960417355502876e-06,
"loss": 1.2142,
"step": 210
},
{
"epoch": 0.28,
"learning_rate": 1.9565859841419945e-06,
"loss": 1.2023,
"step": 220
},
{
"epoch": 0.3,
"learning_rate": 1.9525817506774864e-06,
"loss": 1.2301,
"step": 230
},
{
"epoch": 0.31,
"learning_rate": 1.948405378703708e-06,
"loss": 1.217,
"step": 240
},
{
"epoch": 0.32,
"learning_rate": 1.9440576229217078e-06,
"loss": 1.1637,
"step": 250
},
{
"epoch": 0.33,
"learning_rate": 1.939539269002845e-06,
"loss": 1.214,
"step": 260
},
{
"epoch": 0.35,
"learning_rate": 1.9348511334468137e-06,
"loss": 1.1571,
"step": 270
},
{
"epoch": 0.36,
"learning_rate": 1.9299940634340954e-06,
"loss": 1.1329,
"step": 280
},
{
"epoch": 0.37,
"learning_rate": 1.9249689366728658e-06,
"loss": 1.1524,
"step": 290
},
{
"epoch": 0.39,
"learning_rate": 1.91977666124039e-06,
"loss": 1.1853,
"step": 300
},
{
"epoch": 0.4,
"learning_rate": 1.9144181754189207e-06,
"loss": 1.1614,
"step": 310
},
{
"epoch": 0.41,
"learning_rate": 1.90889444752615e-06,
"loss": 1.1166,
"step": 320
},
{
"epoch": 0.42,
"learning_rate": 1.903206475740223e-06,
"loss": 1.1298,
"step": 330
},
{
"epoch": 0.44,
"learning_rate": 1.8973552879193612e-06,
"loss": 1.1577,
"step": 340
},
{
"epoch": 0.45,
"learning_rate": 1.8913419414161202e-06,
"loss": 1.1421,
"step": 350
},
{
"epoch": 0.46,
"learning_rate": 1.88516752288632e-06,
"loss": 1.1569,
"step": 360
},
{
"epoch": 0.47,
"learning_rate": 1.8788331480926763e-06,
"loss": 1.0961,
"step": 370
},
{
"epoch": 0.49,
"learning_rate": 1.872339961703175e-06,
"loss": 1.1173,
"step": 380
},
{
"epoch": 0.5,
"learning_rate": 1.8656891370842223e-06,
"loss": 1.0712,
"step": 390
},
{
"epoch": 0.51,
"learning_rate": 1.8588818760886094e-06,
"loss": 1.1055,
"step": 400
},
{
"epoch": 0.53,
"learning_rate": 1.851919408838327e-06,
"loss": 1.1006,
"step": 410
},
{
"epoch": 0.54,
"learning_rate": 1.8448029935022754e-06,
"loss": 1.083,
"step": 420
},
{
"epoch": 0.55,
"learning_rate": 1.8375339160689023e-06,
"loss": 1.0588,
"step": 430
},
{
"epoch": 0.56,
"learning_rate": 1.830113490113817e-06,
"loss": 1.0791,
"step": 440
},
{
"epoch": 0.58,
"learning_rate": 1.822543056562416e-06,
"loss": 1.0623,
"step": 450
},
{
"epoch": 0.59,
"learning_rate": 1.8148239834475695e-06,
"loss": 1.1126,
"step": 460
},
{
"epoch": 0.6,
"learning_rate": 1.806957665662406e-06,
"loss": 1.0696,
"step": 470
},
{
"epoch": 0.62,
"learning_rate": 1.7989455247082472e-06,
"loss": 1.0573,
"step": 480
},
{
"epoch": 0.63,
"learning_rate": 1.7907890084377301e-06,
"loss": 1.0789,
"step": 490
},
{
"epoch": 0.64,
"learning_rate": 1.7824895907931706e-06,
"loss": 1.0964,
"step": 500
},
{
"epoch": 0.65,
"learning_rate": 1.7740487715402106e-06,
"loss": 1.0551,
"step": 510
},
{
"epoch": 0.67,
"learning_rate": 1.7654680759968007e-06,
"loss": 1.0563,
"step": 520
},
{
"epoch": 0.68,
"learning_rate": 1.756749054757562e-06,
"loss": 1.0522,
"step": 530
},
{
"epoch": 0.69,
"learning_rate": 1.747893283413585e-06,
"loss": 1.0446,
"step": 540
},
{
"epoch": 0.71,
"learning_rate": 1.738902362267705e-06,
"loss": 1.0667,
"step": 550
},
{
"epoch": 0.72,
"learning_rate": 1.72977791604532e-06,
"loss": 1.0395,
"step": 560
},
{
"epoch": 0.73,
"learning_rate": 1.7205215936007869e-06,
"loss": 1.039,
"step": 570
},
{
"epoch": 0.74,
"learning_rate": 1.7111350676194647e-06,
"loss": 1.0987,
"step": 580
},
{
"epoch": 0.76,
"learning_rate": 1.701620034315445e-06,
"loss": 1.0748,
"step": 590
},
{
"epoch": 0.77,
"learning_rate": 1.6919782131250366e-06,
"loss": 1.0446,
"step": 600
},
{
"epoch": 0.78,
"learning_rate": 1.6822113463960483e-06,
"loss": 1.0279,
"step": 610
},
{
"epoch": 0.8,
"learning_rate": 1.6723211990729355e-06,
"loss": 1.054,
"step": 620
},
{
"epoch": 0.81,
"learning_rate": 1.6623095583778613e-06,
"loss": 1.0391,
"step": 630
},
{
"epoch": 0.82,
"learning_rate": 1.652178233487731e-06,
"loss": 1.0812,
"step": 640
},
{
"epoch": 0.83,
"learning_rate": 1.6419290552072634e-06,
"loss": 1.0826,
"step": 650
},
{
"epoch": 0.85,
"learning_rate": 1.6315638756381484e-06,
"loss": 1.0497,
"step": 660
},
{
"epoch": 0.86,
"learning_rate": 1.6210845678443602e-06,
"loss": 1.0329,
"step": 670
},
{
"epoch": 0.87,
"learning_rate": 1.6104930255136794e-06,
"loss": 1.0663,
"step": 680
},
{
"epoch": 0.89,
"learning_rate": 1.5997911626154914e-06,
"loss": 1.059,
"step": 690
},
{
"epoch": 0.9,
"learning_rate": 1.5889809130549174e-06,
"loss": 1.0123,
"step": 700
},
{
"epoch": 0.91,
"learning_rate": 1.578064230323343e-06,
"loss": 1.0264,
"step": 710
},
{
"epoch": 0.92,
"learning_rate": 1.5670430871454081e-06,
"loss": 1.0411,
"step": 720
},
{
"epoch": 0.94,
"learning_rate": 1.555919475122523e-06,
"loss": 1.0428,
"step": 730
},
{
"epoch": 0.95,
"learning_rate": 1.544695404372968e-06,
"loss": 0.9994,
"step": 740
},
{
"epoch": 0.96,
"learning_rate": 1.533372903168653e-06,
"loss": 1.0452,
"step": 750
},
{
"epoch": 0.98,
"learning_rate": 1.5219540175685937e-06,
"loss": 1.02,
"step": 760
},
{
"epoch": 0.99,
"learning_rate": 1.5104408110491716e-06,
"loss": 1.0564,
"step": 770
},
{
"epoch": 1.0,
"learning_rate": 1.4988353641312515e-06,
"loss": 0.9889,
"step": 780
},
{
"epoch": 1.01,
"learning_rate": 1.487139774004214e-06,
"loss": 1.0452,
"step": 790
},
{
"epoch": 1.03,
"learning_rate": 1.4753561541469787e-06,
"loss": 1.0399,
"step": 800
},
{
"epoch": 1.04,
"learning_rate": 1.463486633946084e-06,
"loss": 1.0438,
"step": 810
},
{
"epoch": 1.05,
"learning_rate": 1.4515333583108893e-06,
"loss": 1.0134,
"step": 820
},
{
"epoch": 1.07,
"learning_rate": 1.439498487285976e-06,
"loss": 0.9949,
"step": 830
},
{
"epoch": 1.08,
"learning_rate": 1.427384195660811e-06,
"loss": 1.0184,
"step": 840
},
{
"epoch": 1.09,
"learning_rate": 1.4151926725767455e-06,
"loss": 1.0246,
"step": 850
},
{
"epoch": 1.1,
"learning_rate": 1.4029261211314222e-06,
"loss": 1.0428,
"step": 860
},
{
"epoch": 1.12,
"learning_rate": 1.3905867579806596e-06,
"loss": 1.0078,
"step": 870
},
{
"epoch": 1.13,
"learning_rate": 1.3781768129378844e-06,
"loss": 1.0223,
"step": 880
},
{
"epoch": 1.14,
"learning_rate": 1.3656985285711895e-06,
"loss": 0.9788,
"step": 890
},
{
"epoch": 1.16,
"learning_rate": 1.3531541597980845e-06,
"loss": 1.0417,
"step": 900
},
{
"epoch": 1.17,
"learning_rate": 1.340545973478016e-06,
"loss": 1.0315,
"step": 910
},
{
"epoch": 1.18,
"learning_rate": 1.32787624800273e-06,
"loss": 0.9775,
"step": 920
},
{
"epoch": 1.19,
"learning_rate": 1.3151472728845492e-06,
"loss": 0.9992,
"step": 930
},
{
"epoch": 1.21,
"learning_rate": 1.3023613483426399e-06,
"loss": 0.957,
"step": 940
},
{
"epoch": 1.22,
"learning_rate": 1.2895207848873487e-06,
"loss": 1.0227,
"step": 950
},
{
"epoch": 1.23,
"learning_rate": 1.2766279029026735e-06,
"loss": 1.0311,
"step": 960
},
{
"epoch": 1.25,
"learning_rate": 1.2636850322269553e-06,
"loss": 1.0237,
"step": 970
},
{
"epoch": 1.26,
"learning_rate": 1.250694511731857e-06,
"loss": 1.0338,
"step": 980
},
{
"epoch": 1.27,
"learning_rate": 1.2376586888997145e-06,
"loss": 1.0259,
"step": 990
},
{
"epoch": 1.28,
"learning_rate": 1.224579919399327e-06,
"loss": 1.0058,
"step": 1000
},
{
"epoch": 1.3,
"learning_rate": 1.2114605666602728e-06,
"loss": 0.9968,
"step": 1010
},
{
"epoch": 1.31,
"learning_rate": 1.1983030014458184e-06,
"loss": 0.9984,
"step": 1020
},
{
"epoch": 1.32,
"learning_rate": 1.1851096014245055e-06,
"loss": 1.0126,
"step": 1030
},
{
"epoch": 1.34,
"learning_rate": 1.1718827507404873e-06,
"loss": 1.0064,
"step": 1040
},
{
"epoch": 1.35,
"learning_rate": 1.1586248395826983e-06,
"loss": 0.9816,
"step": 1050
},
{
"epoch": 1.36,
"learning_rate": 1.1453382637529276e-06,
"loss": 1.0116,
"step": 1060
},
{
"epoch": 1.37,
"learning_rate": 1.1320254242328805e-06,
"loss": 0.9933,
"step": 1070
},
{
"epoch": 1.39,
"learning_rate": 1.1186887267503053e-06,
"loss": 1.0558,
"step": 1080
},
{
"epoch": 1.4,
"learning_rate": 1.1053305813442574e-06,
"loss": 0.9552,
"step": 1090
},
{
"epoch": 1.41,
"learning_rate": 1.0919534019295898e-06,
"loss": 0.9877,
"step": 1100
},
{
"epoch": 1.42,
"learning_rate": 1.078559605860741e-06,
"loss": 1.0232,
"step": 1110
},
{
"epoch": 1.44,
"learning_rate": 1.0651516134949003e-06,
"loss": 1.0201,
"step": 1120
},
{
"epoch": 1.45,
"learning_rate": 1.0517318477546319e-06,
"loss": 1.0047,
"step": 1130
},
{
"epoch": 1.46,
"learning_rate": 1.0383027336900353e-06,
"loss": 0.9861,
"step": 1140
},
{
"epoch": 1.48,
"learning_rate": 1.0248666980405212e-06,
"loss": 1.0059,
"step": 1150
},
{
"epoch": 1.49,
"learning_rate": 1.011426168796281e-06,
"loss": 0.9714,
"step": 1160
},
{
"epoch": 1.5,
"learning_rate": 9.979835747595316e-07,
"loss": 1.0207,
"step": 1170
},
{
"epoch": 1.51,
"learning_rate": 9.845413451056125e-07,
"loss": 1.0167,
"step": 1180
},
{
"epoch": 1.53,
"learning_rate": 9.71101908944018e-07,
"loss": 0.9979,
"step": 1190
},
{
"epoch": 1.54,
"learning_rate": 9.576676948794375e-07,
"loss": 1.0047,
"step": 1200
},
{
"epoch": 1.55,
"learning_rate": 9.442411305728873e-07,
"loss": 1.0121,
"step": 1210
},
{
"epoch": 1.57,
"learning_rate": 9.308246423030185e-07,
"loss": 1.0015,
"step": 1220
},
{
"epoch": 1.58,
"learning_rate": 9.174206545276677e-07,
"loss": 1.0325,
"step": 1230
},
{
"epoch": 1.59,
"learning_rate": 9.040315894457404e-07,
"loss": 1.019,
"step": 1240
},
{
"epoch": 1.6,
"learning_rate": 8.906598665595016e-07,
"loss": 0.993,
"step": 1250
},
{
"epoch": 1.62,
"learning_rate": 8.773079022373553e-07,
"loss": 0.9917,
"step": 1260
},
{
"epoch": 1.63,
"learning_rate": 8.63978109277187e-07,
"loss": 0.9697,
"step": 1270
},
{
"epoch": 1.64,
"learning_rate": 8.506728964703549e-07,
"loss": 0.973,
"step": 1280
},
{
"epoch": 1.66,
"learning_rate": 8.37394668166404e-07,
"loss": 1.0183,
"step": 1290
},
{
"epoch": 1.67,
"learning_rate": 8.241458238385798e-07,
"loss": 1.0098,
"step": 1300
},
{
"epoch": 1.68,
"learning_rate": 8.109287576502299e-07,
"loss": 0.9765,
"step": 1310
},
{
"epoch": 1.69,
"learning_rate": 7.977458580221578e-07,
"loss": 1.01,
"step": 1320
},
{
"epoch": 1.71,
"learning_rate": 7.845995072010187e-07,
"loss": 1.0378,
"step": 1330
},
{
"epoch": 1.72,
"learning_rate": 7.714920808288313e-07,
"loss": 0.9665,
"step": 1340
},
{
"epoch": 1.73,
"learning_rate": 7.584259475136804e-07,
"loss": 0.9927,
"step": 1350
},
{
"epoch": 1.75,
"learning_rate": 7.454034684016923e-07,
"loss": 0.9571,
"step": 1360
},
{
"epoch": 1.76,
"learning_rate": 7.324269967503587e-07,
"loss": 0.9813,
"step": 1370
},
{
"epoch": 1.77,
"learning_rate": 7.19498877503286e-07,
"loss": 0.9641,
"step": 1380
},
{
"epoch": 1.78,
"learning_rate": 7.066214468664467e-07,
"loss": 0.9696,
"step": 1390
},
{
"epoch": 1.8,
"learning_rate": 6.937970318860085e-07,
"loss": 0.9824,
"step": 1400
},
{
"epoch": 1.81,
"learning_rate": 6.810279500278223e-07,
"loss": 0.9829,
"step": 1410
},
{
"epoch": 1.82,
"learning_rate": 6.683165087586377e-07,
"loss": 1.0099,
"step": 1420
},
{
"epoch": 1.84,
"learning_rate": 6.556650051291264e-07,
"loss": 0.9737,
"step": 1430
},
{
"epoch": 1.85,
"learning_rate": 6.430757253587901e-07,
"loss": 0.9962,
"step": 1440
},
{
"epoch": 1.86,
"learning_rate": 6.305509444228219e-07,
"loss": 0.9736,
"step": 1450
},
{
"epoch": 1.87,
"learning_rate": 6.180929256410027e-07,
"loss": 0.9813,
"step": 1460
},
{
"epoch": 1.89,
"learning_rate": 6.057039202687022e-07,
"loss": 1.0075,
"step": 1470
},
{
"epoch": 1.9,
"learning_rate": 5.93386167090062e-07,
"loss": 0.9858,
"step": 1480
},
{
"epoch": 1.91,
"learning_rate": 5.811418920134277e-07,
"loss": 0.992,
"step": 1490
},
{
"epoch": 1.93,
"learning_rate": 5.689733076691148e-07,
"loss": 0.9919,
"step": 1500
},
{
"epoch": 1.94,
"learning_rate": 5.56882613009567e-07,
"loss": 0.9786,
"step": 1510
},
{
"epoch": 1.95,
"learning_rate": 5.448719929119915e-07,
"loss": 0.9814,
"step": 1520
},
{
"epoch": 1.96,
"learning_rate": 5.329436177835339e-07,
"loss": 0.9847,
"step": 1530
},
{
"epoch": 1.98,
"learning_rate": 5.210996431690722e-07,
"loss": 0.9752,
"step": 1540
},
{
"epoch": 1.99,
"learning_rate": 5.093422093616909e-07,
"loss": 1.0272,
"step": 1550
},
{
"epoch": 2.0,
"learning_rate": 4.976734410159165e-07,
"loss": 1.0187,
"step": 1560
},
{
"epoch": 2.02,
"learning_rate": 4.860954467637762e-07,
"loss": 0.9922,
"step": 1570
},
{
"epoch": 2.03,
"learning_rate": 4.7461031883375335e-07,
"loss": 0.9881,
"step": 1580
},
{
"epoch": 2.04,
"learning_rate": 4.632201326727041e-07,
"loss": 0.9972,
"step": 1590
},
{
"epoch": 2.05,
"learning_rate": 4.519269465708125e-07,
"loss": 0.9841,
"step": 1600
},
{
"epoch": 2.07,
"learning_rate": 4.407328012896393e-07,
"loss": 1.0147,
"step": 1610
},
{
"epoch": 2.08,
"learning_rate": 4.2963971969334254e-07,
"loss": 0.9832,
"step": 1620
},
{
"epoch": 2.09,
"learning_rate": 4.186497063831316e-07,
"loss": 1.0031,
"step": 1630
},
{
"epoch": 2.11,
"learning_rate": 4.0776474733502007e-07,
"loss": 0.9694,
"step": 1640
},
{
"epoch": 2.12,
"learning_rate": 3.9698680954094645e-07,
"loss": 0.9836,
"step": 1650
},
{
"epoch": 2.13,
"learning_rate": 3.8631784065332253e-07,
"loss": 0.9895,
"step": 1660
},
{
"epoch": 2.14,
"learning_rate": 3.7575976863308156e-07,
"loss": 0.9894,
"step": 1670
},
{
"epoch": 2.16,
"learning_rate": 3.653145014012766e-07,
"loss": 0.9978,
"step": 1680
},
{
"epoch": 2.17,
"learning_rate": 3.5498392649431087e-07,
"loss": 0.9492,
"step": 1690
},
{
"epoch": 2.18,
"learning_rate": 3.447699107228412e-07,
"loss": 0.9666,
"step": 1700
},
{
"epoch": 2.2,
"learning_rate": 3.3467429983443476e-07,
"loss": 0.9963,
"step": 1710
},
{
"epoch": 2.21,
"learning_rate": 3.2469891818002715e-07,
"loss": 0.9629,
"step": 1720
},
{
"epoch": 2.22,
"learning_rate": 3.148455683842507e-07,
"loss": 1.0002,
"step": 1730
},
{
"epoch": 2.23,
"learning_rate": 3.0511603101968475e-07,
"loss": 1.0029,
"step": 1740
},
{
"epoch": 2.25,
"learning_rate": 2.9551206428509446e-07,
"loss": 0.9456,
"step": 1750
},
{
"epoch": 2.26,
"learning_rate": 2.860354036877113e-07,
"loss": 0.9792,
"step": 1760
},
{
"epoch": 2.27,
"learning_rate": 2.7668776172961375e-07,
"loss": 0.9614,
"step": 1770
},
{
"epoch": 2.28,
"learning_rate": 2.6747082759826613e-07,
"loss": 1.0142,
"step": 1780
},
{
"epoch": 2.3,
"learning_rate": 2.583862668612693e-07,
"loss": 0.9993,
"step": 1790
},
{
"epoch": 2.31,
"learning_rate": 2.4943572116538205e-07,
"loss": 1.0057,
"step": 1800
},
{
"epoch": 2.32,
"learning_rate": 2.4062080793986004e-07,
"loss": 0.9717,
"step": 1810
},
{
"epoch": 2.34,
"learning_rate": 2.3194312010417927e-07,
"loss": 1.0034,
"step": 1820
},
{
"epoch": 2.35,
"learning_rate": 2.2340422578017958e-07,
"loss": 0.9612,
"step": 1830
},
{
"epoch": 2.36,
"learning_rate": 2.150056680086958e-07,
"loss": 0.9932,
"step": 1840
},
{
"epoch": 2.37,
"learning_rate": 2.0674896447071833e-07,
"loss": 1.0122,
"step": 1850
},
{
"epoch": 2.39,
"learning_rate": 1.9863560721313698e-07,
"loss": 1.0008,
"step": 1860
},
{
"epoch": 2.4,
"learning_rate": 1.9066706237911756e-07,
"loss": 1.0085,
"step": 1870
},
{
"epoch": 2.41,
"learning_rate": 1.8284476994315835e-07,
"loss": 0.9867,
"step": 1880
},
{
"epoch": 2.43,
"learning_rate": 1.7517014345087766e-07,
"loss": 0.987,
"step": 1890
},
{
"epoch": 2.44,
"learning_rate": 1.6764456976357277e-07,
"loss": 0.9703,
"step": 1900
},
{
"epoch": 2.45,
"learning_rate": 1.6026940880760797e-07,
"loss": 0.9949,
"step": 1910
},
{
"epoch": 2.46,
"learning_rate": 1.5304599332866197e-07,
"loss": 1.0132,
"step": 1920
},
{
"epoch": 2.48,
"learning_rate": 1.459756286508945e-07,
"loss": 1.0076,
"step": 1930
},
{
"epoch": 2.49,
"learning_rate": 1.390595924410609e-07,
"loss": 0.9801,
"step": 1940
},
{
"epoch": 2.5,
"learning_rate": 1.322991344776323e-07,
"loss": 0.9947,
"step": 1950
},
{
"epoch": 2.52,
"learning_rate": 1.256954764249486e-07,
"loss": 0.9898,
"step": 1960
},
{
"epoch": 2.53,
"learning_rate": 1.1924981161245574e-07,
"loss": 1.0,
"step": 1970
},
{
"epoch": 2.54,
"learning_rate": 1.1296330481906247e-07,
"loss": 0.9637,
"step": 1980
},
{
"epoch": 2.55,
"learning_rate": 1.0683709206265635e-07,
"loss": 1.0058,
"step": 1990
},
{
"epoch": 2.57,
"learning_rate": 1.0087228039481643e-07,
"loss": 1.0164,
"step": 2000
},
{
"epoch": 2.58,
"learning_rate": 9.506994770076115e-08,
"loss": 0.9956,
"step": 2010
},
{
"epoch": 2.59,
"learning_rate": 8.94311425045674e-08,
"loss": 0.9945,
"step": 2020
},
{
"epoch": 2.61,
"learning_rate": 8.395688377969235e-08,
"loss": 0.9916,
"step": 2030
},
{
"epoch": 2.62,
"learning_rate": 7.864816076484049e-08,
"loss": 0.998,
"step": 2040
},
{
"epoch": 2.63,
"learning_rate": 7.350593278519823e-08,
"loss": 0.9892,
"step": 2050
},
{
"epoch": 2.64,
"learning_rate": 6.853112907907854e-08,
"loss": 0.9772,
"step": 2060
},
{
"epoch": 2.66,
"learning_rate": 6.372464862999949e-08,
"loss": 0.9784,
"step": 2070
},
{
"epoch": 2.67,
"learning_rate": 5.908736000423309e-08,
"loss": 0.9986,
"step": 2080
},
{
"epoch": 2.68,
"learning_rate": 5.462010119384664e-08,
"loss": 0.978,
"step": 2090
},
{
"epoch": 2.7,
"learning_rate": 5.0323679465273605e-08,
"loss": 0.9783,
"step": 2100
},
{
"epoch": 2.71,
"learning_rate": 4.619887121343324e-08,
"loss": 0.9555,
"step": 2110
},
{
"epoch": 2.72,
"learning_rate": 4.2246421821431123e-08,
"loss": 0.9857,
"step": 2120
},
{
"epoch": 2.73,
"learning_rate": 3.846704552586244e-08,
"loss": 0.961,
"step": 2130
},
{
"epoch": 2.75,
"learning_rate": 3.4861425287744276e-08,
"loss": 0.9973,
"step": 2140
},
{
"epoch": 2.76,
"learning_rate": 3.143021266910029e-08,
"loss": 0.9497,
"step": 2150
},
{
"epoch": 2.77,
"learning_rate": 2.8174027715217263e-08,
"loss": 0.9707,
"step": 2160
},
{
"epoch": 2.79,
"learning_rate": 2.5093458842599946e-08,
"loss": 0.9564,
"step": 2170
},
{
"epoch": 2.8,
"learning_rate": 2.218906273263843e-08,
"loss": 0.9395,
"step": 2180
},
{
"epoch": 2.81,
"learning_rate": 1.9461364231012856e-08,
"loss": 0.9569,
"step": 2190
},
{
"epoch": 2.82,
"learning_rate": 1.6910856252849382e-08,
"loss": 1.0239,
"step": 2200
},
{
"epoch": 2.84,
"learning_rate": 1.4537999693646885e-08,
"loss": 0.9657,
"step": 2210
},
{
"epoch": 2.85,
"learning_rate": 1.2343223345989917e-08,
"loss": 0.9456,
"step": 2220
},
{
"epoch": 2.86,
"learning_rate": 1.0326923822062461e-08,
"loss": 1.0241,
"step": 2230
},
{
"epoch": 2.88,
"learning_rate": 8.489465481977708e-09,
"loss": 1.0016,
"step": 2240
},
{
"epoch": 2.89,
"learning_rate": 6.83118036793473e-09,
"loss": 0.9713,
"step": 2250
},
{
"epoch": 2.9,
"learning_rate": 5.352368144216801e-09,
"loss": 0.9896,
"step": 2260
},
{
"epoch": 2.91,
"learning_rate": 4.053296043039389e-09,
"loss": 0.976,
"step": 2270
},
{
"epoch": 2.93,
"learning_rate": 2.934198816259559e-09,
"loss": 1.0155,
"step": 2280
},
{
"epoch": 2.94,
"learning_rate": 1.9952786929543495e-09,
"loss": 0.9782,
"step": 2290
},
{
"epoch": 2.95,
"learning_rate": 1.236705342876898e-09,
"loss": 1.015,
"step": 2300
},
{
"epoch": 2.97,
"learning_rate": 6.586158457954072e-10,
"loss": 0.9742,
"step": 2310
},
{
"epoch": 2.98,
"learning_rate": 2.611146667221842e-10,
"loss": 1.0113,
"step": 2320
},
{
"epoch": 2.99,
"learning_rate": 4.4273637035852074e-11,
"loss": 0.9955,
"step": 2330
},
{
"epoch": 3.0,
"step": 2337,
"total_flos": 9336786417352704.0,
"train_loss": 1.0705227502962438,
"train_runtime": 220133.6189,
"train_samples_per_second": 1.019,
"train_steps_per_second": 0.011
}
],
"logging_steps": 10,
"max_steps": 2337,
"num_train_epochs": 3,
"save_steps": 1000,
"total_flos": 9336786417352704.0,
"trial_name": null,
"trial_params": null
}
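
For reference, a minimal sketch of how this log could be inspected offline. It assumes only the standard-library json module plus matplotlib, and reads the log_history entries above (epoch, learning_rate, loss, step) to plot the training-loss curve and the learning-rate schedule; the file path and plotting choices are illustrative, not part of the original upload.

import json
import matplotlib.pyplot as plt

# Load the state file exported by the Hugging Face Trainer (path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step logging entries; the final entry is a run summary
# (train_runtime, train_loss, ...) without a "loss"/"learning_rate" pair.
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

# Plot training loss and the learning-rate schedule against the global step.
fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("global_step")
ax_loss.set_ylabel("train loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("global_step")
ax_lr.set_ylabel("learning rate")
fig.tight_layout()
plt.show()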