lapp0 committed on
Commit
c194d10
·
verified ·
1 Parent(s): 74a73b2

End of training

Browse files
README.md CHANGED
@@ -16,13 +16,13 @@ This student model is distilled from the teacher model [gpt2](https://huggingfac
16
  The [Distily](https://github.com/lapp0/distily) library was used for this distillation.
17
 
18
  It achieves the following results on the evaluation set:
19
- - eval_enwikippl: 1375.5275
20
- - eval_frwikippl: 6766.5586
21
- - eval_zhwikippl: 17695.9277
22
- - eval_loss: 8329.1631
23
- - eval_runtime: 64.7405
24
- - eval_samples_per_second: 46.339
25
- - eval_steps_per_second: 11.585
26
 
27
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
28
  should probably proofread and complete it, then remove this comment.
@@ -65,45 +65,45 @@ Peak GPU Memory: 8.3354 GB
65
  | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | zhwikippl |
66
  | --- | --- | --- | --- | --- | --- | --- | --- | --- |
67
  | **teacher eval** | | 30.2385 | 57.2728 | | | | | 18.1772 |
68
- | 0 | 0 | 59938.1289 | 59232.2031 | 331532.9688 | 64.3677 | 46.607 | 11.652 | 56882.4922 |
69
- | 500 | 0.0269 | 3156.7812 | 13407.7314 | 11482.2617 | 64.7552 | 46.328 | 11.582 | 46374.0977 |
70
- | 1000 | 0.0539 | 2456.4883 | 10286.8799 | 10423.125 | 66.7762 | 44.926 | 11.232 | 37178.7227 |
71
- | 1500 | 0.0808 | 2211.4065 | 9323.9248 | 9931.5410 | 68.5121 | 43.788 | 10.947 | 30078.5703 |
72
- | 2000 | 0.1077 | 2036.7498 | 8931.4150 | 9687.9463 | 65.8573 | 45.553 | 11.388 | 28182.8398 |
73
- | 2500 | 0.1347 | 1920.9956 | 8044.7114 | 9415.5947 | 65.1775 | 46.028 | 11.507 | 24796.8789 |
74
- | 3000 | 0.1616 | 1873.2666 | 8170.4624 | 9294.6562 | 65.2306 | 45.991 | 11.498 | 24111.1641 |
75
- | 3500 | 0.1886 | 1794.6318 | 8009.0542 | 9103.3604 | 64.6784 | 46.383 | 11.596 | 24227.3574 |
76
- | 4000 | 0.2155 | 1767.7296 | 7658.9282 | 9101.5039 | 65.4052 | 45.868 | 11.467 | 22171.6113 |
77
- | 4500 | 0.2424 | 1662.8684 | 7530.4146 | 8929.0029 | 64.9694 | 46.176 | 11.544 | 22204.2188 |
78
- | 5000 | 0.2694 | 1649.6533 | 7726.7241 | 8838.6562 | 65.1184 | 46.07 | 11.517 | 22987.1426 |
79
- | 5500 | 0.2963 | 1599.3251 | 7247.0552 | 8786.3145 | 64.8678 | 46.248 | 11.562 | 20469.9453 |
80
- | 6000 | 0.3232 | 1576.4832 | 7656.2266 | 8733.5889 | 63.9508 | 46.911 | 11.728 | 22970.2676 |
81
- | 6500 | 0.3502 | 1542.9945 | 7010.3413 | 8645.2266 | 64.1329 | 46.778 | 11.694 | 19388.3926 |
82
- | 7000 | 0.3771 | 1508.8114 | 6926.3296 | 8571.9463 | 64.1586 | 46.759 | 11.69 | 19860.0664 |
83
- | 7500 | 0.4040 | 1468.5557 | 6836.5732 | 8549.0029 | 64.4335 | 46.56 | 11.64 | 18730.5410 |
84
- | 8000 | 0.4310 | 1446.2615 | 6887.3745 | 8446.8584 | 64.7914 | 46.302 | 11.576 | 18665.6152 |
85
- | 8500 | 0.4579 | 1424.2772 | 6938.0576 | 8386.9863 | 64.6538 | 46.401 | 11.6 | 19722.6406 |
86
- | 9000 | 0.4848 | 1396.2977 | 6694.8984 | 8320.6289 | 64.4737 | 46.531 | 11.633 | 16747.5215 |
87
- | 9500 | 0.5118 | 1375.5275 | 6766.5586 | 8329.1631 | 64.7405 | 46.339 | 11.585 | 17695.9277 |
88
- | 10000 | 0.5387 | 1362.8486 | 6724.2305 | 8235.6797 | 64.3591 | 46.613 | 11.653 | 18138.5488 |
89
- | 10500 | 0.5657 | 1323.8888 | 6641.3037 | 8259.5732 | 64.5004 | 46.511 | 11.628 | 18308.9023 |
90
- | 11000 | 0.5926 | 1313.7504 | 6603.9517 | 8208.4697 | 63.9043 | 46.945 | 11.736 | 16451.6074 |
91
- | 11500 | 0.6195 | 1301.4640 | 6654.8970 | 8135.1362 | 63.8702 | 46.97 | 11.743 | 17253.7539 |
92
- | 12000 | 0.6465 | 1276.9374 | 6700.5708 | 8126.4321 | 63.8094 | 47.015 | 11.754 | 17780.0176 |
93
- | 12500 | 0.6734 | 1261.5624 | 6390.4980 | 8023.9038 | 64.2272 | 46.709 | 11.677 | 16394.5977 |
94
- | 13000 | 0.7003 | 1266.7156 | 6481.2490 | 8042.4531 | 63.9608 | 46.904 | 11.726 | 17241.0879 |
95
- | 13500 | 0.7273 | 1234.4520 | 6369.3550 | 8034.0479 | 64.8274 | 46.277 | 11.569 | 16078.0498 |
96
- | 14000 | 0.7542 | 1225.3788 | 6342.9141 | 7984.6187 | 64.523 | 46.495 | 11.624 | 16475.8027 |
97
- | 14500 | 0.7811 | 1191.1580 | 6149.5986 | 7939.2002 | 64.9092 | 46.218 | 11.555 | 14996.4697 |
98
- | 15000 | 0.8081 | 1184.7468 | 6450.7070 | 7919.1147 | 65.2526 | 45.975 | 11.494 | 16544.1367 |
99
- | 15500 | 0.8350 | 1166.7611 | 6195.7266 | 7864.8428 | 64.9659 | 46.178 | 11.545 | 14867.8652 |
100
- | 16000 | 0.8620 | 1162.8715 | 6133.1406 | 7857.3867 | 64.8395 | 46.268 | 11.567 | 14692.2109 |
101
- | 16500 | 0.8889 | 1153.1372 | 6122.7734 | 7785.0986 | 65.0504 | 46.118 | 11.53 | 15377.7285 |
102
- | 17000 | 0.9158 | 1137.9264 | 6099.0776 | 7778.0054 | 64.9115 | 46.217 | 11.554 | 14169.0732 |
103
- | 17500 | 0.9428 | 1129.4969 | 5922.5732 | 7767.8188 | 64.6909 | 46.374 | 11.594 | 13724.9141 |
104
- | 18000 | 0.9697 | 1111.5293 | 5840.4692 | 7714.6880 | 65.0093 | 46.147 | 11.537 | 12743.6494 |
105
- | 18500 | 0.9966 | 1116.8070 | 5738.8276 | 7720.9282 | 64.6499 | 46.404 | 11.601 | 12507.6094 |
106
- | 18562 | 1.0000 | 1113.0409 | 5667.2534 | 7727.6904 | 64.516 | 46.5 | 11.625 | 12154.4287 |
107
 
108
  ### Framework versions
109
  - Distily 0.2.0
 
16
  The [Distily](https://github.com/lapp0/distily) library was used for this distillation.
17
 
18
  It achieves the following results on the evaluation set:
19
+ - eval_enwikippl: 1466.9598
20
+ - eval_frwikippl: 6589.9976
21
+ - eval_zhwikippl: 19049.6328
22
+ - eval_loss: 8530.3359
23
+ - eval_runtime: 64.7254
24
+ - eval_samples_per_second: 46.35
25
+ - eval_steps_per_second: 11.587
26
 
27
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
28
  should probably proofread and complete it, then remove this comment.
 
65
  | step | epoch | enwikippl | frwikippl | loss | runtime | samples_per_second | steps_per_second | zhwikippl |
66
  | --- | --- | --- | --- | --- | --- | --- | --- | --- |
67
  | **teacher eval** | | 30.2385 | 57.2728 | | | | | 18.1772 |
68
+ | 0 | 0 | 55332.9297 | 57511.9648 | 333834.9375 | 64.4894 | 46.519 | 11.63 | 57797.4375 |
69
+ | 500 | 0.0269 | 3397.8057 | 14195.7314 | 11200.1709 | 64.3161 | 46.645 | 11.661 | 46176.3906 |
70
+ | 1000 | 0.0539 | 2565.4185 | 11100.7803 | 10401.7070 | 64.9732 | 46.173 | 11.543 | 40786.25 |
71
+ | 1500 | 0.0808 | 2280.1555 | 9752.9180 | 10029.2695 | 65.1147 | 46.073 | 11.518 | 34300.0664 |
72
+ | 2000 | 0.1077 | 2111.7202 | 8617.1777 | 9861.6855 | 65.0861 | 46.093 | 11.523 | 27128.5918 |
73
+ | 2500 | 0.1347 | 1990.7386 | 8209.1553 | 9601.2373 | 64.8934 | 46.23 | 11.557 | 25209.2168 |
74
+ | 3000 | 0.1616 | 1918.3867 | 7799.5220 | 9467.9785 | 64.886 | 46.235 | 11.559 | 22736.8027 |
75
+ | 3500 | 0.1886 | 1818.1265 | 7551.1548 | 9349.7920 | 64.7154 | 46.357 | 11.589 | 22582.4883 |
76
+ | 4000 | 0.2155 | 1769.4467 | 7458.5562 | 9246.7197 | 64.7466 | 46.334 | 11.584 | 21114.0508 |
77
+ | 4500 | 0.2424 | 1728.6010 | 7363.9741 | 9099.1787 | 65.1202 | 46.069 | 11.517 | 20729.8926 |
78
+ | 5000 | 0.2694 | 1704.3433 | 7453.2944 | 9068.9062 | 64.69 | 46.375 | 11.594 | 21740.6367 |
79
+ | 5500 | 0.2963 | 1664.6129 | 7184.9824 | 8969.5039 | 64.2668 | 46.68 | 11.67 | 20534.2910 |
80
+ | 6000 | 0.3232 | 1631.8164 | 7198.6724 | 8898.6348 | 65.558 | 45.761 | 11.44 | 22204.2188 |
81
+ | 6500 | 0.3502 | 1589.2347 | 6884.9448 | 8812.0322 | 64.8035 | 46.294 | 11.573 | 19131.2129 |
82
+ | 7000 | 0.3771 | 1553.9370 | 6727.0781 | 8747.2002 | 65.3644 | 45.897 | 11.474 | 18709.2949 |
83
+ | 7500 | 0.4040 | 1540.8395 | 6779.4512 | 8707.7334 | 64.9958 | 46.157 | 11.539 | 18515.4297 |
84
+ | 8000 | 0.4310 | 1519.5702 | 6720.9155 | 8684.7471 | 65.1941 | 46.016 | 11.504 | 19323.7656 |
85
+ | 8500 | 0.4579 | 1499.4967 | 6702.9292 | 8618.3145 | 64.6164 | 46.428 | 11.607 | 20303.8691 |
86
+ | 9000 | 0.4848 | 1468.8694 | 6597.9023 | 8579.7764 | 65.1809 | 46.026 | 11.506 | 19187.4902 |
87
+ | 9500 | 0.5118 | 1466.9598 | 6589.9976 | 8530.3359 | 64.7254 | 46.35 | 11.587 | 19049.6328 |
88
+ | 10000 | 0.5387 | 1450.3381 | 6594.1782 | 8527.4131 | 65.1904 | 46.019 | 11.505 | 20619.4590 |
89
+ | 10500 | 0.5657 | 1422.2881 | 6539.0815 | 8491.7549 | 64.9945 | 46.158 | 11.539 | 20106.9180 |
90
+ | 11000 | 0.5926 | 1413.1234 | 6447.0659 | 8481.6855 | 65.107 | 46.078 | 11.52 | 18302.7910 |
91
+ | 11500 | 0.6195 | 1399.7990 | 6463.4536 | 8433.2803 | 64.732 | 46.345 | 11.586 | 18501.8398 |
92
+ | 12000 | 0.6465 | 1386.2769 | 6439.3423 | 8387.9043 | 64.7399 | 46.339 | 11.585 | 18306.4570 |
93
+ | 12500 | 0.6734 | 1381.0126 | 6380.1401 | 8346.6777 | 64.7944 | 46.3 | 11.575 | 19072.5371 |
94
+ | 13000 | 0.7003 | 1360.2582 | 6364.1938 | 8351.8828 | 64.608 | 46.434 | 11.608 | 18941.8262 |
95
+ | 13500 | 0.7273 | 1355.2496 | 6337.5508 | 8364.6289 | 64.4743 | 46.53 | 11.633 | 18354.1797 |
96
+ | 14000 | 0.7542 | 1342.7577 | 6132.9243 | 8351.3281 | 64.4281 | 46.564 | 11.641 | 18108.3027 |
97
+ | 14500 | 0.7811 | 1324.4287 | 6172.4019 | 8299.2109 | 64.0768 | 46.819 | 11.705 | 17864.5078 |
98
+ | 15000 | 0.8081 | 1311.8136 | 6250.3555 | 8288.9170 | 63.9884 | 46.883 | 11.721 | 18093.8008 |
99
+ | 15500 | 0.8350 | 1300.1758 | 6161.9678 | 8240.8105 | 65.0003 | 46.154 | 11.538 | 18435.2441 |
100
+ | 16000 | 0.8620 | 1294.5092 | 6087.9023 | 8225.1836 | 65.3075 | 45.937 | 11.484 | 18195.5664 |
101
+ | 16500 | 0.8889 | 1272.7550 | 6124.9282 | 8187.4561 | 64.7644 | 46.322 | 11.58 | 18905.1719 |
102
+ | 17000 | 0.9158 | 1271.9396 | 6117.1646 | 8179.8828 | 66.1093 | 45.379 | 11.345 | 17912.2910 |
103
+ | 17500 | 0.9428 | 1263.8173 | 5966.3726 | 8165.7280 | 64.1579 | 46.76 | 11.69 | 16779.9922 |
104
+ | 18000 | 0.9697 | 1245.9607 | 6065.6255 | 8219.2422 | 64.3092 | 46.65 | 11.662 | 17666.4180 |
105
+ | 18500 | 0.9966 | 1240.7706 | 6013.2476 | 8146.3145 | 64.5002 | 46.511 | 11.628 | 16597.2520 |
106
+ | 18562 | 1.0000 | 1242.8444 | 5899.8604 | 8136.0962 | 64.3726 | 46.604 | 11.651 | 16160.9238 |
107
 
108
  ### Framework versions
109
  - Distily 0.2.0
logs/optim=paged_adamw_32bit/events.out.tfevents.1723354205.93d6cbb3ad53 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:97475d50c797a0228b79191e95b9474c42cdf7844de4533d955767d32167f4cc
3
+ size 253