samhitmantrala committed
Commit • 810547d
1 Parent(s): a8e5f03
End of training
Browse files
- README.md +356 -0
- config.json +46 -0
- generation_config.json +6 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- runs/Mar29_06-37-46_652a1ab04a7e/events.out.tfevents.1711694268.652a1ab04a7e.11735.0 +3 -0
- runs/Mar29_06-51-34_652a1ab04a7e/events.out.tfevents.1711695095.652a1ab04a7e.11735.1 +3 -0
- runs/Mar29_06-51-50_652a1ab04a7e/events.out.tfevents.1711695111.652a1ab04a7e.11735.2 +3 -0
- runs/Mar29_06-51-50_652a1ab04a7e/events.out.tfevents.1711695272.652a1ab04a7e.11735.3 +3 -0
- special_tokens_map.json +6 -0
- tokenizer.json +0 -0
- tokenizer_config.json +20 -0
- training_args.bin +3 -0
- vocab.json +0 -0
README.md
ADDED
@@ -0,0 +1,356 @@
---
license: apache-2.0
base_model: distilgpt2
tags:
- generated_from_trainer
model-index:
- name: cricket
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# cricket

This model is a fine-tuned version of [distilgpt2](https://huggingface.co/distilgpt2) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 6.4183
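As a quick usage sketch (not part of the auto-generated card), the checkpoint can be loaded like any other GPT-2-style causal language model. The repository id `samhitmantrala/cricket` is assumed from this commit's context and may need adjusting:

```python
# Minimal inference sketch; the repo id below is assumed from the commit context.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "samhitmantrala/cricket"  # hypothetical repo id, adjust if different
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("Cricket is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=40, do_sample=True, top_p=0.9)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```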
## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 64
- eval_batch_size: 64
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 300
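The values listed above correspond roughly to the following `TrainingArguments` setup. This is a hedged sketch rather than the actual training script, which is not part of this commit; the dataset, data collator, and output directory are placeholders:

```python
# Hedged sketch of a Trainer configuration mirroring the listed hyperparameters.
# Adam betas=(0.9, 0.999) and epsilon=1e-08 are the TrainingArguments defaults,
# so they are not set explicitly here.
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
tokenizer.pad_token = tokenizer.eos_token  # pad_token is <|endoftext|> in this repo
model = AutoModelForCausalLM.from_pretrained("distilgpt2")

args = TrainingArguments(
    output_dir="cricket",                 # placeholder
    learning_rate=2e-4,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=300,
    evaluation_strategy="epoch",          # the results table shows one eval per epoch
)

# trainer = Trainer(model=model, args=args,
#                   train_dataset=train_dataset, eval_dataset=eval_dataset)
# trainer.train()
```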
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:----:|:---------------:|
| No log | 1.0 | 1 | 4.6074 |
| No log | 2.0 | 2 | 3.7590 |
| No log | 3.0 | 3 | 3.7045 |
| No log | 4.0 | 4 | 3.7289 |
| No log | 5.0 | 5 | 3.8731 |
| No log | 6.0 | 6 | 4.0495 |
| No log | 7.0 | 7 | 4.2240 |
| No log | 8.0 | 8 | 4.4105 |
| No log | 9.0 | 9 | 4.5493 |
| No log | 10.0 | 10 | 4.6877 |
| No log | 11.0 | 11 | 4.7907 |
| No log | 12.0 | 12 | 4.8642 |
| No log | 13.0 | 13 | 4.9226 |
| No log | 14.0 | 14 | 5.0049 |
| No log | 15.0 | 15 | 5.1230 |
| No log | 16.0 | 16 | 5.2132 |
| No log | 17.0 | 17 | 5.2580 |
| No log | 18.0 | 18 | 5.2836 |
| No log | 19.0 | 19 | 5.2780 |
| No log | 20.0 | 20 | 5.2579 |
| No log | 21.0 | 21 | 5.2451 |
| No log | 22.0 | 22 | 5.2365 |
| No log | 23.0 | 23 | 5.2346 |
| No log | 24.0 | 24 | 5.2518 |
| No log | 25.0 | 25 | 5.2774 |
| No log | 26.0 | 26 | 5.3097 |
| No log | 27.0 | 27 | 5.3400 |
| No log | 28.0 | 28 | 5.3913 |
| No log | 29.0 | 29 | 5.4408 |
| No log | 30.0 | 30 | 5.4822 |
| No log | 31.0 | 31 | 5.5076 |
| No log | 32.0 | 32 | 5.5590 |
| No log | 33.0 | 33 | 5.6134 |
| No log | 34.0 | 34 | 5.6600 |
| No log | 35.0 | 35 | 5.6953 |
| No log | 36.0 | 36 | 5.7297 |
| No log | 37.0 | 37 | 5.7620 |
| No log | 38.0 | 38 | 5.8111 |
| No log | 39.0 | 39 | 5.8563 |
| No log | 40.0 | 40 | 5.8710 |
| No log | 41.0 | 41 | 5.8723 |
| No log | 42.0 | 42 | 5.8851 |
| No log | 43.0 | 43 | 5.9023 |
| No log | 44.0 | 44 | 5.9218 |
| No log | 45.0 | 45 | 5.9323 |
| No log | 46.0 | 46 | 5.9256 |
| No log | 47.0 | 47 | 5.9269 |
| No log | 48.0 | 48 | 5.9236 |
| No log | 49.0 | 49 | 5.9186 |
| No log | 50.0 | 50 | 5.9071 |
| No log | 51.0 | 51 | 5.8891 |
| No log | 52.0 | 52 | 5.8800 |
| No log | 53.0 | 53 | 5.8782 |
| No log | 54.0 | 54 | 5.8825 |
| No log | 55.0 | 55 | 5.8933 |
| No log | 56.0 | 56 | 5.9076 |
| No log | 57.0 | 57 | 5.9233 |
| No log | 58.0 | 58 | 5.9443 |
| No log | 59.0 | 59 | 5.9652 |
| No log | 60.0 | 60 | 5.9870 |
| No log | 61.0 | 61 | 6.0095 |
| No log | 62.0 | 62 | 6.0313 |
| No log | 63.0 | 63 | 6.0558 |
| No log | 64.0 | 64 | 6.0709 |
| No log | 65.0 | 65 | 6.0709 |
| No log | 66.0 | 66 | 6.0735 |
| No log | 67.0 | 67 | 6.0716 |
| No log | 68.0 | 68 | 6.0587 |
| No log | 69.0 | 69 | 6.0493 |
| No log | 70.0 | 70 | 6.0352 |
| No log | 71.0 | 71 | 6.0237 |
| No log | 72.0 | 72 | 6.0197 |
| No log | 73.0 | 73 | 6.0148 |
| No log | 74.0 | 74 | 6.0151 |
| No log | 75.0 | 75 | 6.0233 |
| No log | 76.0 | 76 | 6.0378 |
| No log | 77.0 | 77 | 6.0594 |
| No log | 78.0 | 78 | 6.0826 |
| No log | 79.0 | 79 | 6.1049 |
| No log | 80.0 | 80 | 6.1233 |
| No log | 81.0 | 81 | 6.1417 |
| No log | 82.0 | 82 | 6.1617 |
| No log | 83.0 | 83 | 6.1835 |
| No log | 84.0 | 84 | 6.1985 |
| No log | 85.0 | 85 | 6.2137 |
| No log | 86.0 | 86 | 6.2269 |
| No log | 87.0 | 87 | 6.2450 |
| No log | 88.0 | 88 | 6.2646 |
| No log | 89.0 | 89 | 6.2846 |
| No log | 90.0 | 90 | 6.2978 |
| No log | 91.0 | 91 | 6.3107 |
| No log | 92.0 | 92 | 6.3255 |
| No log | 93.0 | 93 | 6.3417 |
| No log | 94.0 | 94 | 6.3559 |
| No log | 95.0 | 95 | 6.3679 |
| No log | 96.0 | 96 | 6.3794 |
| No log | 97.0 | 97 | 6.3561 |
| No log | 98.0 | 98 | 6.3274 |
| No log | 99.0 | 99 | 6.3042 |
| No log | 100.0 | 100 | 6.2891 |
| No log | 101.0 | 101 | 6.2644 |
| No log | 102.0 | 102 | 6.2372 |
| No log | 103.0 | 103 | 6.2225 |
| No log | 104.0 | 104 | 6.2174 |
| No log | 105.0 | 105 | 6.2239 |
| No log | 106.0 | 106 | 6.2080 |
| No log | 107.0 | 107 | 6.1955 |
| No log | 108.0 | 108 | 6.1800 |
| No log | 109.0 | 109 | 6.1664 |
| No log | 110.0 | 110 | 6.1527 |
| No log | 111.0 | 111 | 6.1455 |
| No log | 112.0 | 112 | 6.1408 |
| No log | 113.0 | 113 | 6.1408 |
| No log | 114.0 | 114 | 6.1450 |
| No log | 115.0 | 115 | 6.1543 |
| No log | 116.0 | 116 | 6.1423 |
| No log | 117.0 | 117 | 6.1347 |
| No log | 118.0 | 118 | 6.1202 |
| No log | 119.0 | 119 | 6.0935 |
| No log | 120.0 | 120 | 6.0795 |
| No log | 121.0 | 121 | 6.0698 |
| No log | 122.0 | 122 | 6.0718 |
| No log | 123.0 | 123 | 6.0779 |
| No log | 124.0 | 124 | 6.0872 |
| No log | 125.0 | 125 | 6.0986 |
| No log | 126.0 | 126 | 6.1104 |
| No log | 127.0 | 127 | 6.1281 |
| No log | 128.0 | 128 | 6.1455 |
| No log | 129.0 | 129 | 6.1631 |
| No log | 130.0 | 130 | 6.1812 |
| No log | 131.0 | 131 | 6.1999 |
| No log | 132.0 | 132 | 6.2176 |
| No log | 133.0 | 133 | 6.2295 |
| No log | 134.0 | 134 | 6.2345 |
| No log | 135.0 | 135 | 6.2413 |
| No log | 136.0 | 136 | 6.2493 |
| No log | 137.0 | 137 | 6.2490 |
| No log | 138.0 | 138 | 6.2469 |
| No log | 139.0 | 139 | 6.2482 |
| No log | 140.0 | 140 | 6.2495 |
| No log | 141.0 | 141 | 6.2534 |
| No log | 142.0 | 142 | 6.2593 |
| No log | 143.0 | 143 | 6.2660 |
| No log | 144.0 | 144 | 6.2749 |
| No log | 145.0 | 145 | 6.2797 |
| No log | 146.0 | 146 | 6.2928 |
| No log | 147.0 | 147 | 6.3081 |
| No log | 148.0 | 148 | 6.3190 |
| No log | 149.0 | 149 | 6.3309 |
| No log | 150.0 | 150 | 6.3388 |
| No log | 151.0 | 151 | 6.3372 |
| No log | 152.0 | 152 | 6.3382 |
| No log | 153.0 | 153 | 6.3416 |
| No log | 154.0 | 154 | 6.3459 |
| No log | 155.0 | 155 | 6.3521 |
| No log | 156.0 | 156 | 6.3538 |
| No log | 157.0 | 157 | 6.3584 |
| No log | 158.0 | 158 | 6.3713 |
| No log | 159.0 | 159 | 6.3841 |
| No log | 160.0 | 160 | 6.3957 |
| No log | 161.0 | 161 | 6.4029 |
| No log | 162.0 | 162 | 6.4090 |
| No log | 163.0 | 163 | 6.4138 |
| No log | 164.0 | 164 | 6.4156 |
| No log | 165.0 | 165 | 6.4203 |
| No log | 166.0 | 166 | 6.4225 |
| No log | 167.0 | 167 | 6.4274 |
| No log | 168.0 | 168 | 6.4317 |
| No log | 169.0 | 169 | 6.4359 |
| No log | 170.0 | 170 | 6.4408 |
| No log | 171.0 | 171 | 6.4456 |
| No log | 172.0 | 172 | 6.4513 |
| No log | 173.0 | 173 | 6.4549 |
| No log | 174.0 | 174 | 6.4586 |
| No log | 175.0 | 175 | 6.4622 |
| No log | 176.0 | 176 | 6.4658 |
| No log | 177.0 | 177 | 6.4694 |
| No log | 178.0 | 178 | 6.4732 |
| No log | 179.0 | 179 | 6.4776 |
| No log | 180.0 | 180 | 6.4819 |
| No log | 181.0 | 181 | 6.4865 |
| No log | 182.0 | 182 | 6.4879 |
| No log | 183.0 | 183 | 6.4907 |
| No log | 184.0 | 184 | 6.4935 |
| No log | 185.0 | 185 | 6.4970 |
| No log | 186.0 | 186 | 6.5001 |
| No log | 187.0 | 187 | 6.5038 |
| No log | 188.0 | 188 | 6.5079 |
| No log | 189.0 | 189 | 6.5117 |
| No log | 190.0 | 190 | 6.5157 |
| No log | 191.0 | 191 | 6.5191 |
| No log | 192.0 | 192 | 6.5226 |
| No log | 193.0 | 193 | 6.5260 |
| No log | 194.0 | 194 | 6.5295 |
| No log | 195.0 | 195 | 6.5327 |
| No log | 196.0 | 196 | 6.5293 |
| No log | 197.0 | 197 | 6.5252 |
| No log | 198.0 | 198 | 6.5218 |
| No log | 199.0 | 199 | 6.5191 |
| No log | 200.0 | 200 | 6.5168 |
| No log | 201.0 | 201 | 6.5154 |
| No log | 202.0 | 202 | 6.5144 |
| No log | 203.0 | 203 | 6.5111 |
| No log | 204.0 | 204 | 6.5082 |
| No log | 205.0 | 205 | 6.5059 |
| No log | 206.0 | 206 | 6.5040 |
| No log | 207.0 | 207 | 6.5029 |
| No log | 208.0 | 208 | 6.5024 |
| No log | 209.0 | 209 | 6.5030 |
| No log | 210.0 | 210 | 6.5035 |
| No log | 211.0 | 211 | 6.4887 |
| No log | 212.0 | 212 | 6.4752 |
| No log | 213.0 | 213 | 6.4641 |
| No log | 214.0 | 214 | 6.4550 |
| No log | 215.0 | 215 | 6.4473 |
| No log | 216.0 | 216 | 6.4406 |
| No log | 217.0 | 217 | 6.4351 |
| No log | 218.0 | 218 | 6.4303 |
| No log | 219.0 | 219 | 6.4264 |
| No log | 220.0 | 220 | 6.4213 |
| No log | 221.0 | 221 | 6.4170 |
| No log | 222.0 | 222 | 6.4136 |
| No log | 223.0 | 223 | 6.4067 |
| No log | 224.0 | 224 | 6.4001 |
| No log | 225.0 | 225 | 6.3944 |
| No log | 226.0 | 226 | 6.3902 |
| No log | 227.0 | 227 | 6.3868 |
| No log | 228.0 | 228 | 6.3851 |
| No log | 229.0 | 229 | 6.3843 |
| No log | 230.0 | 230 | 6.3840 |
| No log | 231.0 | 231 | 6.3840 |
| No log | 232.0 | 232 | 6.3843 |
| No log | 233.0 | 233 | 6.3850 |
| No log | 234.0 | 234 | 6.3820 |
| No log | 235.0 | 235 | 6.3797 |
| No log | 236.0 | 236 | 6.3783 |
| No log | 237.0 | 237 | 6.3774 |
| No log | 238.0 | 238 | 6.3769 |
| No log | 239.0 | 239 | 6.3776 |
| No log | 240.0 | 240 | 6.3784 |
| No log | 241.0 | 241 | 6.3791 |
| No log | 242.0 | 242 | 6.3799 |
| No log | 243.0 | 243 | 6.3809 |
| No log | 244.0 | 244 | 6.3823 |
| No log | 245.0 | 245 | 6.3840 |
| No log | 246.0 | 246 | 6.3860 |
| No log | 247.0 | 247 | 6.3879 |
| No log | 248.0 | 248 | 6.3900 |
| No log | 249.0 | 249 | 6.3920 |
| No log | 250.0 | 250 | 6.3940 |
| No log | 251.0 | 251 | 6.3962 |
| No log | 252.0 | 252 | 6.3987 |
| No log | 253.0 | 253 | 6.4011 |
| No log | 254.0 | 254 | 6.4034 |
| No log | 255.0 | 255 | 6.4058 |
| No log | 256.0 | 256 | 6.4086 |
| No log | 257.0 | 257 | 6.4076 |
| No log | 258.0 | 258 | 6.4070 |
| No log | 259.0 | 259 | 6.4074 |
| No log | 260.0 | 260 | 6.4081 |
| No log | 261.0 | 261 | 6.4088 |
| No log | 262.0 | 262 | 6.4095 |
| No log | 263.0 | 263 | 6.4105 |
| No log | 264.0 | 264 | 6.4114 |
| No log | 265.0 | 265 | 6.4123 |
| No log | 266.0 | 266 | 6.4102 |
| No log | 267.0 | 267 | 6.4083 |
| No log | 268.0 | 268 | 6.4068 |
| No log | 269.0 | 269 | 6.4058 |
| No log | 270.0 | 270 | 6.4050 |
| No log | 271.0 | 271 | 6.4044 |
| No log | 272.0 | 272 | 6.4038 |
| No log | 273.0 | 273 | 6.4034 |
| No log | 274.0 | 274 | 6.4032 |
| No log | 275.0 | 275 | 6.4031 |
| No log | 276.0 | 276 | 6.4030 |
| No log | 277.0 | 277 | 6.4032 |
| No log | 278.0 | 278 | 6.4036 |
| No log | 279.0 | 279 | 6.4047 |
| No log | 280.0 | 280 | 6.4061 |
| No log | 281.0 | 281 | 6.4074 |
| No log | 282.0 | 282 | 6.4087 |
| No log | 283.0 | 283 | 6.4099 |
| No log | 284.0 | 284 | 6.4112 |
| No log | 285.0 | 285 | 6.4123 |
| No log | 286.0 | 286 | 6.4133 |
| No log | 287.0 | 287 | 6.4143 |
| No log | 288.0 | 288 | 6.4151 |
| No log | 289.0 | 289 | 6.4158 |
| No log | 290.0 | 290 | 6.4165 |
| No log | 291.0 | 291 | 6.4171 |
| No log | 292.0 | 292 | 6.4175 |
| No log | 293.0 | 293 | 6.4179 |
| No log | 294.0 | 294 | 6.4180 |
| No log | 295.0 | 295 | 6.4180 |
| No log | 296.0 | 296 | 6.4181 |
| No log | 297.0 | 297 | 6.4181 |
| No log | 298.0 | 298 | 6.4182 |
| No log | 299.0 | 299 | 6.4183 |
| No log | 300.0 | 300 | 6.4183 |


### Framework versions

- Transformers 4.38.2
- Pytorch 2.2.1+cu121
- Datasets 2.18.0
- Tokenizers 0.15.2
config.json
ADDED
@@ -0,0 +1,46 @@
{
  "_name_or_path": "distilgpt2",
  "_num_labels": 1,
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_ctx": 1024,
  "n_embd": 768,
  "n_head": 12,
  "n_inner": null,
  "n_layer": 6,
  "n_positions": 1024,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 50
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.38.2",
  "use_cache": true,
  "vocab_size": 50257
}
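For orientation, these fields describe the standard distilgpt2 architecture (6 layers, 12 heads, 768-dimensional embeddings). A minimal sketch of reading them back with `AutoConfig`, assuming the repository id `samhitmantrala/cricket`:

```python
# Hedged sketch: inspect the architecture described by config.json above.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("samhitmantrala/cricket")  # repo id assumed
print(config.model_type, config.n_layer, config.n_head, config.n_embd)  # gpt2 6 12 768
```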
generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "transformers_version": "4.38.2"
}
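This generation config only pins the BOS/EOS token ids, so decoding options (sampling, length limits, and so on) have to be supplied at generation time. A small sketch, again assuming the repository id:

```python
# Hedged sketch: load the generation defaults stored in generation_config.json.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("samhitmantrala/cricket")  # repo id assumed
print(gen_config.bos_token_id, gen_config.eos_token_id)  # 50256 50256
```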
merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d9ab1c34dc3f8168fb68e53c2e8310f9276d022668a5c87ee960d4f3c45c6b6f
size 327657928
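The entry above is a Git LFS pointer, not the weights themselves; the actual ~328 MB safetensors file is stored via LFS. A hedged sketch for checking a downloaded copy against the recorded `oid` (the local path is hypothetical):

```python
# Hedged sketch: verify a downloaded file against the sha256 oid in the LFS pointer.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Path is hypothetical; compare against the oid recorded above.
# print(sha256_of("model.safetensors") ==
#       "d9ab1c34dc3f8168fb68e53c2e8310f9276d022668a5c87ee960d4f3c45c6b6f")
```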
runs/Mar29_06-37-46_652a1ab04a7e/events.out.tfevents.1711694268.652a1ab04a7e.11735.0
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:175fac6b65de7e6bd38d2f8fe9c042126dec4415f5f8b47dcea937f42720e91c
size 10544
runs/Mar29_06-51-34_652a1ab04a7e/events.out.tfevents.1711695095.652a1ab04a7e.11735.1
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71ef91b1efdf3e495c44c24a6fe4b417ff936334a37b249a497ea188a60ce2e6
size 10544
runs/Mar29_06-51-50_652a1ab04a7e/events.out.tfevents.1711695111.652a1ab04a7e.11735.2
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c9a4d64d063e1f0651200deb036be92cee99f52401ed328a5dc55e2babc21701
size 85896
runs/Mar29_06-51-50_652a1ab04a7e/events.out.tfevents.1711695272.652a1ab04a7e.11735.3
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:847f11e57a42ea66d97cd88713e8a7ba812c68c54fec97cd3de4bb9651e5ffb7
size 359
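The four `runs/` entries are TensorBoard event logs written during training (also stored as LFS pointers). A hedged sketch for reading a logged loss curve back with the `tensorboard` package; the directory path and the `eval/loss` scalar tag are assumptions about what the Trainer logged:

```python
# Hedged sketch: read scalar curves from a downloaded runs/ directory.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Mar29_06-51-50_652a1ab04a7e")  # hypothetical local path
acc.Reload()
print(acc.Tags())                    # lists the available scalar tags
for event in acc.Scalars("eval/loss"):   # tag name assumed
    print(event.step, event.value)
```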
special_tokens_map.json
ADDED
@@ -0,0 +1,6 @@
{
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "unk_token": "<|endoftext|>"
}
tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
ADDED
@@ -0,0 +1,20 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "50256": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|endoftext|>",
  "model_max_length": 1024,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}
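Together with `special_tokens_map.json` above, this describes a stock GPT-2 BPE tokenizer in which every special-token role (bos/eos/pad/unk) maps to `<|endoftext|>` (id 50256). A minimal sketch, assuming the repository id:

```python
# Hedged sketch: load the tokenizer defined by tokenizer_config.json / tokenizer.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("samhitmantrala/cricket")  # repo id assumed
print(tokenizer.special_tokens_map)        # all four roles map to <|endoftext|>
print(tokenizer.eos_token_id)              # 50256
print(tokenizer("cricket")["input_ids"])
```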
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6d87fe5a206faa2a5643bfadf6399ef5280ff74dfa9de929f84cd38d421824e0
size 4856
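`training_args.bin` is the pickled `TrainingArguments` object the Trainer saves alongside the model, so the hyperparameters listed in the README can be inspected directly after downloading it. A hedged sketch (the local path is hypothetical):

```python
# Hedged sketch: inspect the saved TrainingArguments (requires trusting the pickle source).
import torch

args = torch.load("training_args.bin", weights_only=False)  # hypothetical local path
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```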
vocab.json
ADDED
The diff for this file is too large to render. See raw diff.