nicolarsen committed
Commit 39d6f5e · verified · 1 Parent(s): 9d2d07c

Upload 3 files

Morpheus Chunky 8000 Heavy.json ADDED
{
  "eval_perpl": [
    39986.122178376485, 525.4462461857372, 146.85947892622303, 137.59400363889685,
    119.03408556378925, 68.2911806791608, 51.53597224805516, 38.91977052757043,
    29.888799922291856, 23.923145248287085, 20.419716922015837, 18.86853028551586,
    16.859550920712056, 17.433234450641617, 16.350264114087548, 16.402020486930322,
    24.348873141653428, 25.79407871706006, 27.238566520144857, 28.85022429173128,
    28.683021455541656, 29.658791384779285, 11.76860632197504, 12.206002667112749,
    10.52406853095854, 11.039844481214304, 11.839845666017055, 12.903261344489982,
    12.96627741455161, 12.885166487594889, 13.45052764436131, 12.7280196850858,
    12.998030919591098, 12.909741821766548, 12.882312857026708, 13.06622231207254,
    12.515110690680235, 13.573780014462306, 12.325732942306088, 12.510113765039383,
    12.697609965065057, 12.686731318533027, 12.4630617901934, 12.503517896934227,
    12.407749979294273, 12.010980188213875, 12.055773530795815, 11.488287621765831,
    9.693441917597589, 9.578203163048764, 9.427283620088943, 9.368114419220754,
    9.297290156561022, 10.034645195060842, 10.05689564926465, 10.434355138627009,
    10.466024818066556, 10.657169647687061, 10.23354649648894, 8.25616542211981,
    8.05616744716054, 7.88023337186201, 7.74232220968035, 7.769741853842186,
    7.807061752431781, 7.741015413247497, 7.641678347708009, 7.722017989352184
  ],
  "eval_loss": [
    10.596287727355957, 6.264247894287109, 4.989476203918457, 4.924307346343994,
    4.779409885406494, 4.223780632019043, 3.9422800540924072, 3.6615023612976074,
    3.3974838256835938, 3.1748464107513428, 3.016500949859619, 2.937495470046997,
    2.8249173164367676, 2.8583784103393555, 2.7942440509796143, 2.7974045276641846,
    3.1924855709075928, 3.2501449584960938, 3.304633855819702, 3.3621177673339844,
    3.3563053607940674, 3.389758586883545, 2.46543550491333, 2.5019278526306152,
    2.3536648750305176, 2.4015109539031982, 2.471470594406128, 2.5574800968170166,
    2.562351942062378, 2.556076765060425, 2.5990183353424072, 2.5438058376312256,
    2.564797878265381, 2.5579822063446045, 2.5558552742004395, 2.570030450820923,
    2.5269367694854736, 2.608139991760254, 2.5116891860961914, 2.5265374183654785,
    2.5414137840270996, 2.5405566692352295, 2.5227692127227783, 2.526010036468506,
    2.5183212757110596, 2.48582124710083, 2.4895436763763428, 2.4413280487060547,
    2.271449565887451, 2.2594900131225586, 2.243607997894287, 2.237311840057373,
    2.2297229766845703, 2.3060436248779297, 2.308258533477783, 2.3451037406921387,
    2.3481342792510986, 2.3662328720092773, 2.3256711959838867, 2.1109602451324463,
    2.086437940597534, 2.0643575191497803, 2.046701669692993, 2.050236940383911,
    2.0550286769866943, 2.0465328693389893, 2.0336172580718994, 2.0440757274627686
  ],
  "train_loss": [
    null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
    null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
    null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
    null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null
  ],
  "completed_steps": [
    0, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1600,
    1700, 1800, 1900, 2000, 2100, 2200, 2300, 2400, 2500, 2600, 2700, 2800, 2900, 3000, 3100, 3200, 3300,
    3400, 3500, 3600, 3700, 3800, 3900, 4000, 4100, 4200, 4300, 4400, 4500, 4600, 4700, 4800, 4900, 5000,
    5100, 5200, 5300, 5400, 5500, 5600, 5700, 5800, 5900, 6000, 6100, 6200, 6300, 6400, 6500, 6600, 6700
  ]
}
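The eval_perpl series appears to be exp(eval_loss) at each logged step (for example, exp(2.0440757) ≈ 7.722, the final pair), while train_loss was not logged (all null). Below is a minimal sketch, assuming the metrics file above is saved locally under its commit name, that checks how closely the two series agree; the exp(loss) relationship is inferred from the values, not documented in the repo.

import json, math

# Assumption: eval_perpl was logged as exp(eval_loss); this only measures
# the worst relative gap between the two series in the file above.
with open("Morpheus Chunky 8000 Heavy.json") as f:
    metrics = json.load(f)

worst = max(
    abs(ppl - math.exp(loss)) / ppl
    for loss, ppl in zip(metrics["eval_loss"], metrics["eval_perpl"])
)
print(f"max relative gap between eval_perpl and exp(eval_loss): {worst:.2e}")

Both series fall from roughly 40,000 perplexity at step 0 to under 8 by step 6700, across the 68 evaluation checkpoints recorded in completed_steps.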
Morpheus Chunky Config.json ADDED
{
  "_name_or_path": "huawei-noah/TinyBERT_General_4L_312D",
  "architectures": [
    "BertForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "cell": {},
  "classifier_dropout": null,
  "emb_size": 312,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 312,
  "initializer_range": 0.02,
  "intermediate_size": 1200,
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 4,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "pre_trained": "",
  "structure": [],
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}
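This is a standard transformers BertConfig derived from TinyBERT_General_4L_312D (4 layers, hidden size 312, 12 heads); TinyBERT-specific extras such as "cell", "emb_size", "pre_trained" and "structure" are simply carried along as attributes. A minimal sketch, assuming the config above is saved locally under its commit name, of instantiating a freshly initialised model from it:

# Minimal sketch: the file name is the one uploaded in this commit;
# everything else is standard transformers API, not repo-specific code.
from transformers import BertConfig, BertForMaskedLM

config = BertConfig.from_json_file("Morpheus Chunky Config.json")
model = BertForMaskedLM(config)  # randomly initialised weights, not the trained checkpoint
print(sum(p.numel() for p in model.parameters()), "parameters")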
Morpheus Chunky Special Tokens.json ADDED
{"[UNK]": 0, "[PAD]": 1, "[CLS]": 2, "[SEP]": 3, "[MASK]": 4}
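This file fixes ids 0–4 for the five standard BERT special tokens (note that [PAD] maps to 1 here, while the config above sets pad_token_id to 0). A minimal sketch, assuming the file is saved locally under its commit name; how the repo's tokenizer actually consumes this mapping is not documented in the commit, so this only loads and inverts it for lookups.

import json

# Load the special-token mapping and build the reverse id -> token table.
with open("Morpheus Chunky Special Tokens.json") as f:
    special_tokens = json.load(f)

id_to_token = {idx: tok for tok, idx in special_tokens.items()}
print(special_tokens["[MASK]"])  # -> 4
print(id_to_token[0])            # -> [UNK]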